mistral_7b_med_gen / trainer_state.json
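
The trainer_state.json below is the checkpoint state written by the Hugging Face transformers Trainer: each record in log_history covers one optimizer step and carries its epoch, grad_norm, learning_rate, loss, and step. A minimal sketch for inspecting the log offline, assuming the file has been downloaded locally as trainer_state.json; the path and the 100-step summary window are illustrative choices, not part of the repository:

import json

# Load the trainer state exported by the Hugging Face Trainer.
with open("trainer_state.json") as f:  # assumed local copy of this file
    state = json.load(f)

history = state["log_history"]  # one dict per logged training step

# Print a coarse summary: mean loss over consecutive 100-step windows.
window = 100  # illustrative window size, not taken from the repo
for start in range(0, len(history), window):
    chunk = history[start:start + window]
    losses = [rec["loss"] for rec in chunk if "loss" in rec]
    if losses:
        print(f"steps {chunk[0]['step']}-{chunk[-1]['step']}: "
              f"mean loss {sum(losses) / len(losses):.4f}")
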
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.42345966546686425,
"eval_steps": 500,
"global_step": 1000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 0.8464024662971497,
"learning_rate": 4e-05,
"loss": 0.3488,
"step": 1
},
{
"epoch": 0.0,
"grad_norm": 0.5379390716552734,
"learning_rate": 8e-05,
"loss": 0.202,
"step": 2
},
{
"epoch": 0.0,
"grad_norm": 0.6969972252845764,
"learning_rate": 0.00012,
"loss": 0.312,
"step": 3
},
{
"epoch": 0.0,
"grad_norm": 0.45794710516929626,
"learning_rate": 0.00016,
"loss": 0.2298,
"step": 4
},
{
"epoch": 0.0,
"grad_norm": 0.44517195224761963,
"learning_rate": 0.0002,
"loss": 0.1948,
"step": 5
},
{
"epoch": 0.0,
"grad_norm": 0.361826092004776,
"learning_rate": 0.00019979899497487438,
"loss": 0.2009,
"step": 6
},
{
"epoch": 0.0,
"grad_norm": 0.631228506565094,
"learning_rate": 0.00019959798994974876,
"loss": 0.2959,
"step": 7
},
{
"epoch": 0.0,
"grad_norm": 0.6042431592941284,
"learning_rate": 0.00019939698492462313,
"loss": 0.2594,
"step": 8
},
{
"epoch": 0.0,
"grad_norm": 0.6731159090995789,
"learning_rate": 0.0001991959798994975,
"loss": 0.3103,
"step": 9
},
{
"epoch": 0.0,
"grad_norm": 0.4393431544303894,
"learning_rate": 0.00019899497487437187,
"loss": 0.1662,
"step": 10
},
{
"epoch": 0.0,
"grad_norm": 0.47421371936798096,
"learning_rate": 0.00019879396984924622,
"loss": 0.2106,
"step": 11
},
{
"epoch": 0.01,
"grad_norm": 0.5907872319221497,
"learning_rate": 0.00019859296482412062,
"loss": 0.351,
"step": 12
},
{
"epoch": 0.01,
"grad_norm": 0.5357276201248169,
"learning_rate": 0.000198391959798995,
"loss": 0.2511,
"step": 13
},
{
"epoch": 0.01,
"grad_norm": 0.8222237825393677,
"learning_rate": 0.00019819095477386937,
"loss": 0.3362,
"step": 14
},
{
"epoch": 0.01,
"grad_norm": 0.4601736068725586,
"learning_rate": 0.0001979899497487437,
"loss": 0.2097,
"step": 15
},
{
"epoch": 0.01,
"grad_norm": 0.5627934336662292,
"learning_rate": 0.0001977889447236181,
"loss": 0.2663,
"step": 16
},
{
"epoch": 0.01,
"grad_norm": 0.6441662907600403,
"learning_rate": 0.00019758793969849249,
"loss": 0.2665,
"step": 17
},
{
"epoch": 0.01,
"grad_norm": 0.5793730020523071,
"learning_rate": 0.00019738693467336683,
"loss": 0.2452,
"step": 18
},
{
"epoch": 0.01,
"grad_norm": 0.5800638794898987,
"learning_rate": 0.0001971859296482412,
"loss": 0.2573,
"step": 19
},
{
"epoch": 0.01,
"grad_norm": 0.6255142092704773,
"learning_rate": 0.0001969849246231156,
"loss": 0.3046,
"step": 20
},
{
"epoch": 0.01,
"grad_norm": 0.5359498858451843,
"learning_rate": 0.00019678391959798995,
"loss": 0.2445,
"step": 21
},
{
"epoch": 0.01,
"grad_norm": 0.5056596398353577,
"learning_rate": 0.00019658291457286432,
"loss": 0.2487,
"step": 22
},
{
"epoch": 0.01,
"grad_norm": 0.7259389162063599,
"learning_rate": 0.0001963819095477387,
"loss": 0.3341,
"step": 23
},
{
"epoch": 0.01,
"grad_norm": 0.5617966055870056,
"learning_rate": 0.0001961809045226131,
"loss": 0.2495,
"step": 24
},
{
"epoch": 0.01,
"grad_norm": 0.5566623210906982,
"learning_rate": 0.00019597989949748744,
"loss": 0.2732,
"step": 25
},
{
"epoch": 0.01,
"grad_norm": 0.4777694344520569,
"learning_rate": 0.00019577889447236181,
"loss": 0.2614,
"step": 26
},
{
"epoch": 0.01,
"grad_norm": 0.4215550422668457,
"learning_rate": 0.0001955778894472362,
"loss": 0.2205,
"step": 27
},
{
"epoch": 0.01,
"grad_norm": 0.5983753204345703,
"learning_rate": 0.00019537688442211056,
"loss": 0.3514,
"step": 28
},
{
"epoch": 0.01,
"grad_norm": 0.5450871586799622,
"learning_rate": 0.00019517587939698493,
"loss": 0.2667,
"step": 29
},
{
"epoch": 0.01,
"grad_norm": 0.488443523645401,
"learning_rate": 0.0001949748743718593,
"loss": 0.2691,
"step": 30
},
{
"epoch": 0.01,
"grad_norm": 0.5746788382530212,
"learning_rate": 0.00019477386934673368,
"loss": 0.3224,
"step": 31
},
{
"epoch": 0.01,
"grad_norm": 0.517463207244873,
"learning_rate": 0.00019457286432160805,
"loss": 0.2269,
"step": 32
},
{
"epoch": 0.01,
"grad_norm": 0.3751838803291321,
"learning_rate": 0.00019437185929648243,
"loss": 0.1812,
"step": 33
},
{
"epoch": 0.01,
"grad_norm": 0.6401920318603516,
"learning_rate": 0.0001941708542713568,
"loss": 0.224,
"step": 34
},
{
"epoch": 0.01,
"grad_norm": 0.8179358243942261,
"learning_rate": 0.00019396984924623117,
"loss": 0.2426,
"step": 35
},
{
"epoch": 0.02,
"grad_norm": 0.5015102028846741,
"learning_rate": 0.00019376884422110552,
"loss": 0.2673,
"step": 36
},
{
"epoch": 0.02,
"grad_norm": 0.558474063873291,
"learning_rate": 0.00019356783919597992,
"loss": 0.2309,
"step": 37
},
{
"epoch": 0.02,
"grad_norm": 0.48645198345184326,
"learning_rate": 0.0001933668341708543,
"loss": 0.2097,
"step": 38
},
{
"epoch": 0.02,
"grad_norm": 0.6228073835372925,
"learning_rate": 0.00019316582914572864,
"loss": 0.223,
"step": 39
},
{
"epoch": 0.02,
"grad_norm": 0.7079476714134216,
"learning_rate": 0.000192964824120603,
"loss": 0.358,
"step": 40
},
{
"epoch": 0.02,
"grad_norm": 0.47177523374557495,
"learning_rate": 0.0001927638190954774,
"loss": 0.1826,
"step": 41
},
{
"epoch": 0.02,
"grad_norm": 0.5007954835891724,
"learning_rate": 0.00019256281407035178,
"loss": 0.235,
"step": 42
},
{
"epoch": 0.02,
"grad_norm": 0.6503958702087402,
"learning_rate": 0.00019236180904522613,
"loss": 0.2588,
"step": 43
},
{
"epoch": 0.02,
"grad_norm": 0.5159966349601746,
"learning_rate": 0.0001921608040201005,
"loss": 0.2266,
"step": 44
},
{
"epoch": 0.02,
"grad_norm": 0.6967421770095825,
"learning_rate": 0.0001919597989949749,
"loss": 0.2165,
"step": 45
},
{
"epoch": 0.02,
"grad_norm": 0.8722981810569763,
"learning_rate": 0.00019175879396984925,
"loss": 0.3716,
"step": 46
},
{
"epoch": 0.02,
"grad_norm": 0.45119017362594604,
"learning_rate": 0.00019155778894472362,
"loss": 0.2343,
"step": 47
},
{
"epoch": 0.02,
"grad_norm": 0.49875113368034363,
"learning_rate": 0.000191356783919598,
"loss": 0.1764,
"step": 48
},
{
"epoch": 0.02,
"grad_norm": 0.6528317332267761,
"learning_rate": 0.0001911557788944724,
"loss": 0.2664,
"step": 49
},
{
"epoch": 0.02,
"grad_norm": 0.5773115754127502,
"learning_rate": 0.00019095477386934674,
"loss": 0.2688,
"step": 50
},
{
"epoch": 0.02,
"grad_norm": 0.6044211387634277,
"learning_rate": 0.0001907537688442211,
"loss": 0.2723,
"step": 51
},
{
"epoch": 0.02,
"grad_norm": 0.5186501741409302,
"learning_rate": 0.00019055276381909548,
"loss": 0.2437,
"step": 52
},
{
"epoch": 0.02,
"grad_norm": 0.5406463742256165,
"learning_rate": 0.00019035175879396986,
"loss": 0.2152,
"step": 53
},
{
"epoch": 0.02,
"grad_norm": 0.6606569290161133,
"learning_rate": 0.00019015075376884423,
"loss": 0.25,
"step": 54
},
{
"epoch": 0.02,
"grad_norm": 0.45699411630630493,
"learning_rate": 0.0001899497487437186,
"loss": 0.1749,
"step": 55
},
{
"epoch": 0.02,
"grad_norm": 0.5122879147529602,
"learning_rate": 0.00018974874371859298,
"loss": 0.2857,
"step": 56
},
{
"epoch": 0.02,
"grad_norm": 0.5184636116027832,
"learning_rate": 0.00018954773869346732,
"loss": 0.2414,
"step": 57
},
{
"epoch": 0.02,
"grad_norm": 0.40956321358680725,
"learning_rate": 0.00018934673366834172,
"loss": 0.177,
"step": 58
},
{
"epoch": 0.02,
"grad_norm": 0.38545408844947815,
"learning_rate": 0.0001891457286432161,
"loss": 0.1856,
"step": 59
},
{
"epoch": 0.03,
"grad_norm": 0.575831949710846,
"learning_rate": 0.00018894472361809047,
"loss": 0.2726,
"step": 60
},
{
"epoch": 0.03,
"grad_norm": 0.6056025624275208,
"learning_rate": 0.00018874371859296481,
"loss": 0.161,
"step": 61
},
{
"epoch": 0.03,
"grad_norm": 0.43295279145240784,
"learning_rate": 0.00018854271356783921,
"loss": 0.1989,
"step": 62
},
{
"epoch": 0.03,
"grad_norm": 0.6881011724472046,
"learning_rate": 0.0001883417085427136,
"loss": 0.3154,
"step": 63
},
{
"epoch": 0.03,
"grad_norm": 0.548842191696167,
"learning_rate": 0.00018814070351758793,
"loss": 0.2496,
"step": 64
},
{
"epoch": 0.03,
"grad_norm": 0.7280535697937012,
"learning_rate": 0.0001879396984924623,
"loss": 0.2954,
"step": 65
},
{
"epoch": 0.03,
"grad_norm": 0.4259982407093048,
"learning_rate": 0.0001877386934673367,
"loss": 0.2039,
"step": 66
},
{
"epoch": 0.03,
"grad_norm": 0.5318437218666077,
"learning_rate": 0.00018753768844221108,
"loss": 0.2795,
"step": 67
},
{
"epoch": 0.03,
"grad_norm": 0.4900606572628021,
"learning_rate": 0.00018733668341708543,
"loss": 0.2276,
"step": 68
},
{
"epoch": 0.03,
"grad_norm": 0.72746342420578,
"learning_rate": 0.0001871356783919598,
"loss": 0.3113,
"step": 69
},
{
"epoch": 0.03,
"grad_norm": 0.48556947708129883,
"learning_rate": 0.0001869346733668342,
"loss": 0.2374,
"step": 70
},
{
"epoch": 0.03,
"grad_norm": 0.5738449692726135,
"learning_rate": 0.00018673366834170854,
"loss": 0.313,
"step": 71
},
{
"epoch": 0.03,
"grad_norm": 0.44591739773750305,
"learning_rate": 0.00018653266331658292,
"loss": 0.2157,
"step": 72
},
{
"epoch": 0.03,
"grad_norm": 0.578846275806427,
"learning_rate": 0.0001863316582914573,
"loss": 0.3421,
"step": 73
},
{
"epoch": 0.03,
"grad_norm": 0.37167009711265564,
"learning_rate": 0.0001861306532663317,
"loss": 0.2151,
"step": 74
},
{
"epoch": 0.03,
"grad_norm": 0.3555445671081543,
"learning_rate": 0.00018592964824120604,
"loss": 0.1793,
"step": 75
},
{
"epoch": 0.03,
"grad_norm": 0.7459139227867126,
"learning_rate": 0.0001857286432160804,
"loss": 0.4165,
"step": 76
},
{
"epoch": 0.03,
"grad_norm": 0.5642420649528503,
"learning_rate": 0.00018552763819095478,
"loss": 0.2745,
"step": 77
},
{
"epoch": 0.03,
"grad_norm": 0.7679473161697388,
"learning_rate": 0.00018532663316582915,
"loss": 0.323,
"step": 78
},
{
"epoch": 0.03,
"grad_norm": 0.2970917820930481,
"learning_rate": 0.00018512562814070353,
"loss": 0.1476,
"step": 79
},
{
"epoch": 0.03,
"grad_norm": 0.2861027121543884,
"learning_rate": 0.0001849246231155779,
"loss": 0.1457,
"step": 80
},
{
"epoch": 0.03,
"grad_norm": 0.7647842168807983,
"learning_rate": 0.00018472361809045227,
"loss": 0.3267,
"step": 81
},
{
"epoch": 0.03,
"grad_norm": 0.598876416683197,
"learning_rate": 0.00018452261306532662,
"loss": 0.2377,
"step": 82
},
{
"epoch": 0.04,
"grad_norm": 0.46886759996414185,
"learning_rate": 0.00018432160804020102,
"loss": 0.2444,
"step": 83
},
{
"epoch": 0.04,
"grad_norm": 0.6426663398742676,
"learning_rate": 0.0001841206030150754,
"loss": 0.2771,
"step": 84
},
{
"epoch": 0.04,
"grad_norm": 0.593606173992157,
"learning_rate": 0.00018391959798994977,
"loss": 0.2985,
"step": 85
},
{
"epoch": 0.04,
"grad_norm": 0.5843706130981445,
"learning_rate": 0.0001837185929648241,
"loss": 0.2548,
"step": 86
},
{
"epoch": 0.04,
"grad_norm": 0.5532352328300476,
"learning_rate": 0.0001835175879396985,
"loss": 0.2587,
"step": 87
},
{
"epoch": 0.04,
"grad_norm": 0.6341427564620972,
"learning_rate": 0.00018331658291457288,
"loss": 0.3246,
"step": 88
},
{
"epoch": 0.04,
"grad_norm": 0.6387240886688232,
"learning_rate": 0.00018311557788944723,
"loss": 0.3389,
"step": 89
},
{
"epoch": 0.04,
"grad_norm": 0.4927945137023926,
"learning_rate": 0.0001829145728643216,
"loss": 0.2141,
"step": 90
},
{
"epoch": 0.04,
"grad_norm": 0.552358090877533,
"learning_rate": 0.000182713567839196,
"loss": 0.3409,
"step": 91
},
{
"epoch": 0.04,
"grad_norm": 0.5346209406852722,
"learning_rate": 0.00018251256281407038,
"loss": 0.236,
"step": 92
},
{
"epoch": 0.04,
"grad_norm": 0.5484887957572937,
"learning_rate": 0.00018231155778894472,
"loss": 0.2121,
"step": 93
},
{
"epoch": 0.04,
"grad_norm": 0.45301929116249084,
"learning_rate": 0.0001821105527638191,
"loss": 0.196,
"step": 94
},
{
"epoch": 0.04,
"grad_norm": 0.5568594336509705,
"learning_rate": 0.0001819095477386935,
"loss": 0.2479,
"step": 95
},
{
"epoch": 0.04,
"grad_norm": 0.5509335994720459,
"learning_rate": 0.00018170854271356784,
"loss": 0.2424,
"step": 96
},
{
"epoch": 0.04,
"grad_norm": 0.4819609820842743,
"learning_rate": 0.00018150753768844221,
"loss": 0.2526,
"step": 97
},
{
"epoch": 0.04,
"grad_norm": 0.4724488854408264,
"learning_rate": 0.0001813065326633166,
"loss": 0.2192,
"step": 98
},
{
"epoch": 0.04,
"grad_norm": 0.554046094417572,
"learning_rate": 0.00018110552763819096,
"loss": 0.3003,
"step": 99
},
{
"epoch": 0.04,
"grad_norm": 0.42843714356422424,
"learning_rate": 0.00018090452261306533,
"loss": 0.1914,
"step": 100
},
{
"epoch": 0.04,
"grad_norm": 0.4597126543521881,
"learning_rate": 0.0001807035175879397,
"loss": 0.2409,
"step": 101
},
{
"epoch": 0.04,
"grad_norm": 0.6792829036712646,
"learning_rate": 0.00018050251256281408,
"loss": 0.2929,
"step": 102
},
{
"epoch": 0.04,
"grad_norm": 0.6163105368614197,
"learning_rate": 0.00018030150753768845,
"loss": 0.2995,
"step": 103
},
{
"epoch": 0.04,
"grad_norm": 0.5015051960945129,
"learning_rate": 0.00018010050251256282,
"loss": 0.2628,
"step": 104
},
{
"epoch": 0.04,
"grad_norm": 0.5530080795288086,
"learning_rate": 0.0001798994974874372,
"loss": 0.2616,
"step": 105
},
{
"epoch": 0.04,
"grad_norm": 0.5682195425033569,
"learning_rate": 0.00017969849246231157,
"loss": 0.2844,
"step": 106
},
{
"epoch": 0.05,
"grad_norm": 0.5251821875572205,
"learning_rate": 0.00017949748743718592,
"loss": 0.2502,
"step": 107
},
{
"epoch": 0.05,
"grad_norm": 0.6445398330688477,
"learning_rate": 0.00017929648241206032,
"loss": 0.2709,
"step": 108
},
{
"epoch": 0.05,
"grad_norm": 0.3485439419746399,
"learning_rate": 0.0001790954773869347,
"loss": 0.15,
"step": 109
},
{
"epoch": 0.05,
"grad_norm": 0.4932727813720703,
"learning_rate": 0.00017889447236180906,
"loss": 0.2608,
"step": 110
},
{
"epoch": 0.05,
"grad_norm": 0.6365306377410889,
"learning_rate": 0.0001786934673366834,
"loss": 0.212,
"step": 111
},
{
"epoch": 0.05,
"grad_norm": 0.6077302694320679,
"learning_rate": 0.0001784924623115578,
"loss": 0.2978,
"step": 112
},
{
"epoch": 0.05,
"grad_norm": 0.5916979908943176,
"learning_rate": 0.00017829145728643218,
"loss": 0.2934,
"step": 113
},
{
"epoch": 0.05,
"grad_norm": 0.634833037853241,
"learning_rate": 0.00017809045226130653,
"loss": 0.2495,
"step": 114
},
{
"epoch": 0.05,
"grad_norm": 0.49833521246910095,
"learning_rate": 0.0001778894472361809,
"loss": 0.2329,
"step": 115
},
{
"epoch": 0.05,
"grad_norm": 0.5283430218696594,
"learning_rate": 0.0001776884422110553,
"loss": 0.2317,
"step": 116
},
{
"epoch": 0.05,
"grad_norm": 0.45649638772010803,
"learning_rate": 0.00017748743718592967,
"loss": 0.2078,
"step": 117
},
{
"epoch": 0.05,
"grad_norm": 0.4763767123222351,
"learning_rate": 0.00017728643216080402,
"loss": 0.2394,
"step": 118
},
{
"epoch": 0.05,
"grad_norm": 0.42383307218551636,
"learning_rate": 0.0001770854271356784,
"loss": 0.2294,
"step": 119
},
{
"epoch": 0.05,
"grad_norm": 0.46265044808387756,
"learning_rate": 0.0001768844221105528,
"loss": 0.2321,
"step": 120
},
{
"epoch": 0.05,
"grad_norm": 0.6166386604309082,
"learning_rate": 0.00017668341708542714,
"loss": 0.2763,
"step": 121
},
{
"epoch": 0.05,
"grad_norm": 0.4315739870071411,
"learning_rate": 0.0001764824120603015,
"loss": 0.235,
"step": 122
},
{
"epoch": 0.05,
"grad_norm": 0.5360255241394043,
"learning_rate": 0.00017628140703517588,
"loss": 0.2765,
"step": 123
},
{
"epoch": 0.05,
"grad_norm": 0.24499636888504028,
"learning_rate": 0.00017608040201005026,
"loss": 0.1312,
"step": 124
},
{
"epoch": 0.05,
"grad_norm": 0.6798754930496216,
"learning_rate": 0.00017587939698492463,
"loss": 0.2664,
"step": 125
},
{
"epoch": 0.05,
"grad_norm": 0.3721320331096649,
"learning_rate": 0.000175678391959799,
"loss": 0.1916,
"step": 126
},
{
"epoch": 0.05,
"grad_norm": 0.46311238408088684,
"learning_rate": 0.00017547738693467338,
"loss": 0.2669,
"step": 127
},
{
"epoch": 0.05,
"grad_norm": 0.6015424728393555,
"learning_rate": 0.00017527638190954775,
"loss": 0.2961,
"step": 128
},
{
"epoch": 0.05,
"grad_norm": 0.4419313669204712,
"learning_rate": 0.00017507537688442212,
"loss": 0.2165,
"step": 129
},
{
"epoch": 0.06,
"grad_norm": 0.4701198935508728,
"learning_rate": 0.0001748743718592965,
"loss": 0.2125,
"step": 130
},
{
"epoch": 0.06,
"grad_norm": 0.655662477016449,
"learning_rate": 0.00017467336683417087,
"loss": 0.2876,
"step": 131
},
{
"epoch": 0.06,
"grad_norm": 0.516601026058197,
"learning_rate": 0.00017447236180904521,
"loss": 0.2212,
"step": 132
},
{
"epoch": 0.06,
"grad_norm": 0.41010063886642456,
"learning_rate": 0.00017427135678391961,
"loss": 0.193,
"step": 133
},
{
"epoch": 0.06,
"grad_norm": 0.5393264889717102,
"learning_rate": 0.000174070351758794,
"loss": 0.2937,
"step": 134
},
{
"epoch": 0.06,
"grad_norm": 0.7648224830627441,
"learning_rate": 0.00017386934673366836,
"loss": 0.2454,
"step": 135
},
{
"epoch": 0.06,
"grad_norm": 0.33873265981674194,
"learning_rate": 0.0001736683417085427,
"loss": 0.1509,
"step": 136
},
{
"epoch": 0.06,
"grad_norm": 0.4198644161224365,
"learning_rate": 0.0001734673366834171,
"loss": 0.2785,
"step": 137
},
{
"epoch": 0.06,
"grad_norm": 0.48141562938690186,
"learning_rate": 0.00017326633165829148,
"loss": 0.2382,
"step": 138
},
{
"epoch": 0.06,
"grad_norm": 0.503093421459198,
"learning_rate": 0.00017306532663316582,
"loss": 0.1948,
"step": 139
},
{
"epoch": 0.06,
"grad_norm": 0.5640888810157776,
"learning_rate": 0.0001728643216080402,
"loss": 0.3005,
"step": 140
},
{
"epoch": 0.06,
"grad_norm": 0.7310622334480286,
"learning_rate": 0.0001726633165829146,
"loss": 0.2385,
"step": 141
},
{
"epoch": 0.06,
"grad_norm": 0.4998970925807953,
"learning_rate": 0.00017246231155778897,
"loss": 0.1729,
"step": 142
},
{
"epoch": 0.06,
"grad_norm": 0.7067146897315979,
"learning_rate": 0.00017226130653266332,
"loss": 0.2471,
"step": 143
},
{
"epoch": 0.06,
"grad_norm": 0.50519198179245,
"learning_rate": 0.0001720603015075377,
"loss": 0.2125,
"step": 144
},
{
"epoch": 0.06,
"grad_norm": 0.740380048751831,
"learning_rate": 0.00017185929648241206,
"loss": 0.3141,
"step": 145
},
{
"epoch": 0.06,
"grad_norm": 0.5456702709197998,
"learning_rate": 0.00017165829145728644,
"loss": 0.1892,
"step": 146
},
{
"epoch": 0.06,
"grad_norm": 0.42799606919288635,
"learning_rate": 0.0001714572864321608,
"loss": 0.1578,
"step": 147
},
{
"epoch": 0.06,
"grad_norm": 0.4013369381427765,
"learning_rate": 0.00017125628140703518,
"loss": 0.1967,
"step": 148
},
{
"epoch": 0.06,
"grad_norm": 0.4266522228717804,
"learning_rate": 0.00017105527638190955,
"loss": 0.2127,
"step": 149
},
{
"epoch": 0.06,
"grad_norm": 0.6991720795631409,
"learning_rate": 0.00017085427135678393,
"loss": 0.1862,
"step": 150
},
{
"epoch": 0.06,
"grad_norm": 0.5733714699745178,
"learning_rate": 0.0001706532663316583,
"loss": 0.2042,
"step": 151
},
{
"epoch": 0.06,
"grad_norm": 0.5416551232337952,
"learning_rate": 0.00017045226130653267,
"loss": 0.2362,
"step": 152
},
{
"epoch": 0.06,
"grad_norm": 0.5882843732833862,
"learning_rate": 0.00017025125628140705,
"loss": 0.2472,
"step": 153
},
{
"epoch": 0.07,
"grad_norm": 0.5776798129081726,
"learning_rate": 0.00017005025125628142,
"loss": 0.2371,
"step": 154
},
{
"epoch": 0.07,
"grad_norm": 0.4624580442905426,
"learning_rate": 0.0001698492462311558,
"loss": 0.1732,
"step": 155
},
{
"epoch": 0.07,
"grad_norm": 0.6425135731697083,
"learning_rate": 0.00016964824120603016,
"loss": 0.2333,
"step": 156
},
{
"epoch": 0.07,
"grad_norm": 0.5413373708724976,
"learning_rate": 0.0001694472361809045,
"loss": 0.309,
"step": 157
},
{
"epoch": 0.07,
"grad_norm": 0.485989511013031,
"learning_rate": 0.0001692462311557789,
"loss": 0.2429,
"step": 158
},
{
"epoch": 0.07,
"grad_norm": 0.397554874420166,
"learning_rate": 0.00016904522613065328,
"loss": 0.1851,
"step": 159
},
{
"epoch": 0.07,
"grad_norm": 0.3770751357078552,
"learning_rate": 0.00016884422110552766,
"loss": 0.1868,
"step": 160
},
{
"epoch": 0.07,
"grad_norm": 0.4285247325897217,
"learning_rate": 0.000168643216080402,
"loss": 0.2162,
"step": 161
},
{
"epoch": 0.07,
"grad_norm": 0.5541285276412964,
"learning_rate": 0.0001684422110552764,
"loss": 0.222,
"step": 162
},
{
"epoch": 0.07,
"grad_norm": 0.46622520685195923,
"learning_rate": 0.00016824120603015078,
"loss": 0.2261,
"step": 163
},
{
"epoch": 0.07,
"grad_norm": 0.6006364822387695,
"learning_rate": 0.00016804020100502512,
"loss": 0.1759,
"step": 164
},
{
"epoch": 0.07,
"grad_norm": 0.5816208720207214,
"learning_rate": 0.0001678391959798995,
"loss": 0.2334,
"step": 165
},
{
"epoch": 0.07,
"grad_norm": 0.5955444574356079,
"learning_rate": 0.0001676381909547739,
"loss": 0.266,
"step": 166
},
{
"epoch": 0.07,
"grad_norm": 0.6741182804107666,
"learning_rate": 0.00016743718592964827,
"loss": 0.2282,
"step": 167
},
{
"epoch": 0.07,
"grad_norm": 0.5965931415557861,
"learning_rate": 0.0001672361809045226,
"loss": 0.2768,
"step": 168
},
{
"epoch": 0.07,
"grad_norm": 0.5617109537124634,
"learning_rate": 0.00016703517587939699,
"loss": 0.2306,
"step": 169
},
{
"epoch": 0.07,
"grad_norm": 0.4135819673538208,
"learning_rate": 0.00016683417085427136,
"loss": 0.1766,
"step": 170
},
{
"epoch": 0.07,
"grad_norm": 0.6315815448760986,
"learning_rate": 0.00016663316582914573,
"loss": 0.2797,
"step": 171
},
{
"epoch": 0.07,
"grad_norm": 0.47556841373443604,
"learning_rate": 0.0001664321608040201,
"loss": 0.2466,
"step": 172
},
{
"epoch": 0.07,
"grad_norm": 0.4448830783367157,
"learning_rate": 0.00016623115577889448,
"loss": 0.1973,
"step": 173
},
{
"epoch": 0.07,
"grad_norm": 0.4126035273075104,
"learning_rate": 0.00016603015075376885,
"loss": 0.1983,
"step": 174
},
{
"epoch": 0.07,
"grad_norm": 0.3132772445678711,
"learning_rate": 0.00016582914572864322,
"loss": 0.1092,
"step": 175
},
{
"epoch": 0.07,
"grad_norm": 0.35979005694389343,
"learning_rate": 0.0001656281407035176,
"loss": 0.1578,
"step": 176
},
{
"epoch": 0.07,
"grad_norm": 1.180555820465088,
"learning_rate": 0.00016542713567839197,
"loss": 0.2077,
"step": 177
},
{
"epoch": 0.08,
"grad_norm": 0.419072687625885,
"learning_rate": 0.00016522613065326634,
"loss": 0.2021,
"step": 178
},
{
"epoch": 0.08,
"grad_norm": 0.3829838037490845,
"learning_rate": 0.00016502512562814072,
"loss": 0.2232,
"step": 179
},
{
"epoch": 0.08,
"grad_norm": 0.46572762727737427,
"learning_rate": 0.0001648241206030151,
"loss": 0.24,
"step": 180
},
{
"epoch": 0.08,
"grad_norm": 0.5526518821716309,
"learning_rate": 0.00016462311557788946,
"loss": 0.1901,
"step": 181
},
{
"epoch": 0.08,
"grad_norm": 0.5201070308685303,
"learning_rate": 0.0001644221105527638,
"loss": 0.239,
"step": 182
},
{
"epoch": 0.08,
"grad_norm": 0.8323246836662292,
"learning_rate": 0.0001642211055276382,
"loss": 0.2358,
"step": 183
},
{
"epoch": 0.08,
"grad_norm": 0.4965194761753082,
"learning_rate": 0.00016402010050251258,
"loss": 0.2338,
"step": 184
},
{
"epoch": 0.08,
"grad_norm": 0.6949551701545715,
"learning_rate": 0.00016381909547738695,
"loss": 0.2852,
"step": 185
},
{
"epoch": 0.08,
"grad_norm": 0.5701273679733276,
"learning_rate": 0.0001636180904522613,
"loss": 0.2168,
"step": 186
},
{
"epoch": 0.08,
"grad_norm": 0.5376931428909302,
"learning_rate": 0.0001634170854271357,
"loss": 0.1949,
"step": 187
},
{
"epoch": 0.08,
"grad_norm": 0.71084064245224,
"learning_rate": 0.00016321608040201007,
"loss": 0.2778,
"step": 188
},
{
"epoch": 0.08,
"grad_norm": 0.4446854591369629,
"learning_rate": 0.00016301507537688442,
"loss": 0.1724,
"step": 189
},
{
"epoch": 0.08,
"grad_norm": 0.4885426163673401,
"learning_rate": 0.0001628140703517588,
"loss": 0.2089,
"step": 190
},
{
"epoch": 0.08,
"grad_norm": 0.6394463181495667,
"learning_rate": 0.00016261306532663316,
"loss": 0.3154,
"step": 191
},
{
"epoch": 0.08,
"grad_norm": 0.3582012355327606,
"learning_rate": 0.00016241206030150756,
"loss": 0.1843,
"step": 192
},
{
"epoch": 0.08,
"grad_norm": 0.4599544107913971,
"learning_rate": 0.0001622110552763819,
"loss": 0.2417,
"step": 193
},
{
"epoch": 0.08,
"grad_norm": 0.4549001157283783,
"learning_rate": 0.00016201005025125628,
"loss": 0.1791,
"step": 194
},
{
"epoch": 0.08,
"grad_norm": 0.6934012770652771,
"learning_rate": 0.00016180904522613066,
"loss": 0.3607,
"step": 195
},
{
"epoch": 0.08,
"grad_norm": 0.6693351864814758,
"learning_rate": 0.00016160804020100503,
"loss": 0.2773,
"step": 196
},
{
"epoch": 0.08,
"grad_norm": 0.5981723666191101,
"learning_rate": 0.0001614070351758794,
"loss": 0.2875,
"step": 197
},
{
"epoch": 0.08,
"grad_norm": 0.4262889623641968,
"learning_rate": 0.00016120603015075378,
"loss": 0.2016,
"step": 198
},
{
"epoch": 0.08,
"grad_norm": 0.35076144337654114,
"learning_rate": 0.00016100502512562815,
"loss": 0.1483,
"step": 199
},
{
"epoch": 0.08,
"grad_norm": 0.5418830513954163,
"learning_rate": 0.00016080402010050252,
"loss": 0.2251,
"step": 200
},
{
"epoch": 0.09,
"grad_norm": 0.4608229696750641,
"learning_rate": 0.0001606030150753769,
"loss": 0.2225,
"step": 201
},
{
"epoch": 0.09,
"grad_norm": 0.6093661785125732,
"learning_rate": 0.00016040201005025127,
"loss": 0.2892,
"step": 202
},
{
"epoch": 0.09,
"grad_norm": 0.5232911705970764,
"learning_rate": 0.00016020100502512564,
"loss": 0.2725,
"step": 203
},
{
"epoch": 0.09,
"grad_norm": 0.4431813359260559,
"learning_rate": 0.00016,
"loss": 0.1911,
"step": 204
},
{
"epoch": 0.09,
"grad_norm": 0.3894941806793213,
"learning_rate": 0.00015979899497487439,
"loss": 0.1865,
"step": 205
},
{
"epoch": 0.09,
"grad_norm": 0.5542987585067749,
"learning_rate": 0.00015959798994974876,
"loss": 0.2471,
"step": 206
},
{
"epoch": 0.09,
"grad_norm": 0.4694427251815796,
"learning_rate": 0.0001593969849246231,
"loss": 0.1946,
"step": 207
},
{
"epoch": 0.09,
"grad_norm": 0.6252871155738831,
"learning_rate": 0.0001591959798994975,
"loss": 0.274,
"step": 208
},
{
"epoch": 0.09,
"grad_norm": 0.4899269640445709,
"learning_rate": 0.00015899497487437188,
"loss": 0.2558,
"step": 209
},
{
"epoch": 0.09,
"grad_norm": 0.470769464969635,
"learning_rate": 0.00015879396984924625,
"loss": 0.1648,
"step": 210
},
{
"epoch": 0.09,
"grad_norm": 0.5405287742614746,
"learning_rate": 0.0001585929648241206,
"loss": 0.258,
"step": 211
},
{
"epoch": 0.09,
"grad_norm": 0.7451691627502441,
"learning_rate": 0.000158391959798995,
"loss": 0.3103,
"step": 212
},
{
"epoch": 0.09,
"grad_norm": 0.5479302406311035,
"learning_rate": 0.00015819095477386937,
"loss": 0.2905,
"step": 213
},
{
"epoch": 0.09,
"grad_norm": 0.442613810300827,
"learning_rate": 0.00015798994974874372,
"loss": 0.1913,
"step": 214
},
{
"epoch": 0.09,
"grad_norm": 0.5008172392845154,
"learning_rate": 0.0001577889447236181,
"loss": 0.1954,
"step": 215
},
{
"epoch": 0.09,
"grad_norm": 0.7391428351402283,
"learning_rate": 0.00015758793969849246,
"loss": 0.2254,
"step": 216
},
{
"epoch": 0.09,
"grad_norm": 0.5006258487701416,
"learning_rate": 0.00015738693467336686,
"loss": 0.172,
"step": 217
},
{
"epoch": 0.09,
"grad_norm": 0.5547099113464355,
"learning_rate": 0.0001571859296482412,
"loss": 0.1796,
"step": 218
},
{
"epoch": 0.09,
"grad_norm": 0.6475242376327515,
"learning_rate": 0.00015698492462311558,
"loss": 0.2977,
"step": 219
},
{
"epoch": 0.09,
"grad_norm": 0.6442254185676575,
"learning_rate": 0.00015678391959798995,
"loss": 0.2228,
"step": 220
},
{
"epoch": 0.09,
"grad_norm": 0.6465861797332764,
"learning_rate": 0.00015658291457286433,
"loss": 0.341,
"step": 221
},
{
"epoch": 0.09,
"grad_norm": 0.45650309324264526,
"learning_rate": 0.0001563819095477387,
"loss": 0.251,
"step": 222
},
{
"epoch": 0.09,
"grad_norm": 0.4780837893486023,
"learning_rate": 0.00015618090452261307,
"loss": 0.1468,
"step": 223
},
{
"epoch": 0.09,
"grad_norm": 0.464327871799469,
"learning_rate": 0.00015597989949748745,
"loss": 0.2534,
"step": 224
},
{
"epoch": 0.1,
"grad_norm": 0.3201116621494293,
"learning_rate": 0.00015577889447236182,
"loss": 0.1893,
"step": 225
},
{
"epoch": 0.1,
"grad_norm": 0.4082617461681366,
"learning_rate": 0.0001555778894472362,
"loss": 0.2026,
"step": 226
},
{
"epoch": 0.1,
"grad_norm": 0.35708555579185486,
"learning_rate": 0.00015537688442211056,
"loss": 0.1746,
"step": 227
},
{
"epoch": 0.1,
"grad_norm": 0.4080864489078522,
"learning_rate": 0.00015517587939698494,
"loss": 0.1585,
"step": 228
},
{
"epoch": 0.1,
"grad_norm": 0.471068799495697,
"learning_rate": 0.0001549748743718593,
"loss": 0.2246,
"step": 229
},
{
"epoch": 0.1,
"grad_norm": 0.450642466545105,
"learning_rate": 0.00015477386934673368,
"loss": 0.1979,
"step": 230
},
{
"epoch": 0.1,
"grad_norm": 0.5421797633171082,
"learning_rate": 0.00015457286432160806,
"loss": 0.2056,
"step": 231
},
{
"epoch": 0.1,
"grad_norm": 0.41133153438568115,
"learning_rate": 0.0001543718592964824,
"loss": 0.2068,
"step": 232
},
{
"epoch": 0.1,
"grad_norm": 0.45967182517051697,
"learning_rate": 0.0001541708542713568,
"loss": 0.2182,
"step": 233
},
{
"epoch": 0.1,
"grad_norm": 0.49316030740737915,
"learning_rate": 0.00015396984924623117,
"loss": 0.1856,
"step": 234
},
{
"epoch": 0.1,
"grad_norm": 0.8641966581344604,
"learning_rate": 0.00015376884422110555,
"loss": 0.3491,
"step": 235
},
{
"epoch": 0.1,
"grad_norm": 0.5350483655929565,
"learning_rate": 0.0001535678391959799,
"loss": 0.2346,
"step": 236
},
{
"epoch": 0.1,
"grad_norm": 0.7115418314933777,
"learning_rate": 0.00015336683417085427,
"loss": 0.2561,
"step": 237
},
{
"epoch": 0.1,
"grad_norm": 0.6251563429832458,
"learning_rate": 0.00015316582914572867,
"loss": 0.2683,
"step": 238
},
{
"epoch": 0.1,
"grad_norm": 0.5587172508239746,
"learning_rate": 0.000152964824120603,
"loss": 0.227,
"step": 239
},
{
"epoch": 0.1,
"grad_norm": 0.5323506593704224,
"learning_rate": 0.00015276381909547739,
"loss": 0.1803,
"step": 240
},
{
"epoch": 0.1,
"grad_norm": 0.4084484577178955,
"learning_rate": 0.00015256281407035176,
"loss": 0.1665,
"step": 241
},
{
"epoch": 0.1,
"grad_norm": 0.6423041224479675,
"learning_rate": 0.00015236180904522613,
"loss": 0.3084,
"step": 242
},
{
"epoch": 0.1,
"grad_norm": 0.3014443516731262,
"learning_rate": 0.0001521608040201005,
"loss": 0.1687,
"step": 243
},
{
"epoch": 0.1,
"grad_norm": 0.437581866979599,
"learning_rate": 0.00015195979899497488,
"loss": 0.2215,
"step": 244
},
{
"epoch": 0.1,
"grad_norm": 0.49017801880836487,
"learning_rate": 0.00015175879396984925,
"loss": 0.2294,
"step": 245
},
{
"epoch": 0.1,
"grad_norm": 0.31686311960220337,
"learning_rate": 0.00015155778894472362,
"loss": 0.1547,
"step": 246
},
{
"epoch": 0.1,
"grad_norm": 0.47575148940086365,
"learning_rate": 0.000151356783919598,
"loss": 0.1956,
"step": 247
},
{
"epoch": 0.11,
"grad_norm": 0.5036256909370422,
"learning_rate": 0.00015115577889447237,
"loss": 0.2365,
"step": 248
},
{
"epoch": 0.11,
"grad_norm": 0.6078888177871704,
"learning_rate": 0.00015095477386934674,
"loss": 0.3543,
"step": 249
},
{
"epoch": 0.11,
"grad_norm": 0.5907254219055176,
"learning_rate": 0.00015075376884422112,
"loss": 0.3306,
"step": 250
},
{
"epoch": 0.11,
"grad_norm": 0.5760742425918579,
"learning_rate": 0.0001505527638190955,
"loss": 0.3161,
"step": 251
},
{
"epoch": 0.11,
"grad_norm": 0.5956338047981262,
"learning_rate": 0.00015035175879396986,
"loss": 0.3077,
"step": 252
},
{
"epoch": 0.11,
"grad_norm": 0.30162495374679565,
"learning_rate": 0.00015015075376884423,
"loss": 0.1726,
"step": 253
},
{
"epoch": 0.11,
"grad_norm": 0.5765033960342407,
"learning_rate": 0.0001499497487437186,
"loss": 0.2605,
"step": 254
},
{
"epoch": 0.11,
"grad_norm": 0.607266902923584,
"learning_rate": 0.00014974874371859298,
"loss": 0.25,
"step": 255
},
{
"epoch": 0.11,
"grad_norm": 0.5608094930648804,
"learning_rate": 0.00014954773869346735,
"loss": 0.2919,
"step": 256
},
{
"epoch": 0.11,
"grad_norm": 0.48317083716392517,
"learning_rate": 0.0001493467336683417,
"loss": 0.2096,
"step": 257
},
{
"epoch": 0.11,
"grad_norm": 0.4825833737850189,
"learning_rate": 0.0001491457286432161,
"loss": 0.2043,
"step": 258
},
{
"epoch": 0.11,
"grad_norm": 0.4634295105934143,
"learning_rate": 0.00014894472361809047,
"loss": 0.2384,
"step": 259
},
{
"epoch": 0.11,
"grad_norm": 0.4482268691062927,
"learning_rate": 0.00014874371859296482,
"loss": 0.2357,
"step": 260
},
{
"epoch": 0.11,
"grad_norm": 0.46578094363212585,
"learning_rate": 0.0001485427135678392,
"loss": 0.2681,
"step": 261
},
{
"epoch": 0.11,
"grad_norm": 0.5069098472595215,
"learning_rate": 0.00014834170854271356,
"loss": 0.2637,
"step": 262
},
{
"epoch": 0.11,
"grad_norm": 0.3945165276527405,
"learning_rate": 0.00014814070351758796,
"loss": 0.1639,
"step": 263
},
{
"epoch": 0.11,
"grad_norm": 0.30467841029167175,
"learning_rate": 0.0001479396984924623,
"loss": 0.1697,
"step": 264
},
{
"epoch": 0.11,
"grad_norm": 0.4881652891635895,
"learning_rate": 0.00014773869346733668,
"loss": 0.2332,
"step": 265
},
{
"epoch": 0.11,
"grad_norm": 0.4942628741264343,
"learning_rate": 0.00014753768844221106,
"loss": 0.2342,
"step": 266
},
{
"epoch": 0.11,
"grad_norm": 0.42789214849472046,
"learning_rate": 0.00014733668341708543,
"loss": 0.1512,
"step": 267
},
{
"epoch": 0.11,
"grad_norm": 0.6232418417930603,
"learning_rate": 0.0001471356783919598,
"loss": 0.3112,
"step": 268
},
{
"epoch": 0.11,
"grad_norm": 0.528900146484375,
"learning_rate": 0.00014693467336683417,
"loss": 0.2127,
"step": 269
},
{
"epoch": 0.11,
"grad_norm": 0.6290087103843689,
"learning_rate": 0.00014673366834170855,
"loss": 0.2726,
"step": 270
},
{
"epoch": 0.11,
"grad_norm": 0.42025241255760193,
"learning_rate": 0.00014653266331658292,
"loss": 0.1842,
"step": 271
},
{
"epoch": 0.12,
"grad_norm": 0.41428080201148987,
"learning_rate": 0.0001463316582914573,
"loss": 0.2146,
"step": 272
},
{
"epoch": 0.12,
"grad_norm": 0.3822866976261139,
"learning_rate": 0.00014613065326633167,
"loss": 0.157,
"step": 273
},
{
"epoch": 0.12,
"grad_norm": 0.5606590509414673,
"learning_rate": 0.00014592964824120604,
"loss": 0.2617,
"step": 274
},
{
"epoch": 0.12,
"grad_norm": 0.5164960026741028,
"learning_rate": 0.0001457286432160804,
"loss": 0.2353,
"step": 275
},
{
"epoch": 0.12,
"grad_norm": 0.47546109557151794,
"learning_rate": 0.00014552763819095479,
"loss": 0.1443,
"step": 276
},
{
"epoch": 0.12,
"grad_norm": 0.6333800554275513,
"learning_rate": 0.00014532663316582916,
"loss": 0.2557,
"step": 277
},
{
"epoch": 0.12,
"grad_norm": 0.5148735642433167,
"learning_rate": 0.00014512562814070353,
"loss": 0.2115,
"step": 278
},
{
"epoch": 0.12,
"grad_norm": 0.46984830498695374,
"learning_rate": 0.0001449246231155779,
"loss": 0.2134,
"step": 279
},
{
"epoch": 0.12,
"grad_norm": 0.5740910768508911,
"learning_rate": 0.00014472361809045228,
"loss": 0.2558,
"step": 280
},
{
"epoch": 0.12,
"grad_norm": 0.3297008275985718,
"learning_rate": 0.00014452261306532665,
"loss": 0.1292,
"step": 281
},
{
"epoch": 0.12,
"grad_norm": 0.5956743359565735,
"learning_rate": 0.000144321608040201,
"loss": 0.1468,
"step": 282
},
{
"epoch": 0.12,
"grad_norm": 0.7425946593284607,
"learning_rate": 0.00014412060301507537,
"loss": 0.3394,
"step": 283
},
{
"epoch": 0.12,
"grad_norm": 0.6454773545265198,
"learning_rate": 0.00014391959798994977,
"loss": 0.223,
"step": 284
},
{
"epoch": 0.12,
"grad_norm": 0.6458725333213806,
"learning_rate": 0.00014371859296482411,
"loss": 0.3424,
"step": 285
},
{
"epoch": 0.12,
"grad_norm": 0.440280944108963,
"learning_rate": 0.0001435175879396985,
"loss": 0.1558,
"step": 286
},
{
"epoch": 0.12,
"grad_norm": 0.4852254390716553,
"learning_rate": 0.00014331658291457286,
"loss": 0.2069,
"step": 287
},
{
"epoch": 0.12,
"grad_norm": 0.8012005090713501,
"learning_rate": 0.00014311557788944726,
"loss": 0.247,
"step": 288
},
{
"epoch": 0.12,
"grad_norm": 0.46413126587867737,
"learning_rate": 0.0001429145728643216,
"loss": 0.2691,
"step": 289
},
{
"epoch": 0.12,
"grad_norm": 0.3412766456604004,
"learning_rate": 0.00014271356783919598,
"loss": 0.1581,
"step": 290
},
{
"epoch": 0.12,
"grad_norm": 0.4218701422214508,
"learning_rate": 0.00014251256281407035,
"loss": 0.2102,
"step": 291
},
{
"epoch": 0.12,
"grad_norm": 0.569365382194519,
"learning_rate": 0.00014231155778894473,
"loss": 0.2343,
"step": 292
},
{
"epoch": 0.12,
"grad_norm": 0.4240405261516571,
"learning_rate": 0.0001421105527638191,
"loss": 0.2201,
"step": 293
},
{
"epoch": 0.12,
"grad_norm": 0.5527992844581604,
"learning_rate": 0.00014190954773869347,
"loss": 0.3343,
"step": 294
},
{
"epoch": 0.12,
"grad_norm": 0.5937294960021973,
"learning_rate": 0.00014170854271356784,
"loss": 0.2895,
"step": 295
},
{
"epoch": 0.13,
"grad_norm": 0.43181246519088745,
"learning_rate": 0.00014150753768844222,
"loss": 0.2039,
"step": 296
},
{
"epoch": 0.13,
"grad_norm": 0.5217970013618469,
"learning_rate": 0.0001413065326633166,
"loss": 0.2642,
"step": 297
},
{
"epoch": 0.13,
"grad_norm": 0.496409147977829,
"learning_rate": 0.00014110552763819096,
"loss": 0.2071,
"step": 298
},
{
"epoch": 0.13,
"grad_norm": 0.38781318068504333,
"learning_rate": 0.00014090452261306534,
"loss": 0.1885,
"step": 299
},
{
"epoch": 0.13,
"grad_norm": 0.48885929584503174,
"learning_rate": 0.0001407035175879397,
"loss": 0.2035,
"step": 300
},
{
"epoch": 0.13,
"grad_norm": 0.3917163014411926,
"learning_rate": 0.00014050251256281408,
"loss": 0.1853,
"step": 301
},
{
"epoch": 0.13,
"grad_norm": 0.5597235560417175,
"learning_rate": 0.00014030150753768846,
"loss": 0.2303,
"step": 302
},
{
"epoch": 0.13,
"grad_norm": 0.656257688999176,
"learning_rate": 0.0001401005025125628,
"loss": 0.3848,
"step": 303
},
{
"epoch": 0.13,
"grad_norm": 0.4178178608417511,
"learning_rate": 0.0001398994974874372,
"loss": 0.1598,
"step": 304
},
{
"epoch": 0.13,
"grad_norm": 0.5133432745933533,
"learning_rate": 0.00013969849246231157,
"loss": 0.2427,
"step": 305
},
{
"epoch": 0.13,
"grad_norm": 0.39841488003730774,
"learning_rate": 0.00013949748743718595,
"loss": 0.2144,
"step": 306
},
{
"epoch": 0.13,
"grad_norm": 0.30867376923561096,
"learning_rate": 0.0001392964824120603,
"loss": 0.12,
"step": 307
},
{
"epoch": 0.13,
"grad_norm": 0.5577691197395325,
"learning_rate": 0.00013909547738693467,
"loss": 0.2167,
"step": 308
},
{
"epoch": 0.13,
"grad_norm": 0.4248412847518921,
"learning_rate": 0.00013889447236180907,
"loss": 0.2066,
"step": 309
},
{
"epoch": 0.13,
"grad_norm": 0.41768893599510193,
"learning_rate": 0.0001386934673366834,
"loss": 0.1845,
"step": 310
},
{
"epoch": 0.13,
"grad_norm": 0.40006211400032043,
"learning_rate": 0.00013849246231155778,
"loss": 0.2085,
"step": 311
},
{
"epoch": 0.13,
"grad_norm": 0.479081928730011,
"learning_rate": 0.00013829145728643216,
"loss": 0.2253,
"step": 312
},
{
"epoch": 0.13,
"grad_norm": 0.6218622922897339,
"learning_rate": 0.00013809045226130656,
"loss": 0.2304,
"step": 313
},
{
"epoch": 0.13,
"grad_norm": 0.3523159921169281,
"learning_rate": 0.0001378894472361809,
"loss": 0.1487,
"step": 314
},
{
"epoch": 0.13,
"grad_norm": 0.5560610294342041,
"learning_rate": 0.00013768844221105528,
"loss": 0.2568,
"step": 315
},
{
"epoch": 0.13,
"grad_norm": 0.40717363357543945,
"learning_rate": 0.00013748743718592965,
"loss": 0.1973,
"step": 316
},
{
"epoch": 0.13,
"grad_norm": 0.5085451006889343,
"learning_rate": 0.00013728643216080402,
"loss": 0.2091,
"step": 317
},
{
"epoch": 0.13,
"grad_norm": 0.4253947138786316,
"learning_rate": 0.0001370854271356784,
"loss": 0.1766,
"step": 318
},
{
"epoch": 0.14,
"grad_norm": 0.569270133972168,
"learning_rate": 0.00013688442211055277,
"loss": 0.224,
"step": 319
},
{
"epoch": 0.14,
"grad_norm": 0.637840986251831,
"learning_rate": 0.00013668341708542714,
"loss": 0.2496,
"step": 320
},
{
"epoch": 0.14,
"grad_norm": 0.38835230469703674,
"learning_rate": 0.00013648241206030151,
"loss": 0.1423,
"step": 321
},
{
"epoch": 0.14,
"grad_norm": 0.4860616624355316,
"learning_rate": 0.0001362814070351759,
"loss": 0.1989,
"step": 322
},
{
"epoch": 0.14,
"grad_norm": 0.35445964336395264,
"learning_rate": 0.00013608040201005026,
"loss": 0.1497,
"step": 323
},
{
"epoch": 0.14,
"grad_norm": 0.3935982584953308,
"learning_rate": 0.00013587939698492463,
"loss": 0.1486,
"step": 324
},
{
"epoch": 0.14,
"grad_norm": 0.49870574474334717,
"learning_rate": 0.000135678391959799,
"loss": 0.2537,
"step": 325
},
{
"epoch": 0.14,
"grad_norm": 0.5200526118278503,
"learning_rate": 0.00013547738693467338,
"loss": 0.229,
"step": 326
},
{
"epoch": 0.14,
"grad_norm": 0.32679593563079834,
"learning_rate": 0.00013527638190954775,
"loss": 0.1545,
"step": 327
},
{
"epoch": 0.14,
"grad_norm": 0.5571056604385376,
"learning_rate": 0.0001350753768844221,
"loss": 0.2033,
"step": 328
},
{
"epoch": 0.14,
"grad_norm": 0.6576252579689026,
"learning_rate": 0.00013487437185929647,
"loss": 0.2517,
"step": 329
},
{
"epoch": 0.14,
"grad_norm": 0.5548920631408691,
"learning_rate": 0.00013467336683417087,
"loss": 0.288,
"step": 330
},
{
"epoch": 0.14,
"grad_norm": 0.6064463257789612,
"learning_rate": 0.00013447236180904524,
"loss": 0.3079,
"step": 331
},
{
"epoch": 0.14,
"grad_norm": 0.31661224365234375,
"learning_rate": 0.0001342713567839196,
"loss": 0.1413,
"step": 332
},
{
"epoch": 0.14,
"grad_norm": 0.5535265803337097,
"learning_rate": 0.00013407035175879396,
"loss": 0.2165,
"step": 333
},
{
"epoch": 0.14,
"grad_norm": 0.6876853108406067,
"learning_rate": 0.00013386934673366836,
"loss": 0.2922,
"step": 334
},
{
"epoch": 0.14,
"grad_norm": 0.4079119861125946,
"learning_rate": 0.0001336683417085427,
"loss": 0.2241,
"step": 335
},
{
"epoch": 0.14,
"grad_norm": 0.6994270086288452,
"learning_rate": 0.00013346733668341708,
"loss": 0.2341,
"step": 336
},
{
"epoch": 0.14,
"grad_norm": 0.44559770822525024,
"learning_rate": 0.00013326633165829146,
"loss": 0.223,
"step": 337
},
{
"epoch": 0.14,
"grad_norm": 0.45040494203567505,
"learning_rate": 0.00013306532663316586,
"loss": 0.2135,
"step": 338
},
{
"epoch": 0.14,
"grad_norm": 0.3731357753276825,
"learning_rate": 0.0001328643216080402,
"loss": 0.1656,
"step": 339
},
{
"epoch": 0.14,
"grad_norm": 0.3673096299171448,
"learning_rate": 0.00013266331658291457,
"loss": 0.1443,
"step": 340
},
{
"epoch": 0.14,
"grad_norm": 0.3495692014694214,
"learning_rate": 0.00013246231155778895,
"loss": 0.1709,
"step": 341
},
{
"epoch": 0.14,
"grad_norm": 0.5194134712219238,
"learning_rate": 0.00013226130653266332,
"loss": 0.2019,
"step": 342
},
{
"epoch": 0.15,
"grad_norm": 0.4705762565135956,
"learning_rate": 0.0001320603015075377,
"loss": 0.2321,
"step": 343
},
{
"epoch": 0.15,
"grad_norm": 0.5076374411582947,
"learning_rate": 0.00013185929648241207,
"loss": 0.1972,
"step": 344
},
{
"epoch": 0.15,
"grad_norm": 0.48760005831718445,
"learning_rate": 0.00013165829145728644,
"loss": 0.1725,
"step": 345
},
{
"epoch": 0.15,
"grad_norm": 0.5571461319923401,
"learning_rate": 0.0001314572864321608,
"loss": 0.1909,
"step": 346
},
{
"epoch": 0.15,
"grad_norm": 0.45073091983795166,
"learning_rate": 0.00013125628140703518,
"loss": 0.215,
"step": 347
},
{
"epoch": 0.15,
"grad_norm": 0.3622073531150818,
"learning_rate": 0.00013105527638190956,
"loss": 0.1711,
"step": 348
},
{
"epoch": 0.15,
"grad_norm": 0.62618488073349,
"learning_rate": 0.00013085427135678393,
"loss": 0.2632,
"step": 349
},
{
"epoch": 0.15,
"grad_norm": 0.4555056095123291,
"learning_rate": 0.0001306532663316583,
"loss": 0.2097,
"step": 350
},
{
"epoch": 0.15,
"grad_norm": 0.6761806607246399,
"learning_rate": 0.00013045226130653268,
"loss": 0.2299,
"step": 351
},
{
"epoch": 0.15,
"grad_norm": 0.5507418513298035,
"learning_rate": 0.00013025125628140705,
"loss": 0.2501,
"step": 352
},
{
"epoch": 0.15,
"grad_norm": 0.5329481363296509,
"learning_rate": 0.0001300502512562814,
"loss": 0.2035,
"step": 353
},
{
"epoch": 0.15,
"grad_norm": 0.5065528750419617,
"learning_rate": 0.00012984924623115577,
"loss": 0.1878,
"step": 354
},
{
"epoch": 0.15,
"grad_norm": 0.42520710825920105,
"learning_rate": 0.00012964824120603017,
"loss": 0.1861,
"step": 355
},
{
"epoch": 0.15,
"grad_norm": 0.5527524948120117,
"learning_rate": 0.00012944723618090454,
"loss": 0.2651,
"step": 356
},
{
"epoch": 0.15,
"grad_norm": 0.38461214303970337,
"learning_rate": 0.0001292462311557789,
"loss": 0.1386,
"step": 357
},
{
"epoch": 0.15,
"grad_norm": 0.31263014674186707,
"learning_rate": 0.00012904522613065326,
"loss": 0.1746,
"step": 358
},
{
"epoch": 0.15,
"grad_norm": 0.38606059551239014,
"learning_rate": 0.00012884422110552766,
"loss": 0.1502,
"step": 359
},
{
"epoch": 0.15,
"grad_norm": 0.37648263573646545,
"learning_rate": 0.000128643216080402,
"loss": 0.1662,
"step": 360
},
{
"epoch": 0.15,
"grad_norm": 0.550672173500061,
"learning_rate": 0.00012844221105527638,
"loss": 0.2232,
"step": 361
},
{
"epoch": 0.15,
"grad_norm": 0.42321744561195374,
"learning_rate": 0.00012824120603015075,
"loss": 0.1622,
"step": 362
},
{
"epoch": 0.15,
"grad_norm": 0.7612898945808411,
"learning_rate": 0.00012804020100502515,
"loss": 0.317,
"step": 363
},
{
"epoch": 0.15,
"grad_norm": 0.3842167556285858,
"learning_rate": 0.0001278391959798995,
"loss": 0.1435,
"step": 364
},
{
"epoch": 0.15,
"grad_norm": 0.40250709652900696,
"learning_rate": 0.00012763819095477387,
"loss": 0.2155,
"step": 365
},
{
"epoch": 0.15,
"grad_norm": 0.5009702444076538,
"learning_rate": 0.00012743718592964824,
"loss": 0.2199,
"step": 366
},
{
"epoch": 0.16,
"grad_norm": 0.4045158922672272,
"learning_rate": 0.00012723618090452262,
"loss": 0.1815,
"step": 367
},
{
"epoch": 0.16,
"grad_norm": 0.6103566884994507,
"learning_rate": 0.000127035175879397,
"loss": 0.2449,
"step": 368
},
{
"epoch": 0.16,
"grad_norm": 0.42164695262908936,
"learning_rate": 0.00012683417085427136,
"loss": 0.2092,
"step": 369
},
{
"epoch": 0.16,
"grad_norm": 0.30808696150779724,
"learning_rate": 0.00012663316582914574,
"loss": 0.1614,
"step": 370
},
{
"epoch": 0.16,
"grad_norm": 0.4597577452659607,
"learning_rate": 0.0001264321608040201,
"loss": 0.1898,
"step": 371
},
{
"epoch": 0.16,
"grad_norm": 0.6087939143180847,
"learning_rate": 0.00012623115577889448,
"loss": 0.2863,
"step": 372
},
{
"epoch": 0.16,
"grad_norm": 0.42504316568374634,
"learning_rate": 0.00012603015075376885,
"loss": 0.1619,
"step": 373
},
{
"epoch": 0.16,
"grad_norm": 0.45179635286331177,
"learning_rate": 0.00012582914572864323,
"loss": 0.2398,
"step": 374
},
{
"epoch": 0.16,
"grad_norm": 0.3470228612422943,
"learning_rate": 0.0001256281407035176,
"loss": 0.1546,
"step": 375
},
{
"epoch": 0.16,
"grad_norm": 0.4229773283004761,
"learning_rate": 0.00012542713567839197,
"loss": 0.178,
"step": 376
},
{
"epoch": 0.16,
"grad_norm": 0.3456679880619049,
"learning_rate": 0.00012522613065326635,
"loss": 0.1597,
"step": 377
},
{
"epoch": 0.16,
"grad_norm": 0.4441853165626526,
"learning_rate": 0.0001250251256281407,
"loss": 0.2628,
"step": 378
},
{
"epoch": 0.16,
"grad_norm": 0.640588104724884,
"learning_rate": 0.00012482412060301507,
"loss": 0.3056,
"step": 379
},
{
"epoch": 0.16,
"grad_norm": 0.4708755612373352,
"learning_rate": 0.00012462311557788947,
"loss": 0.1812,
"step": 380
},
{
"epoch": 0.16,
"grad_norm": 0.5513359308242798,
"learning_rate": 0.00012442211055276384,
"loss": 0.268,
"step": 381
},
{
"epoch": 0.16,
"grad_norm": 0.4472403824329376,
"learning_rate": 0.00012422110552763818,
"loss": 0.2437,
"step": 382
},
{
"epoch": 0.16,
"grad_norm": 0.3861692249774933,
"learning_rate": 0.00012402010050251256,
"loss": 0.2121,
"step": 383
},
{
"epoch": 0.16,
"grad_norm": 0.39574527740478516,
"learning_rate": 0.00012381909547738696,
"loss": 0.1584,
"step": 384
},
{
"epoch": 0.16,
"grad_norm": 0.3973689377307892,
"learning_rate": 0.0001236180904522613,
"loss": 0.1864,
"step": 385
},
{
"epoch": 0.16,
"grad_norm": 0.3852390944957733,
"learning_rate": 0.00012341708542713568,
"loss": 0.1936,
"step": 386
},
{
"epoch": 0.16,
"grad_norm": 0.7175599336624146,
"learning_rate": 0.00012321608040201005,
"loss": 0.3122,
"step": 387
},
{
"epoch": 0.16,
"grad_norm": 0.38100698590278625,
"learning_rate": 0.00012301507537688445,
"loss": 0.1986,
"step": 388
},
{
"epoch": 0.16,
"grad_norm": 0.5062547326087952,
"learning_rate": 0.0001228140703517588,
"loss": 0.2042,
"step": 389
},
{
"epoch": 0.17,
"grad_norm": 0.596307098865509,
"learning_rate": 0.00012261306532663317,
"loss": 0.2033,
"step": 390
},
{
"epoch": 0.17,
"grad_norm": 0.5087392926216125,
"learning_rate": 0.00012241206030150754,
"loss": 0.2427,
"step": 391
},
{
"epoch": 0.17,
"grad_norm": 0.39303168654441833,
"learning_rate": 0.00012221105527638191,
"loss": 0.2413,
"step": 392
},
{
"epoch": 0.17,
"grad_norm": 0.5655479431152344,
"learning_rate": 0.00012201005025125629,
"loss": 0.2434,
"step": 393
},
{
"epoch": 0.17,
"grad_norm": 0.37353000044822693,
"learning_rate": 0.00012180904522613066,
"loss": 0.1971,
"step": 394
},
{
"epoch": 0.17,
"grad_norm": 0.4436759948730469,
"learning_rate": 0.00012160804020100502,
"loss": 0.2238,
"step": 395
},
{
"epoch": 0.17,
"grad_norm": 0.3882415294647217,
"learning_rate": 0.00012140703517587942,
"loss": 0.1489,
"step": 396
},
{
"epoch": 0.17,
"grad_norm": 0.5103885531425476,
"learning_rate": 0.00012120603015075378,
"loss": 0.2176,
"step": 397
},
{
"epoch": 0.17,
"grad_norm": 0.30824366211891174,
"learning_rate": 0.00012100502512562815,
"loss": 0.1488,
"step": 398
},
{
"epoch": 0.17,
"grad_norm": 0.6127710342407227,
"learning_rate": 0.00012080402010050251,
"loss": 0.3003,
"step": 399
},
{
"epoch": 0.17,
"grad_norm": 0.7671993374824524,
"learning_rate": 0.00012060301507537688,
"loss": 0.3096,
"step": 400
},
{
"epoch": 0.17,
"grad_norm": 0.5708138942718506,
"learning_rate": 0.00012040201005025127,
"loss": 0.2392,
"step": 401
},
{
"epoch": 0.17,
"grad_norm": 0.42668575048446655,
"learning_rate": 0.00012020100502512563,
"loss": 0.1882,
"step": 402
},
{
"epoch": 0.17,
"grad_norm": 0.5934561491012573,
"learning_rate": 0.00012,
"loss": 0.2632,
"step": 403
},
{
"epoch": 0.17,
"grad_norm": 0.43244239687919617,
"learning_rate": 0.00011979899497487436,
"loss": 0.2458,
"step": 404
},
{
"epoch": 0.17,
"grad_norm": 0.47703075408935547,
"learning_rate": 0.00011959798994974876,
"loss": 0.2011,
"step": 405
},
{
"epoch": 0.17,
"grad_norm": 0.722775399684906,
"learning_rate": 0.00011939698492462312,
"loss": 0.2793,
"step": 406
},
{
"epoch": 0.17,
"grad_norm": 0.7606828212738037,
"learning_rate": 0.0001191959798994975,
"loss": 0.2416,
"step": 407
},
{
"epoch": 0.17,
"grad_norm": 0.4368147552013397,
"learning_rate": 0.00011899497487437185,
"loss": 0.2078,
"step": 408
},
{
"epoch": 0.17,
"grad_norm": 0.5169294476509094,
"learning_rate": 0.00011879396984924624,
"loss": 0.2636,
"step": 409
},
{
"epoch": 0.17,
"grad_norm": 0.6035071611404419,
"learning_rate": 0.00011859296482412061,
"loss": 0.2767,
"step": 410
},
{
"epoch": 0.17,
"grad_norm": 0.5761665105819702,
"learning_rate": 0.00011839195979899497,
"loss": 0.2485,
"step": 411
},
{
"epoch": 0.17,
"grad_norm": 0.37852367758750916,
"learning_rate": 0.00011819095477386935,
"loss": 0.182,
"step": 412
},
{
"epoch": 0.17,
"grad_norm": 0.5448431968688965,
"learning_rate": 0.00011798994974874373,
"loss": 0.2761,
"step": 413
},
{
"epoch": 0.18,
"grad_norm": 0.46888917684555054,
"learning_rate": 0.0001177889447236181,
"loss": 0.193,
"step": 414
},
{
"epoch": 0.18,
"grad_norm": 0.42235323786735535,
"learning_rate": 0.00011758793969849247,
"loss": 0.1648,
"step": 415
},
{
"epoch": 0.18,
"grad_norm": 0.6363163590431213,
"learning_rate": 0.00011738693467336684,
"loss": 0.2847,
"step": 416
},
{
"epoch": 0.18,
"grad_norm": 0.49301624298095703,
"learning_rate": 0.00011718592964824122,
"loss": 0.2491,
"step": 417
},
{
"epoch": 0.18,
"grad_norm": 0.38518691062927246,
"learning_rate": 0.00011698492462311558,
"loss": 0.1776,
"step": 418
},
{
"epoch": 0.18,
"grad_norm": 0.7396824955940247,
"learning_rate": 0.00011678391959798996,
"loss": 0.1623,
"step": 419
},
{
"epoch": 0.18,
"grad_norm": 0.43891581892967224,
"learning_rate": 0.00011658291457286432,
"loss": 0.1766,
"step": 420
},
{
"epoch": 0.18,
"grad_norm": 0.28952324390411377,
"learning_rate": 0.00011638190954773872,
"loss": 0.1274,
"step": 421
},
{
"epoch": 0.18,
"grad_norm": 0.3468928039073944,
"learning_rate": 0.00011618090452261308,
"loss": 0.1249,
"step": 422
},
{
"epoch": 0.18,
"grad_norm": 0.49591997265815735,
"learning_rate": 0.00011597989949748745,
"loss": 0.1978,
"step": 423
},
{
"epoch": 0.18,
"grad_norm": 0.4968617260456085,
"learning_rate": 0.00011577889447236181,
"loss": 0.2065,
"step": 424
},
{
"epoch": 0.18,
"grad_norm": 0.47323212027549744,
"learning_rate": 0.00011557788944723618,
"loss": 0.2298,
"step": 425
},
{
"epoch": 0.18,
"grad_norm": 0.6332685947418213,
"learning_rate": 0.00011537688442211057,
"loss": 0.2474,
"step": 426
},
{
"epoch": 0.18,
"grad_norm": 0.5754814743995667,
"learning_rate": 0.00011517587939698493,
"loss": 0.2771,
"step": 427
},
{
"epoch": 0.18,
"grad_norm": 0.5415279269218445,
"learning_rate": 0.0001149748743718593,
"loss": 0.2068,
"step": 428
},
{
"epoch": 0.18,
"grad_norm": 0.5374659895896912,
"learning_rate": 0.00011477386934673366,
"loss": 0.2339,
"step": 429
},
{
"epoch": 0.18,
"grad_norm": 0.5732151865959167,
"learning_rate": 0.00011457286432160806,
"loss": 0.1922,
"step": 430
},
{
"epoch": 0.18,
"grad_norm": 0.5028908252716064,
"learning_rate": 0.00011437185929648242,
"loss": 0.1981,
"step": 431
},
{
"epoch": 0.18,
"grad_norm": 0.6984832286834717,
"learning_rate": 0.00011417085427135679,
"loss": 0.2972,
"step": 432
},
{
"epoch": 0.18,
"grad_norm": 0.4545430839061737,
"learning_rate": 0.00011396984924623115,
"loss": 0.172,
"step": 433
},
{
"epoch": 0.18,
"grad_norm": 0.5130873918533325,
"learning_rate": 0.00011376884422110554,
"loss": 0.1968,
"step": 434
},
{
"epoch": 0.18,
"grad_norm": 0.6122608184814453,
"learning_rate": 0.00011356783919597991,
"loss": 0.2781,
"step": 435
},
{
"epoch": 0.18,
"grad_norm": 0.8052571415901184,
"learning_rate": 0.00011336683417085427,
"loss": 0.2175,
"step": 436
},
{
"epoch": 0.19,
"grad_norm": 0.5444523096084595,
"learning_rate": 0.00011316582914572864,
"loss": 0.2295,
"step": 437
},
{
"epoch": 0.19,
"grad_norm": 0.4526660442352295,
"learning_rate": 0.00011296482412060303,
"loss": 0.1815,
"step": 438
},
{
"epoch": 0.19,
"grad_norm": 0.3232409656047821,
"learning_rate": 0.0001127638190954774,
"loss": 0.1794,
"step": 439
},
{
"epoch": 0.19,
"grad_norm": 0.6111499071121216,
"learning_rate": 0.00011256281407035176,
"loss": 0.2706,
"step": 440
},
{
"epoch": 0.19,
"grad_norm": 0.45419833064079285,
"learning_rate": 0.00011236180904522614,
"loss": 0.2081,
"step": 441
},
{
"epoch": 0.19,
"grad_norm": 0.4061923623085022,
"learning_rate": 0.00011216080402010052,
"loss": 0.1677,
"step": 442
},
{
"epoch": 0.19,
"grad_norm": 0.3937043845653534,
"learning_rate": 0.00011195979899497488,
"loss": 0.1704,
"step": 443
},
{
"epoch": 0.19,
"grad_norm": 0.38572314381599426,
"learning_rate": 0.00011175879396984925,
"loss": 0.18,
"step": 444
},
{
"epoch": 0.19,
"grad_norm": 0.6058553457260132,
"learning_rate": 0.00011155778894472361,
"loss": 0.2878,
"step": 445
},
{
"epoch": 0.19,
"grad_norm": 0.43011635541915894,
"learning_rate": 0.00011135678391959799,
"loss": 0.1797,
"step": 446
},
{
"epoch": 0.19,
"grad_norm": 0.44222158193588257,
"learning_rate": 0.00011115577889447237,
"loss": 0.1713,
"step": 447
},
{
"epoch": 0.19,
"grad_norm": 0.38344600796699524,
"learning_rate": 0.00011095477386934675,
"loss": 0.1663,
"step": 448
},
{
"epoch": 0.19,
"grad_norm": 0.6274626851081848,
"learning_rate": 0.0001107537688442211,
"loss": 0.2461,
"step": 449
},
{
"epoch": 0.19,
"grad_norm": 0.5568159222602844,
"learning_rate": 0.00011055276381909548,
"loss": 0.2774,
"step": 450
},
{
"epoch": 0.19,
"grad_norm": 0.3897724747657776,
"learning_rate": 0.00011035175879396986,
"loss": 0.1874,
"step": 451
},
{
"epoch": 0.19,
"grad_norm": 0.42583590745925903,
"learning_rate": 0.00011015075376884422,
"loss": 0.1591,
"step": 452
},
{
"epoch": 0.19,
"grad_norm": 0.3379085659980774,
"learning_rate": 0.0001099497487437186,
"loss": 0.1679,
"step": 453
},
{
"epoch": 0.19,
"grad_norm": 0.38965246081352234,
"learning_rate": 0.00010974874371859296,
"loss": 0.1503,
"step": 454
},
{
"epoch": 0.19,
"grad_norm": 0.5767619609832764,
"learning_rate": 0.00010954773869346736,
"loss": 0.2193,
"step": 455
},
{
"epoch": 0.19,
"grad_norm": 0.44832924008369446,
"learning_rate": 0.00010934673366834172,
"loss": 0.1931,
"step": 456
},
{
"epoch": 0.19,
"grad_norm": 0.43048256635665894,
"learning_rate": 0.00010914572864321609,
"loss": 0.1937,
"step": 457
},
{
"epoch": 0.19,
"grad_norm": 0.35769662261009216,
"learning_rate": 0.00010894472361809045,
"loss": 0.164,
"step": 458
},
{
"epoch": 0.19,
"grad_norm": 0.3910098373889923,
"learning_rate": 0.00010874371859296483,
"loss": 0.1505,
"step": 459
},
{
"epoch": 0.19,
"grad_norm": 0.5478328466415405,
"learning_rate": 0.00010854271356783921,
"loss": 0.2857,
"step": 460
},
{
"epoch": 0.2,
"grad_norm": 0.4424973130226135,
"learning_rate": 0.00010834170854271357,
"loss": 0.2499,
"step": 461
},
{
"epoch": 0.2,
"grad_norm": 0.6312743425369263,
"learning_rate": 0.00010814070351758794,
"loss": 0.2633,
"step": 462
},
{
"epoch": 0.2,
"grad_norm": 0.4388771653175354,
"learning_rate": 0.00010793969849246233,
"loss": 0.1829,
"step": 463
},
{
"epoch": 0.2,
"grad_norm": 0.391568660736084,
"learning_rate": 0.0001077386934673367,
"loss": 0.1809,
"step": 464
},
{
"epoch": 0.2,
"grad_norm": 0.696977972984314,
"learning_rate": 0.00010753768844221106,
"loss": 0.2329,
"step": 465
},
{
"epoch": 0.2,
"grad_norm": 0.5525539517402649,
"learning_rate": 0.00010733668341708543,
"loss": 0.2176,
"step": 466
},
{
"epoch": 0.2,
"grad_norm": 0.42988142371177673,
"learning_rate": 0.00010713567839195982,
"loss": 0.2135,
"step": 467
},
{
"epoch": 0.2,
"grad_norm": 0.43678900599479675,
"learning_rate": 0.00010693467336683418,
"loss": 0.1607,
"step": 468
},
{
"epoch": 0.2,
"grad_norm": 0.40529149770736694,
"learning_rate": 0.00010673366834170855,
"loss": 0.1831,
"step": 469
},
{
"epoch": 0.2,
"grad_norm": 0.47344741225242615,
"learning_rate": 0.00010653266331658291,
"loss": 0.1809,
"step": 470
},
{
"epoch": 0.2,
"grad_norm": 0.4426578879356384,
"learning_rate": 0.00010633165829145728,
"loss": 0.213,
"step": 471
},
{
"epoch": 0.2,
"grad_norm": 0.3296988904476166,
"learning_rate": 0.00010613065326633167,
"loss": 0.151,
"step": 472
},
{
"epoch": 0.2,
"grad_norm": 0.6979312300682068,
"learning_rate": 0.00010592964824120604,
"loss": 0.2797,
"step": 473
},
{
"epoch": 0.2,
"grad_norm": 0.3550786077976227,
"learning_rate": 0.0001057286432160804,
"loss": 0.2019,
"step": 474
},
{
"epoch": 0.2,
"grad_norm": 0.5628048181533813,
"learning_rate": 0.00010552763819095478,
"loss": 0.252,
"step": 475
},
{
"epoch": 0.2,
"grad_norm": 0.3417305052280426,
"learning_rate": 0.00010532663316582916,
"loss": 0.1568,
"step": 476
},
{
"epoch": 0.2,
"grad_norm": 0.5894454121589661,
"learning_rate": 0.00010512562814070352,
"loss": 0.2911,
"step": 477
},
{
"epoch": 0.2,
"grad_norm": 0.7387587428092957,
"learning_rate": 0.0001049246231155779,
"loss": 0.316,
"step": 478
},
{
"epoch": 0.2,
"grad_norm": 0.585311770439148,
"learning_rate": 0.00010472361809045225,
"loss": 0.2556,
"step": 479
},
{
"epoch": 0.2,
"grad_norm": 0.43730881810188293,
"learning_rate": 0.00010452261306532664,
"loss": 0.2182,
"step": 480
},
{
"epoch": 0.2,
"grad_norm": 0.6188883185386658,
"learning_rate": 0.00010432160804020101,
"loss": 0.3168,
"step": 481
},
{
"epoch": 0.2,
"grad_norm": 0.4580572545528412,
"learning_rate": 0.00010412060301507539,
"loss": 0.1873,
"step": 482
},
{
"epoch": 0.2,
"grad_norm": 0.46275004744529724,
"learning_rate": 0.00010391959798994975,
"loss": 0.2342,
"step": 483
},
{
"epoch": 0.2,
"grad_norm": 0.6386345624923706,
"learning_rate": 0.00010371859296482413,
"loss": 0.1601,
"step": 484
},
{
"epoch": 0.21,
"grad_norm": 0.4647381901741028,
"learning_rate": 0.0001035175879396985,
"loss": 0.2553,
"step": 485
},
{
"epoch": 0.21,
"grad_norm": 0.392676442861557,
"learning_rate": 0.00010331658291457286,
"loss": 0.1727,
"step": 486
},
{
"epoch": 0.21,
"grad_norm": 0.6019845008850098,
"learning_rate": 0.00010311557788944724,
"loss": 0.3154,
"step": 487
},
{
"epoch": 0.21,
"grad_norm": 0.4485069215297699,
"learning_rate": 0.00010291457286432162,
"loss": 0.197,
"step": 488
},
{
"epoch": 0.21,
"grad_norm": 0.5247390866279602,
"learning_rate": 0.00010271356783919598,
"loss": 0.2212,
"step": 489
},
{
"epoch": 0.21,
"grad_norm": 0.5502858757972717,
"learning_rate": 0.00010251256281407036,
"loss": 0.2542,
"step": 490
},
{
"epoch": 0.21,
"grad_norm": 0.5899858474731445,
"learning_rate": 0.00010231155778894473,
"loss": 0.2944,
"step": 491
},
{
"epoch": 0.21,
"grad_norm": 0.3429397642612457,
"learning_rate": 0.00010211055276381909,
"loss": 0.1621,
"step": 492
},
{
"epoch": 0.21,
"grad_norm": 0.5685318112373352,
"learning_rate": 0.00010190954773869348,
"loss": 0.274,
"step": 493
},
{
"epoch": 0.21,
"grad_norm": 0.4867880046367645,
"learning_rate": 0.00010170854271356785,
"loss": 0.2044,
"step": 494
},
{
"epoch": 0.21,
"grad_norm": 0.5068891644477844,
"learning_rate": 0.00010150753768844221,
"loss": 0.2473,
"step": 495
},
{
"epoch": 0.21,
"grad_norm": 0.5069141983985901,
"learning_rate": 0.00010130653266331658,
"loss": 0.2217,
"step": 496
},
{
"epoch": 0.21,
"grad_norm": 0.5071575045585632,
"learning_rate": 0.00010110552763819097,
"loss": 0.255,
"step": 497
},
{
"epoch": 0.21,
"grad_norm": 0.37730205059051514,
"learning_rate": 0.00010090452261306533,
"loss": 0.1576,
"step": 498
},
{
"epoch": 0.21,
"grad_norm": 0.3155452609062195,
"learning_rate": 0.0001007035175879397,
"loss": 0.1367,
"step": 499
},
{
"epoch": 0.21,
"grad_norm": 0.43023696541786194,
"learning_rate": 0.00010050251256281407,
"loss": 0.2032,
"step": 500
},
{
"epoch": 0.21,
"grad_norm": 0.3815259635448456,
"learning_rate": 0.00010030150753768846,
"loss": 0.1504,
"step": 501
},
{
"epoch": 0.21,
"grad_norm": 0.4405147135257721,
"learning_rate": 0.00010010050251256282,
"loss": 0.1929,
"step": 502
},
{
"epoch": 0.21,
"grad_norm": 0.5072082877159119,
"learning_rate": 9.989949748743719e-05,
"loss": 0.2101,
"step": 503
},
{
"epoch": 0.21,
"grad_norm": 0.42251914739608765,
"learning_rate": 9.969849246231156e-05,
"loss": 0.2453,
"step": 504
},
{
"epoch": 0.21,
"grad_norm": 0.5401110053062439,
"learning_rate": 9.949748743718594e-05,
"loss": 0.2435,
"step": 505
},
{
"epoch": 0.21,
"grad_norm": 0.6041588187217712,
"learning_rate": 9.929648241206031e-05,
"loss": 0.262,
"step": 506
},
{
"epoch": 0.21,
"grad_norm": 0.518314778804779,
"learning_rate": 9.909547738693468e-05,
"loss": 0.2278,
"step": 507
},
{
"epoch": 0.22,
"grad_norm": 0.4269994795322418,
"learning_rate": 9.889447236180906e-05,
"loss": 0.1982,
"step": 508
},
{
"epoch": 0.22,
"grad_norm": 0.41217511892318726,
"learning_rate": 9.869346733668342e-05,
"loss": 0.2239,
"step": 509
},
{
"epoch": 0.22,
"grad_norm": 0.3829472064971924,
"learning_rate": 9.84924623115578e-05,
"loss": 0.1467,
"step": 510
},
{
"epoch": 0.22,
"grad_norm": 0.44718316197395325,
"learning_rate": 9.829145728643216e-05,
"loss": 0.2209,
"step": 511
},
{
"epoch": 0.22,
"grad_norm": 0.3699786961078644,
"learning_rate": 9.809045226130655e-05,
"loss": 0.1533,
"step": 512
},
{
"epoch": 0.22,
"grad_norm": 0.42279669642448425,
"learning_rate": 9.788944723618091e-05,
"loss": 0.1862,
"step": 513
},
{
"epoch": 0.22,
"grad_norm": 1.2697815895080566,
"learning_rate": 9.768844221105528e-05,
"loss": 0.2211,
"step": 514
},
{
"epoch": 0.22,
"grad_norm": 0.4647808074951172,
"learning_rate": 9.748743718592965e-05,
"loss": 0.2384,
"step": 515
},
{
"epoch": 0.22,
"grad_norm": 0.45755669474601746,
"learning_rate": 9.728643216080403e-05,
"loss": 0.2094,
"step": 516
},
{
"epoch": 0.22,
"grad_norm": 0.6136307716369629,
"learning_rate": 9.70854271356784e-05,
"loss": 0.2892,
"step": 517
},
{
"epoch": 0.22,
"grad_norm": 0.4117870628833771,
"learning_rate": 9.688442211055276e-05,
"loss": 0.1885,
"step": 518
},
{
"epoch": 0.22,
"grad_norm": 0.43240073323249817,
"learning_rate": 9.668341708542715e-05,
"loss": 0.2072,
"step": 519
},
{
"epoch": 0.22,
"grad_norm": 0.4392816722393036,
"learning_rate": 9.64824120603015e-05,
"loss": 0.188,
"step": 520
},
{
"epoch": 0.22,
"grad_norm": 0.31601712107658386,
"learning_rate": 9.628140703517589e-05,
"loss": 0.1547,
"step": 521
},
{
"epoch": 0.22,
"grad_norm": 0.38773342967033386,
"learning_rate": 9.608040201005025e-05,
"loss": 0.1645,
"step": 522
},
{
"epoch": 0.22,
"grad_norm": 0.4962211847305298,
"learning_rate": 9.587939698492462e-05,
"loss": 0.212,
"step": 523
},
{
"epoch": 0.22,
"grad_norm": 0.49664270877838135,
"learning_rate": 9.5678391959799e-05,
"loss": 0.2379,
"step": 524
},
{
"epoch": 0.22,
"grad_norm": 0.5445250272750854,
"learning_rate": 9.547738693467337e-05,
"loss": 0.2364,
"step": 525
},
{
"epoch": 0.22,
"grad_norm": 0.5319061279296875,
"learning_rate": 9.527638190954774e-05,
"loss": 0.2105,
"step": 526
},
{
"epoch": 0.22,
"grad_norm": 0.4577290713787079,
"learning_rate": 9.507537688442212e-05,
"loss": 0.1805,
"step": 527
},
{
"epoch": 0.22,
"grad_norm": 0.5664304494857788,
"learning_rate": 9.487437185929649e-05,
"loss": 0.1455,
"step": 528
},
{
"epoch": 0.22,
"grad_norm": 0.4801952838897705,
"learning_rate": 9.467336683417086e-05,
"loss": 0.259,
"step": 529
},
{
"epoch": 0.22,
"grad_norm": 0.735759973526001,
"learning_rate": 9.447236180904523e-05,
"loss": 0.3454,
"step": 530
},
{
"epoch": 0.22,
"grad_norm": 0.4495560824871063,
"learning_rate": 9.427135678391961e-05,
"loss": 0.2254,
"step": 531
},
{
"epoch": 0.23,
"grad_norm": 0.41473764181137085,
"learning_rate": 9.407035175879397e-05,
"loss": 0.1851,
"step": 532
},
{
"epoch": 0.23,
"grad_norm": 0.3230622112751007,
"learning_rate": 9.386934673366835e-05,
"loss": 0.1518,
"step": 533
},
{
"epoch": 0.23,
"grad_norm": 0.6365942358970642,
"learning_rate": 9.366834170854271e-05,
"loss": 0.2762,
"step": 534
},
{
"epoch": 0.23,
"grad_norm": 0.4550526738166809,
"learning_rate": 9.34673366834171e-05,
"loss": 0.1956,
"step": 535
},
{
"epoch": 0.23,
"grad_norm": 0.5498403310775757,
"learning_rate": 9.326633165829146e-05,
"loss": 0.2066,
"step": 536
},
{
"epoch": 0.23,
"grad_norm": 0.5257768034934998,
"learning_rate": 9.306532663316585e-05,
"loss": 0.2014,
"step": 537
},
{
"epoch": 0.23,
"grad_norm": 0.49358436465263367,
"learning_rate": 9.28643216080402e-05,
"loss": 0.221,
"step": 538
},
{
"epoch": 0.23,
"grad_norm": 0.33671697974205017,
"learning_rate": 9.266331658291458e-05,
"loss": 0.1663,
"step": 539
},
{
"epoch": 0.23,
"grad_norm": 0.5135949850082397,
"learning_rate": 9.246231155778895e-05,
"loss": 0.2232,
"step": 540
},
{
"epoch": 0.23,
"grad_norm": 0.38979530334472656,
"learning_rate": 9.226130653266331e-05,
"loss": 0.1623,
"step": 541
},
{
"epoch": 0.23,
"grad_norm": 0.29901430010795593,
"learning_rate": 9.20603015075377e-05,
"loss": 0.1219,
"step": 542
},
{
"epoch": 0.23,
"grad_norm": 0.5453833341598511,
"learning_rate": 9.185929648241206e-05,
"loss": 0.2261,
"step": 543
},
{
"epoch": 0.23,
"grad_norm": 0.5688655972480774,
"learning_rate": 9.165829145728644e-05,
"loss": 0.2187,
"step": 544
},
{
"epoch": 0.23,
"grad_norm": 0.5330553650856018,
"learning_rate": 9.14572864321608e-05,
"loss": 0.2126,
"step": 545
},
{
"epoch": 0.23,
"grad_norm": 0.4559541940689087,
"learning_rate": 9.125628140703519e-05,
"loss": 0.2291,
"step": 546
},
{
"epoch": 0.23,
"grad_norm": 0.3465883135795593,
"learning_rate": 9.105527638190955e-05,
"loss": 0.198,
"step": 547
},
{
"epoch": 0.23,
"grad_norm": 0.33882659673690796,
"learning_rate": 9.085427135678392e-05,
"loss": 0.168,
"step": 548
},
{
"epoch": 0.23,
"grad_norm": 0.42802536487579346,
"learning_rate": 9.06532663316583e-05,
"loss": 0.2351,
"step": 549
},
{
"epoch": 0.23,
"grad_norm": 0.5641934275627136,
"learning_rate": 9.045226130653267e-05,
"loss": 0.2587,
"step": 550
},
{
"epoch": 0.23,
"grad_norm": 0.5979325175285339,
"learning_rate": 9.025125628140704e-05,
"loss": 0.2807,
"step": 551
},
{
"epoch": 0.23,
"grad_norm": 0.43575671315193176,
"learning_rate": 9.005025125628141e-05,
"loss": 0.1673,
"step": 552
},
{
"epoch": 0.23,
"grad_norm": 0.49646639823913574,
"learning_rate": 8.984924623115579e-05,
"loss": 0.2079,
"step": 553
},
{
"epoch": 0.23,
"grad_norm": 0.6471161246299744,
"learning_rate": 8.964824120603016e-05,
"loss": 0.2519,
"step": 554
},
{
"epoch": 0.24,
"grad_norm": 0.6349927186965942,
"learning_rate": 8.944723618090453e-05,
"loss": 0.2993,
"step": 555
},
{
"epoch": 0.24,
"grad_norm": 0.46648383140563965,
"learning_rate": 8.92462311557789e-05,
"loss": 0.2477,
"step": 556
},
{
"epoch": 0.24,
"grad_norm": 0.38790756464004517,
"learning_rate": 8.904522613065326e-05,
"loss": 0.1964,
"step": 557
},
{
"epoch": 0.24,
"grad_norm": 0.38935327529907227,
"learning_rate": 8.884422110552765e-05,
"loss": 0.1942,
"step": 558
},
{
"epoch": 0.24,
"grad_norm": 0.45376303791999817,
"learning_rate": 8.864321608040201e-05,
"loss": 0.2201,
"step": 559
},
{
"epoch": 0.24,
"grad_norm": 0.301658570766449,
"learning_rate": 8.84422110552764e-05,
"loss": 0.1673,
"step": 560
},
{
"epoch": 0.24,
"grad_norm": 0.45029112696647644,
"learning_rate": 8.824120603015076e-05,
"loss": 0.247,
"step": 561
},
{
"epoch": 0.24,
"grad_norm": 0.31792956590652466,
"learning_rate": 8.804020100502513e-05,
"loss": 0.1804,
"step": 562
},
{
"epoch": 0.24,
"grad_norm": 0.5120775103569031,
"learning_rate": 8.78391959798995e-05,
"loss": 0.2089,
"step": 563
},
{
"epoch": 0.24,
"grad_norm": 0.36469754576683044,
"learning_rate": 8.763819095477387e-05,
"loss": 0.1186,
"step": 564
},
{
"epoch": 0.24,
"grad_norm": 0.5263894200325012,
"learning_rate": 8.743718592964825e-05,
"loss": 0.2857,
"step": 565
},
{
"epoch": 0.24,
"grad_norm": 0.5383757948875427,
"learning_rate": 8.723618090452261e-05,
"loss": 0.2347,
"step": 566
},
{
"epoch": 0.24,
"grad_norm": 0.39517709612846375,
"learning_rate": 8.7035175879397e-05,
"loss": 0.1794,
"step": 567
},
{
"epoch": 0.24,
"grad_norm": 0.44381988048553467,
"learning_rate": 8.683417085427135e-05,
"loss": 0.2065,
"step": 568
},
{
"epoch": 0.24,
"grad_norm": 0.3555888235569,
"learning_rate": 8.663316582914574e-05,
"loss": 0.1825,
"step": 569
},
{
"epoch": 0.24,
"grad_norm": 0.565974235534668,
"learning_rate": 8.64321608040201e-05,
"loss": 0.2543,
"step": 570
},
{
"epoch": 0.24,
"grad_norm": 0.37384283542633057,
"learning_rate": 8.623115577889449e-05,
"loss": 0.1703,
"step": 571
},
{
"epoch": 0.24,
"grad_norm": 0.4946988821029663,
"learning_rate": 8.603015075376884e-05,
"loss": 0.241,
"step": 572
},
{
"epoch": 0.24,
"grad_norm": 0.3822475075721741,
"learning_rate": 8.582914572864322e-05,
"loss": 0.1982,
"step": 573
},
{
"epoch": 0.24,
"grad_norm": 0.5194388628005981,
"learning_rate": 8.562814070351759e-05,
"loss": 0.2266,
"step": 574
},
{
"epoch": 0.24,
"grad_norm": 0.3615367114543915,
"learning_rate": 8.542713567839196e-05,
"loss": 0.1643,
"step": 575
},
{
"epoch": 0.24,
"grad_norm": 0.47169193625450134,
"learning_rate": 8.522613065326634e-05,
"loss": 0.2381,
"step": 576
},
{
"epoch": 0.24,
"grad_norm": 0.38423123955726624,
"learning_rate": 8.502512562814071e-05,
"loss": 0.2342,
"step": 577
},
{
"epoch": 0.24,
"grad_norm": 0.4775454103946686,
"learning_rate": 8.482412060301508e-05,
"loss": 0.2582,
"step": 578
},
{
"epoch": 0.25,
"grad_norm": 0.33534660935401917,
"learning_rate": 8.462311557788946e-05,
"loss": 0.1475,
"step": 579
},
{
"epoch": 0.25,
"grad_norm": 0.5885127186775208,
"learning_rate": 8.442211055276383e-05,
"loss": 0.2584,
"step": 580
},
{
"epoch": 0.25,
"grad_norm": 0.5246654748916626,
"learning_rate": 8.42211055276382e-05,
"loss": 0.2736,
"step": 581
},
{
"epoch": 0.25,
"grad_norm": 0.5678746104240417,
"learning_rate": 8.402010050251256e-05,
"loss": 0.1765,
"step": 582
},
{
"epoch": 0.25,
"grad_norm": 0.485567182302475,
"learning_rate": 8.381909547738695e-05,
"loss": 0.1889,
"step": 583
},
{
"epoch": 0.25,
"grad_norm": 0.49088627099990845,
"learning_rate": 8.36180904522613e-05,
"loss": 0.2282,
"step": 584
},
{
"epoch": 0.25,
"grad_norm": 0.43999719619750977,
"learning_rate": 8.341708542713568e-05,
"loss": 0.1682,
"step": 585
},
{
"epoch": 0.25,
"grad_norm": 0.4806053340435028,
"learning_rate": 8.321608040201005e-05,
"loss": 0.2292,
"step": 586
},
{
"epoch": 0.25,
"grad_norm": 0.3324624300003052,
"learning_rate": 8.301507537688443e-05,
"loss": 0.1654,
"step": 587
},
{
"epoch": 0.25,
"grad_norm": 0.4315497577190399,
"learning_rate": 8.28140703517588e-05,
"loss": 0.2407,
"step": 588
},
{
"epoch": 0.25,
"grad_norm": 0.30554506182670593,
"learning_rate": 8.261306532663317e-05,
"loss": 0.1315,
"step": 589
},
{
"epoch": 0.25,
"grad_norm": 0.45997923612594604,
"learning_rate": 8.241206030150754e-05,
"loss": 0.1902,
"step": 590
},
{
"epoch": 0.25,
"grad_norm": 0.4228266179561615,
"learning_rate": 8.22110552763819e-05,
"loss": 0.1868,
"step": 591
},
{
"epoch": 0.25,
"grad_norm": 0.31143641471862793,
"learning_rate": 8.201005025125629e-05,
"loss": 0.158,
"step": 592
},
{
"epoch": 0.25,
"grad_norm": 0.4374869167804718,
"learning_rate": 8.180904522613065e-05,
"loss": 0.1965,
"step": 593
},
{
"epoch": 0.25,
"grad_norm": 0.35473427176475525,
"learning_rate": 8.160804020100504e-05,
"loss": 0.14,
"step": 594
},
{
"epoch": 0.25,
"grad_norm": 0.3932483196258545,
"learning_rate": 8.14070351758794e-05,
"loss": 0.1904,
"step": 595
},
{
"epoch": 0.25,
"grad_norm": 0.4485935568809509,
"learning_rate": 8.120603015075378e-05,
"loss": 0.2045,
"step": 596
},
{
"epoch": 0.25,
"grad_norm": 0.47243842482566833,
"learning_rate": 8.100502512562814e-05,
"loss": 0.2102,
"step": 597
},
{
"epoch": 0.25,
"grad_norm": 0.5314076542854309,
"learning_rate": 8.080402010050251e-05,
"loss": 0.2139,
"step": 598
},
{
"epoch": 0.25,
"grad_norm": 0.4143196642398834,
"learning_rate": 8.060301507537689e-05,
"loss": 0.192,
"step": 599
},
{
"epoch": 0.25,
"grad_norm": 0.8205462098121643,
"learning_rate": 8.040201005025126e-05,
"loss": 0.2679,
"step": 600
},
{
"epoch": 0.25,
"grad_norm": 0.5004960298538208,
"learning_rate": 8.020100502512563e-05,
"loss": 0.2012,
"step": 601
},
{
"epoch": 0.25,
"grad_norm": 0.3939405083656311,
"learning_rate": 8e-05,
"loss": 0.1939,
"step": 602
},
{
"epoch": 0.26,
"grad_norm": 0.4227248430252075,
"learning_rate": 7.979899497487438e-05,
"loss": 0.2312,
"step": 603
},
{
"epoch": 0.26,
"grad_norm": 0.328396201133728,
"learning_rate": 7.959798994974875e-05,
"loss": 0.1883,
"step": 604
},
{
"epoch": 0.26,
"grad_norm": 0.3216836154460907,
"learning_rate": 7.939698492462313e-05,
"loss": 0.1427,
"step": 605
},
{
"epoch": 0.26,
"grad_norm": 0.5136520862579346,
"learning_rate": 7.91959798994975e-05,
"loss": 0.1962,
"step": 606
},
{
"epoch": 0.26,
"grad_norm": 0.3919290006160736,
"learning_rate": 7.899497487437186e-05,
"loss": 0.1825,
"step": 607
},
{
"epoch": 0.26,
"grad_norm": 0.42786556482315063,
"learning_rate": 7.879396984924623e-05,
"loss": 0.201,
"step": 608
},
{
"epoch": 0.26,
"grad_norm": 0.40081706643104553,
"learning_rate": 7.85929648241206e-05,
"loss": 0.1494,
"step": 609
},
{
"epoch": 0.26,
"grad_norm": 0.5619733929634094,
"learning_rate": 7.839195979899498e-05,
"loss": 0.2593,
"step": 610
},
{
"epoch": 0.26,
"grad_norm": 0.35712411999702454,
"learning_rate": 7.819095477386935e-05,
"loss": 0.1535,
"step": 611
},
{
"epoch": 0.26,
"grad_norm": 0.42240583896636963,
"learning_rate": 7.798994974874372e-05,
"loss": 0.2288,
"step": 612
},
{
"epoch": 0.26,
"grad_norm": 0.360383003950119,
"learning_rate": 7.77889447236181e-05,
"loss": 0.1702,
"step": 613
},
{
"epoch": 0.26,
"grad_norm": 0.38333067297935486,
"learning_rate": 7.758793969849247e-05,
"loss": 0.18,
"step": 614
},
{
"epoch": 0.26,
"grad_norm": 0.3969596028327942,
"learning_rate": 7.738693467336684e-05,
"loss": 0.1836,
"step": 615
},
{
"epoch": 0.26,
"grad_norm": 0.5108949542045593,
"learning_rate": 7.71859296482412e-05,
"loss": 0.2344,
"step": 616
},
{
"epoch": 0.26,
"grad_norm": 0.700456976890564,
"learning_rate": 7.698492462311559e-05,
"loss": 0.3223,
"step": 617
},
{
"epoch": 0.26,
"grad_norm": 0.3161924481391907,
"learning_rate": 7.678391959798995e-05,
"loss": 0.1758,
"step": 618
},
{
"epoch": 0.26,
"grad_norm": 0.3729405105113983,
"learning_rate": 7.658291457286433e-05,
"loss": 0.1738,
"step": 619
},
{
"epoch": 0.26,
"grad_norm": 0.4030870795249939,
"learning_rate": 7.638190954773869e-05,
"loss": 0.1905,
"step": 620
},
{
"epoch": 0.26,
"grad_norm": 0.44550663232803345,
"learning_rate": 7.618090452261307e-05,
"loss": 0.2326,
"step": 621
},
{
"epoch": 0.26,
"grad_norm": 0.5030593872070312,
"learning_rate": 7.597989949748744e-05,
"loss": 0.2007,
"step": 622
},
{
"epoch": 0.26,
"grad_norm": 0.7457550764083862,
"learning_rate": 7.577889447236181e-05,
"loss": 0.2438,
"step": 623
},
{
"epoch": 0.26,
"grad_norm": 0.3807595670223236,
"learning_rate": 7.557788944723618e-05,
"loss": 0.1561,
"step": 624
},
{
"epoch": 0.26,
"grad_norm": 0.5035004019737244,
"learning_rate": 7.537688442211056e-05,
"loss": 0.2496,
"step": 625
},
{
"epoch": 0.27,
"grad_norm": 0.32844075560569763,
"learning_rate": 7.517587939698493e-05,
"loss": 0.2129,
"step": 626
},
{
"epoch": 0.27,
"grad_norm": 0.4441528916358948,
"learning_rate": 7.49748743718593e-05,
"loss": 0.2066,
"step": 627
},
{
"epoch": 0.27,
"grad_norm": 0.4826768934726715,
"learning_rate": 7.477386934673368e-05,
"loss": 0.2123,
"step": 628
},
{
"epoch": 0.27,
"grad_norm": 0.632175624370575,
"learning_rate": 7.457286432160805e-05,
"loss": 0.2659,
"step": 629
},
{
"epoch": 0.27,
"grad_norm": 0.42496258020401,
"learning_rate": 7.437185929648241e-05,
"loss": 0.2294,
"step": 630
},
{
"epoch": 0.27,
"grad_norm": 0.5259225368499756,
"learning_rate": 7.417085427135678e-05,
"loss": 0.2113,
"step": 631
},
{
"epoch": 0.27,
"grad_norm": 0.425467312335968,
"learning_rate": 7.396984924623115e-05,
"loss": 0.194,
"step": 632
},
{
"epoch": 0.27,
"grad_norm": 0.43209174275398254,
"learning_rate": 7.376884422110553e-05,
"loss": 0.1925,
"step": 633
},
{
"epoch": 0.27,
"grad_norm": 0.3306873142719269,
"learning_rate": 7.35678391959799e-05,
"loss": 0.1812,
"step": 634
},
{
"epoch": 0.27,
"grad_norm": 0.40517109632492065,
"learning_rate": 7.336683417085427e-05,
"loss": 0.1464,
"step": 635
},
{
"epoch": 0.27,
"grad_norm": 0.4161038100719452,
"learning_rate": 7.316582914572865e-05,
"loss": 0.1761,
"step": 636
},
{
"epoch": 0.27,
"grad_norm": 0.42401084303855896,
"learning_rate": 7.296482412060302e-05,
"loss": 0.18,
"step": 637
},
{
"epoch": 0.27,
"grad_norm": 0.5300508141517639,
"learning_rate": 7.276381909547739e-05,
"loss": 0.1941,
"step": 638
},
{
"epoch": 0.27,
"grad_norm": 0.502947986125946,
"learning_rate": 7.256281407035177e-05,
"loss": 0.2022,
"step": 639
},
{
"epoch": 0.27,
"grad_norm": 0.4637830853462219,
"learning_rate": 7.236180904522614e-05,
"loss": 0.2126,
"step": 640
},
{
"epoch": 0.27,
"grad_norm": 0.5627738833427429,
"learning_rate": 7.21608040201005e-05,
"loss": 0.2623,
"step": 641
},
{
"epoch": 0.27,
"grad_norm": 0.35125699639320374,
"learning_rate": 7.195979899497488e-05,
"loss": 0.1693,
"step": 642
},
{
"epoch": 0.27,
"grad_norm": 0.2746927738189697,
"learning_rate": 7.175879396984924e-05,
"loss": 0.1302,
"step": 643
},
{
"epoch": 0.27,
"grad_norm": 0.3855142891407013,
"learning_rate": 7.155778894472363e-05,
"loss": 0.1871,
"step": 644
},
{
"epoch": 0.27,
"grad_norm": 0.4590088129043579,
"learning_rate": 7.135678391959799e-05,
"loss": 0.1887,
"step": 645
},
{
"epoch": 0.27,
"grad_norm": 0.36200350522994995,
"learning_rate": 7.115577889447236e-05,
"loss": 0.1729,
"step": 646
},
{
"epoch": 0.27,
"grad_norm": 0.49908801913261414,
"learning_rate": 7.095477386934674e-05,
"loss": 0.2256,
"step": 647
},
{
"epoch": 0.27,
"grad_norm": 0.5674376487731934,
"learning_rate": 7.075376884422111e-05,
"loss": 0.2588,
"step": 648
},
{
"epoch": 0.27,
"grad_norm": 0.512169599533081,
"learning_rate": 7.055276381909548e-05,
"loss": 0.1977,
"step": 649
},
{
"epoch": 0.28,
"grad_norm": 0.49708080291748047,
"learning_rate": 7.035175879396985e-05,
"loss": 0.2387,
"step": 650
},
{
"epoch": 0.28,
"grad_norm": 0.4566737413406372,
"learning_rate": 7.015075376884423e-05,
"loss": 0.207,
"step": 651
},
{
"epoch": 0.28,
"grad_norm": 0.5629884004592896,
"learning_rate": 6.99497487437186e-05,
"loss": 0.2256,
"step": 652
},
{
"epoch": 0.28,
"grad_norm": 0.4460812211036682,
"learning_rate": 6.974874371859297e-05,
"loss": 0.2072,
"step": 653
},
{
"epoch": 0.28,
"grad_norm": 0.5162737369537354,
"learning_rate": 6.954773869346733e-05,
"loss": 0.2438,
"step": 654
},
{
"epoch": 0.28,
"grad_norm": 0.37910178303718567,
"learning_rate": 6.93467336683417e-05,
"loss": 0.143,
"step": 655
},
{
"epoch": 0.28,
"grad_norm": 0.49443650245666504,
"learning_rate": 6.914572864321608e-05,
"loss": 0.1988,
"step": 656
},
{
"epoch": 0.28,
"grad_norm": 0.4551987648010254,
"learning_rate": 6.894472361809045e-05,
"loss": 0.1939,
"step": 657
},
{
"epoch": 0.28,
"grad_norm": 0.3725804090499878,
"learning_rate": 6.874371859296482e-05,
"loss": 0.1934,
"step": 658
},
{
"epoch": 0.28,
"grad_norm": 0.5743696689605713,
"learning_rate": 6.85427135678392e-05,
"loss": 0.2682,
"step": 659
},
{
"epoch": 0.28,
"grad_norm": 0.4046136140823364,
"learning_rate": 6.834170854271357e-05,
"loss": 0.1855,
"step": 660
},
{
"epoch": 0.28,
"grad_norm": 0.5430765748023987,
"learning_rate": 6.814070351758794e-05,
"loss": 0.2382,
"step": 661
},
{
"epoch": 0.28,
"grad_norm": 0.27529728412628174,
"learning_rate": 6.793969849246232e-05,
"loss": 0.1014,
"step": 662
},
{
"epoch": 0.28,
"grad_norm": 0.3833965063095093,
"learning_rate": 6.773869346733669e-05,
"loss": 0.1588,
"step": 663
},
{
"epoch": 0.28,
"grad_norm": 0.3148491680622101,
"learning_rate": 6.753768844221105e-05,
"loss": 0.1547,
"step": 664
},
{
"epoch": 0.28,
"grad_norm": 0.46502408385276794,
"learning_rate": 6.733668341708544e-05,
"loss": 0.2111,
"step": 665
},
{
"epoch": 0.28,
"grad_norm": 0.3772050142288208,
"learning_rate": 6.71356783919598e-05,
"loss": 0.1872,
"step": 666
},
{
"epoch": 0.28,
"grad_norm": 0.3045889735221863,
"learning_rate": 6.693467336683418e-05,
"loss": 0.1099,
"step": 667
},
{
"epoch": 0.28,
"grad_norm": 0.6520645022392273,
"learning_rate": 6.673366834170854e-05,
"loss": 0.2573,
"step": 668
},
{
"epoch": 0.28,
"grad_norm": 0.3540431261062622,
"learning_rate": 6.653266331658293e-05,
"loss": 0.1699,
"step": 669
},
{
"epoch": 0.28,
"grad_norm": 0.41358596086502075,
"learning_rate": 6.633165829145729e-05,
"loss": 0.2099,
"step": 670
},
{
"epoch": 0.28,
"grad_norm": 0.5491157174110413,
"learning_rate": 6.613065326633166e-05,
"loss": 0.2703,
"step": 671
},
{
"epoch": 0.28,
"grad_norm": 0.4711921513080597,
"learning_rate": 6.592964824120603e-05,
"loss": 0.1761,
"step": 672
},
{
"epoch": 0.28,
"grad_norm": 0.33731257915496826,
"learning_rate": 6.57286432160804e-05,
"loss": 0.1713,
"step": 673
},
{
"epoch": 0.29,
"grad_norm": 0.423803448677063,
"learning_rate": 6.552763819095478e-05,
"loss": 0.1764,
"step": 674
},
{
"epoch": 0.29,
"grad_norm": 0.33093374967575073,
"learning_rate": 6.532663316582915e-05,
"loss": 0.1334,
"step": 675
},
{
"epoch": 0.29,
"grad_norm": 0.4147561490535736,
"learning_rate": 6.512562814070352e-05,
"loss": 0.1557,
"step": 676
},
{
"epoch": 0.29,
"grad_norm": 0.3329070210456848,
"learning_rate": 6.492462311557788e-05,
"loss": 0.1688,
"step": 677
},
{
"epoch": 0.29,
"grad_norm": 0.48782962560653687,
"learning_rate": 6.472361809045227e-05,
"loss": 0.2571,
"step": 678
},
{
"epoch": 0.29,
"grad_norm": 0.38781771063804626,
"learning_rate": 6.452261306532663e-05,
"loss": 0.2022,
"step": 679
},
{
"epoch": 0.29,
"grad_norm": 0.3884035646915436,
"learning_rate": 6.4321608040201e-05,
"loss": 0.1418,
"step": 680
},
{
"epoch": 0.29,
"grad_norm": 0.48443275690078735,
"learning_rate": 6.412060301507538e-05,
"loss": 0.2499,
"step": 681
},
{
"epoch": 0.29,
"grad_norm": 0.38683968782424927,
"learning_rate": 6.391959798994975e-05,
"loss": 0.2103,
"step": 682
},
{
"epoch": 0.29,
"grad_norm": 0.31988051533699036,
"learning_rate": 6.371859296482412e-05,
"loss": 0.172,
"step": 683
},
{
"epoch": 0.29,
"grad_norm": 0.351112425327301,
"learning_rate": 6.35175879396985e-05,
"loss": 0.1635,
"step": 684
},
{
"epoch": 0.29,
"grad_norm": 0.40820181369781494,
"learning_rate": 6.331658291457287e-05,
"loss": 0.1636,
"step": 685
},
{
"epoch": 0.29,
"grad_norm": 0.5173302888870239,
"learning_rate": 6.311557788944724e-05,
"loss": 0.2135,
"step": 686
},
{
"epoch": 0.29,
"grad_norm": 0.5712177753448486,
"learning_rate": 6.291457286432161e-05,
"loss": 0.1266,
"step": 687
},
{
"epoch": 0.29,
"grad_norm": 0.44783997535705566,
"learning_rate": 6.271356783919599e-05,
"loss": 0.218,
"step": 688
},
{
"epoch": 0.29,
"grad_norm": 0.31131142377853394,
"learning_rate": 6.251256281407035e-05,
"loss": 0.1404,
"step": 689
},
{
"epoch": 0.29,
"grad_norm": 0.39051759243011475,
"learning_rate": 6.231155778894473e-05,
"loss": 0.1428,
"step": 690
},
{
"epoch": 0.29,
"grad_norm": 0.4637140929698944,
"learning_rate": 6.211055276381909e-05,
"loss": 0.1846,
"step": 691
},
{
"epoch": 0.29,
"grad_norm": 0.4765917956829071,
"learning_rate": 6.190954773869348e-05,
"loss": 0.1501,
"step": 692
},
{
"epoch": 0.29,
"grad_norm": 0.45205971598625183,
"learning_rate": 6.170854271356784e-05,
"loss": 0.1898,
"step": 693
},
{
"epoch": 0.29,
"grad_norm": 0.5680738687515259,
"learning_rate": 6.150753768844222e-05,
"loss": 0.2216,
"step": 694
},
{
"epoch": 0.29,
"grad_norm": 0.3384318947792053,
"learning_rate": 6.130653266331658e-05,
"loss": 0.1613,
"step": 695
},
{
"epoch": 0.29,
"grad_norm": 0.5899595618247986,
"learning_rate": 6.110552763819096e-05,
"loss": 0.2388,
"step": 696
},
{
"epoch": 0.3,
"grad_norm": 0.44541215896606445,
"learning_rate": 6.090452261306533e-05,
"loss": 0.1734,
"step": 697
},
{
"epoch": 0.3,
"grad_norm": 0.4676991105079651,
"learning_rate": 6.070351758793971e-05,
"loss": 0.221,
"step": 698
},
{
"epoch": 0.3,
"grad_norm": 0.6239830851554871,
"learning_rate": 6.0502512562814076e-05,
"loss": 0.2688,
"step": 699
},
{
"epoch": 0.3,
"grad_norm": 0.43823522329330444,
"learning_rate": 6.030150753768844e-05,
"loss": 0.1981,
"step": 700
},
{
"epoch": 0.3,
"grad_norm": 0.3032325804233551,
"learning_rate": 6.0100502512562815e-05,
"loss": 0.1329,
"step": 701
},
{
"epoch": 0.3,
"grad_norm": 0.33419281244277954,
"learning_rate": 5.989949748743718e-05,
"loss": 0.1361,
"step": 702
},
{
"epoch": 0.3,
"grad_norm": 0.5834112167358398,
"learning_rate": 5.969849246231156e-05,
"loss": 0.2134,
"step": 703
},
{
"epoch": 0.3,
"grad_norm": 0.47133928537368774,
"learning_rate": 5.949748743718593e-05,
"loss": 0.2316,
"step": 704
},
{
"epoch": 0.3,
"grad_norm": 0.4087996184825897,
"learning_rate": 5.929648241206031e-05,
"loss": 0.1973,
"step": 705
},
{
"epoch": 0.3,
"grad_norm": 0.38998982310295105,
"learning_rate": 5.909547738693467e-05,
"loss": 0.1719,
"step": 706
},
{
"epoch": 0.3,
"grad_norm": 0.40847426652908325,
"learning_rate": 5.889447236180905e-05,
"loss": 0.1913,
"step": 707
},
{
"epoch": 0.3,
"grad_norm": 0.37600865960121155,
"learning_rate": 5.869346733668342e-05,
"loss": 0.1841,
"step": 708
},
{
"epoch": 0.3,
"grad_norm": 0.3851812183856964,
"learning_rate": 5.849246231155779e-05,
"loss": 0.1692,
"step": 709
},
{
"epoch": 0.3,
"grad_norm": 0.5539536476135254,
"learning_rate": 5.829145728643216e-05,
"loss": 0.2312,
"step": 710
},
{
"epoch": 0.3,
"grad_norm": 0.33982518315315247,
"learning_rate": 5.809045226130654e-05,
"loss": 0.1547,
"step": 711
},
{
"epoch": 0.3,
"grad_norm": 0.44841164350509644,
"learning_rate": 5.7889447236180904e-05,
"loss": 0.2319,
"step": 712
},
{
"epoch": 0.3,
"grad_norm": 0.4287841022014618,
"learning_rate": 5.7688442211055284e-05,
"loss": 0.2062,
"step": 713
},
{
"epoch": 0.3,
"grad_norm": 0.4406593441963196,
"learning_rate": 5.748743718592965e-05,
"loss": 0.2187,
"step": 714
},
{
"epoch": 0.3,
"grad_norm": 0.3746339976787567,
"learning_rate": 5.728643216080403e-05,
"loss": 0.2016,
"step": 715
},
{
"epoch": 0.3,
"grad_norm": 0.7049340009689331,
"learning_rate": 5.7085427135678396e-05,
"loss": 0.2532,
"step": 716
},
{
"epoch": 0.3,
"grad_norm": 0.5307794213294983,
"learning_rate": 5.688442211055277e-05,
"loss": 0.2093,
"step": 717
},
{
"epoch": 0.3,
"grad_norm": 0.31563398241996765,
"learning_rate": 5.6683417085427135e-05,
"loss": 0.1565,
"step": 718
},
{
"epoch": 0.3,
"grad_norm": 0.5347794890403748,
"learning_rate": 5.6482412060301515e-05,
"loss": 0.2473,
"step": 719
},
{
"epoch": 0.3,
"grad_norm": 0.3637223541736603,
"learning_rate": 5.628140703517588e-05,
"loss": 0.1663,
"step": 720
},
{
"epoch": 0.31,
"grad_norm": 0.6633073091506958,
"learning_rate": 5.608040201005026e-05,
"loss": 0.2649,
"step": 721
},
{
"epoch": 0.31,
"grad_norm": 0.36611008644104004,
"learning_rate": 5.587939698492463e-05,
"loss": 0.1544,
"step": 722
},
{
"epoch": 0.31,
"grad_norm": 0.43361905217170715,
"learning_rate": 5.567839195979899e-05,
"loss": 0.1687,
"step": 723
},
{
"epoch": 0.31,
"grad_norm": 0.48814257979393005,
"learning_rate": 5.547738693467337e-05,
"loss": 0.2227,
"step": 724
},
{
"epoch": 0.31,
"grad_norm": 0.3796922564506531,
"learning_rate": 5.527638190954774e-05,
"loss": 0.1915,
"step": 725
},
{
"epoch": 0.31,
"grad_norm": 0.36174800992012024,
"learning_rate": 5.507537688442211e-05,
"loss": 0.1723,
"step": 726
},
{
"epoch": 0.31,
"grad_norm": 0.36948803067207336,
"learning_rate": 5.487437185929648e-05,
"loss": 0.1525,
"step": 727
},
{
"epoch": 0.31,
"grad_norm": 0.4576868414878845,
"learning_rate": 5.467336683417086e-05,
"loss": 0.1663,
"step": 728
},
{
"epoch": 0.31,
"grad_norm": 0.38392430543899536,
"learning_rate": 5.4472361809045224e-05,
"loss": 0.1431,
"step": 729
},
{
"epoch": 0.31,
"grad_norm": 0.44476935267448425,
"learning_rate": 5.4271356783919604e-05,
"loss": 0.2179,
"step": 730
},
{
"epoch": 0.31,
"grad_norm": 0.40011876821517944,
"learning_rate": 5.407035175879397e-05,
"loss": 0.1557,
"step": 731
},
{
"epoch": 0.31,
"grad_norm": 0.42617473006248474,
"learning_rate": 5.386934673366835e-05,
"loss": 0.2283,
"step": 732
},
{
"epoch": 0.31,
"grad_norm": 0.45721670985221863,
"learning_rate": 5.3668341708542716e-05,
"loss": 0.2026,
"step": 733
},
{
"epoch": 0.31,
"grad_norm": 0.5912308096885681,
"learning_rate": 5.346733668341709e-05,
"loss": 0.2628,
"step": 734
},
{
"epoch": 0.31,
"grad_norm": 0.44696366786956787,
"learning_rate": 5.3266331658291455e-05,
"loss": 0.1878,
"step": 735
},
{
"epoch": 0.31,
"grad_norm": 0.36148297786712646,
"learning_rate": 5.3065326633165835e-05,
"loss": 0.164,
"step": 736
},
{
"epoch": 0.31,
"grad_norm": 0.3467967212200165,
"learning_rate": 5.28643216080402e-05,
"loss": 0.1657,
"step": 737
},
{
"epoch": 0.31,
"grad_norm": 0.6262069940567017,
"learning_rate": 5.266331658291458e-05,
"loss": 0.2245,
"step": 738
},
{
"epoch": 0.31,
"grad_norm": 0.42778122425079346,
"learning_rate": 5.246231155778895e-05,
"loss": 0.1717,
"step": 739
},
{
"epoch": 0.31,
"grad_norm": 0.51275634765625,
"learning_rate": 5.226130653266332e-05,
"loss": 0.2183,
"step": 740
},
{
"epoch": 0.31,
"grad_norm": 0.49025994539260864,
"learning_rate": 5.206030150753769e-05,
"loss": 0.2648,
"step": 741
},
{
"epoch": 0.31,
"grad_norm": 0.5096611380577087,
"learning_rate": 5.1859296482412066e-05,
"loss": 0.2544,
"step": 742
},
{
"epoch": 0.31,
"grad_norm": 0.4826028645038605,
"learning_rate": 5.165829145728643e-05,
"loss": 0.2256,
"step": 743
},
{
"epoch": 0.32,
"grad_norm": 0.54673832654953,
"learning_rate": 5.145728643216081e-05,
"loss": 0.2344,
"step": 744
},
{
"epoch": 0.32,
"grad_norm": 0.46049684286117554,
"learning_rate": 5.125628140703518e-05,
"loss": 0.186,
"step": 745
},
{
"epoch": 0.32,
"grad_norm": 0.5595524311065674,
"learning_rate": 5.1055276381909544e-05,
"loss": 0.2271,
"step": 746
},
{
"epoch": 0.32,
"grad_norm": 0.42573636770248413,
"learning_rate": 5.0854271356783924e-05,
"loss": 0.2002,
"step": 747
},
{
"epoch": 0.32,
"grad_norm": 0.37139996886253357,
"learning_rate": 5.065326633165829e-05,
"loss": 0.199,
"step": 748
},
{
"epoch": 0.32,
"grad_norm": 0.46777206659317017,
"learning_rate": 5.045226130653266e-05,
"loss": 0.2271,
"step": 749
},
{
"epoch": 0.32,
"grad_norm": 0.4091203212738037,
"learning_rate": 5.0251256281407036e-05,
"loss": 0.2002,
"step": 750
},
{
"epoch": 0.32,
"grad_norm": 0.4840552806854248,
"learning_rate": 5.005025125628141e-05,
"loss": 0.2238,
"step": 751
},
{
"epoch": 0.32,
"grad_norm": 0.3718883693218231,
"learning_rate": 4.984924623115578e-05,
"loss": 0.2041,
"step": 752
},
{
"epoch": 0.32,
"grad_norm": 0.4906879663467407,
"learning_rate": 4.9648241206030155e-05,
"loss": 0.2466,
"step": 753
},
{
"epoch": 0.32,
"grad_norm": 0.582410991191864,
"learning_rate": 4.944723618090453e-05,
"loss": 0.1961,
"step": 754
},
{
"epoch": 0.32,
"grad_norm": 0.40116363763809204,
"learning_rate": 4.92462311557789e-05,
"loss": 0.2109,
"step": 755
},
{
"epoch": 0.32,
"grad_norm": 0.39108482003211975,
"learning_rate": 4.9045226130653274e-05,
"loss": 0.1804,
"step": 756
},
{
"epoch": 0.32,
"grad_norm": 0.5282275676727295,
"learning_rate": 4.884422110552764e-05,
"loss": 0.175,
"step": 757
},
{
"epoch": 0.32,
"grad_norm": 0.4697287976741791,
"learning_rate": 4.864321608040201e-05,
"loss": 0.168,
"step": 758
},
{
"epoch": 0.32,
"grad_norm": 0.6108348369598389,
"learning_rate": 4.844221105527638e-05,
"loss": 0.1997,
"step": 759
},
{
"epoch": 0.32,
"grad_norm": 0.31648653745651245,
"learning_rate": 4.824120603015075e-05,
"loss": 0.148,
"step": 760
},
{
"epoch": 0.32,
"grad_norm": 0.7432184219360352,
"learning_rate": 4.8040201005025125e-05,
"loss": 0.3999,
"step": 761
},
{
"epoch": 0.32,
"grad_norm": 0.5415976643562317,
"learning_rate": 4.78391959798995e-05,
"loss": 0.2219,
"step": 762
},
{
"epoch": 0.32,
"grad_norm": 0.47961345314979553,
"learning_rate": 4.763819095477387e-05,
"loss": 0.2282,
"step": 763
},
{
"epoch": 0.32,
"grad_norm": 0.4368441104888916,
"learning_rate": 4.7437185929648244e-05,
"loss": 0.1894,
"step": 764
},
{
"epoch": 0.32,
"grad_norm": 0.36008429527282715,
"learning_rate": 4.723618090452262e-05,
"loss": 0.1797,
"step": 765
},
{
"epoch": 0.32,
"grad_norm": 0.3960406184196472,
"learning_rate": 4.703517587939698e-05,
"loss": 0.1641,
"step": 766
},
{
"epoch": 0.32,
"grad_norm": 0.5364041328430176,
"learning_rate": 4.6834170854271356e-05,
"loss": 0.2232,
"step": 767
},
{
"epoch": 0.33,
"grad_norm": 0.5432655811309814,
"learning_rate": 4.663316582914573e-05,
"loss": 0.2936,
"step": 768
},
{
"epoch": 0.33,
"grad_norm": 0.5242969393730164,
"learning_rate": 4.64321608040201e-05,
"loss": 0.2779,
"step": 769
},
{
"epoch": 0.33,
"grad_norm": 0.597951352596283,
"learning_rate": 4.6231155778894475e-05,
"loss": 0.2731,
"step": 770
},
{
"epoch": 0.33,
"grad_norm": 0.459395170211792,
"learning_rate": 4.603015075376885e-05,
"loss": 0.2536,
"step": 771
},
{
"epoch": 0.33,
"grad_norm": 0.3068626821041107,
"learning_rate": 4.582914572864322e-05,
"loss": 0.1761,
"step": 772
},
{
"epoch": 0.33,
"grad_norm": 0.456222265958786,
"learning_rate": 4.5628140703517594e-05,
"loss": 0.2256,
"step": 773
},
{
"epoch": 0.33,
"grad_norm": 0.4786625802516937,
"learning_rate": 4.542713567839196e-05,
"loss": 0.2026,
"step": 774
},
{
"epoch": 0.33,
"grad_norm": 0.6046634912490845,
"learning_rate": 4.522613065326633e-05,
"loss": 0.2876,
"step": 775
},
{
"epoch": 0.33,
"grad_norm": 0.3308449387550354,
"learning_rate": 4.5025125628140706e-05,
"loss": 0.1685,
"step": 776
},
{
"epoch": 0.33,
"grad_norm": 0.5068316459655762,
"learning_rate": 4.482412060301508e-05,
"loss": 0.2652,
"step": 777
},
{
"epoch": 0.33,
"grad_norm": 0.6490353941917419,
"learning_rate": 4.462311557788945e-05,
"loss": 0.2178,
"step": 778
},
{
"epoch": 0.33,
"grad_norm": 0.4765259921550751,
"learning_rate": 4.4422110552763825e-05,
"loss": 0.2673,
"step": 779
},
{
"epoch": 0.33,
"grad_norm": 0.4649331271648407,
"learning_rate": 4.42211055276382e-05,
"loss": 0.2806,
"step": 780
},
{
"epoch": 0.33,
"grad_norm": 0.5351561903953552,
"learning_rate": 4.4020100502512564e-05,
"loss": 0.263,
"step": 781
},
{
"epoch": 0.33,
"grad_norm": 0.49080169200897217,
"learning_rate": 4.381909547738694e-05,
"loss": 0.2438,
"step": 782
},
{
"epoch": 0.33,
"grad_norm": 0.3744328022003174,
"learning_rate": 4.3618090452261303e-05,
"loss": 0.1542,
"step": 783
},
{
"epoch": 0.33,
"grad_norm": 0.5244365930557251,
"learning_rate": 4.3417085427135676e-05,
"loss": 0.1754,
"step": 784
},
{
"epoch": 0.33,
"grad_norm": 0.24274443089962006,
"learning_rate": 4.321608040201005e-05,
"loss": 0.1502,
"step": 785
},
{
"epoch": 0.33,
"grad_norm": 0.41415631771087646,
"learning_rate": 4.301507537688442e-05,
"loss": 0.2229,
"step": 786
},
{
"epoch": 0.33,
"grad_norm": 0.3658473491668701,
"learning_rate": 4.2814070351758795e-05,
"loss": 0.2292,
"step": 787
},
{
"epoch": 0.33,
"grad_norm": 0.22280116379261017,
"learning_rate": 4.261306532663317e-05,
"loss": 0.1124,
"step": 788
},
{
"epoch": 0.33,
"grad_norm": 0.44349828362464905,
"learning_rate": 4.241206030150754e-05,
"loss": 0.2261,
"step": 789
},
{
"epoch": 0.33,
"grad_norm": 0.7576150298118591,
"learning_rate": 4.2211055276381914e-05,
"loss": 0.2237,
"step": 790
},
{
"epoch": 0.33,
"grad_norm": 0.41980302333831787,
"learning_rate": 4.201005025125628e-05,
"loss": 0.2102,
"step": 791
},
{
"epoch": 0.34,
"grad_norm": 0.7608839273452759,
"learning_rate": 4.180904522613065e-05,
"loss": 0.3884,
"step": 792
},
{
"epoch": 0.34,
"grad_norm": 0.6635060906410217,
"learning_rate": 4.1608040201005026e-05,
"loss": 0.3462,
"step": 793
},
{
"epoch": 0.34,
"grad_norm": 0.5781263113021851,
"learning_rate": 4.14070351758794e-05,
"loss": 0.2675,
"step": 794
},
{
"epoch": 0.34,
"grad_norm": 0.354825496673584,
"learning_rate": 4.120603015075377e-05,
"loss": 0.19,
"step": 795
},
{
"epoch": 0.34,
"grad_norm": 0.516942024230957,
"learning_rate": 4.1005025125628145e-05,
"loss": 0.2157,
"step": 796
},
{
"epoch": 0.34,
"grad_norm": 0.3210093677043915,
"learning_rate": 4.080402010050252e-05,
"loss": 0.167,
"step": 797
},
{
"epoch": 0.34,
"grad_norm": 0.36717918515205383,
"learning_rate": 4.060301507537689e-05,
"loss": 0.176,
"step": 798
},
{
"epoch": 0.34,
"grad_norm": 0.33211013674736023,
"learning_rate": 4.040201005025126e-05,
"loss": 0.1659,
"step": 799
},
{
"epoch": 0.34,
"grad_norm": 0.522240161895752,
"learning_rate": 4.020100502512563e-05,
"loss": 0.257,
"step": 800
},
{
"epoch": 0.34,
"grad_norm": 0.5097737312316895,
"learning_rate": 4e-05,
"loss": 0.2492,
"step": 801
},
{
"epoch": 0.34,
"grad_norm": 0.5827239751815796,
"learning_rate": 3.9798994974874376e-05,
"loss": 0.287,
"step": 802
},
{
"epoch": 0.34,
"grad_norm": 0.4194738268852234,
"learning_rate": 3.959798994974875e-05,
"loss": 0.1696,
"step": 803
},
{
"epoch": 0.34,
"grad_norm": 0.49148353934288025,
"learning_rate": 3.9396984924623115e-05,
"loss": 0.2323,
"step": 804
},
{
"epoch": 0.34,
"grad_norm": 0.5025193095207214,
"learning_rate": 3.919597989949749e-05,
"loss": 0.2973,
"step": 805
},
{
"epoch": 0.34,
"grad_norm": 0.3152432143688202,
"learning_rate": 3.899497487437186e-05,
"loss": 0.181,
"step": 806
},
{
"epoch": 0.34,
"grad_norm": 0.5241256356239319,
"learning_rate": 3.8793969849246234e-05,
"loss": 0.2553,
"step": 807
},
{
"epoch": 0.34,
"grad_norm": 0.29373928904533386,
"learning_rate": 3.85929648241206e-05,
"loss": 0.156,
"step": 808
},
{
"epoch": 0.34,
"grad_norm": 0.5213522911071777,
"learning_rate": 3.8391959798994973e-05,
"loss": 0.3123,
"step": 809
},
{
"epoch": 0.34,
"grad_norm": 0.2721177339553833,
"learning_rate": 3.8190954773869346e-05,
"loss": 0.1192,
"step": 810
},
{
"epoch": 0.34,
"grad_norm": 0.4929058253765106,
"learning_rate": 3.798994974874372e-05,
"loss": 0.2325,
"step": 811
},
{
"epoch": 0.34,
"grad_norm": 0.553525984287262,
"learning_rate": 3.778894472361809e-05,
"loss": 0.2711,
"step": 812
},
{
"epoch": 0.34,
"grad_norm": 0.32851460576057434,
"learning_rate": 3.7587939698492465e-05,
"loss": 0.189,
"step": 813
},
{
"epoch": 0.34,
"grad_norm": 0.40988585352897644,
"learning_rate": 3.738693467336684e-05,
"loss": 0.2204,
"step": 814
},
{
"epoch": 0.35,
"grad_norm": 0.5577826499938965,
"learning_rate": 3.7185929648241204e-05,
"loss": 0.2279,
"step": 815
},
{
"epoch": 0.35,
"grad_norm": 0.3908914029598236,
"learning_rate": 3.698492462311558e-05,
"loss": 0.1832,
"step": 816
},
{
"epoch": 0.35,
"grad_norm": 0.44297468662261963,
"learning_rate": 3.678391959798995e-05,
"loss": 0.2031,
"step": 817
},
{
"epoch": 0.35,
"grad_norm": 0.46193307638168335,
"learning_rate": 3.658291457286432e-05,
"loss": 0.2362,
"step": 818
},
{
"epoch": 0.35,
"grad_norm": 0.449309378862381,
"learning_rate": 3.6381909547738696e-05,
"loss": 0.2741,
"step": 819
},
{
"epoch": 0.35,
"grad_norm": 0.47377705574035645,
"learning_rate": 3.618090452261307e-05,
"loss": 0.2441,
"step": 820
},
{
"epoch": 0.35,
"grad_norm": 0.5499680638313293,
"learning_rate": 3.597989949748744e-05,
"loss": 0.2853,
"step": 821
},
{
"epoch": 0.35,
"grad_norm": 0.4443001449108124,
"learning_rate": 3.5778894472361815e-05,
"loss": 0.2124,
"step": 822
},
{
"epoch": 0.35,
"grad_norm": 0.31199464201927185,
"learning_rate": 3.557788944723618e-05,
"loss": 0.1669,
"step": 823
},
{
"epoch": 0.35,
"grad_norm": 0.4009048342704773,
"learning_rate": 3.5376884422110554e-05,
"loss": 0.1822,
"step": 824
},
{
"epoch": 0.35,
"grad_norm": 0.35128268599510193,
"learning_rate": 3.517587939698493e-05,
"loss": 0.2077,
"step": 825
},
{
"epoch": 0.35,
"grad_norm": 0.6096314787864685,
"learning_rate": 3.49748743718593e-05,
"loss": 0.319,
"step": 826
},
{
"epoch": 0.35,
"grad_norm": 0.49398887157440186,
"learning_rate": 3.4773869346733667e-05,
"loss": 0.3382,
"step": 827
},
{
"epoch": 0.35,
"grad_norm": 0.46479663252830505,
"learning_rate": 3.457286432160804e-05,
"loss": 0.2339,
"step": 828
},
{
"epoch": 0.35,
"grad_norm": 0.3224971294403076,
"learning_rate": 3.437185929648241e-05,
"loss": 0.1626,
"step": 829
},
{
"epoch": 0.35,
"grad_norm": 0.5267730951309204,
"learning_rate": 3.4170854271356785e-05,
"loss": 0.2813,
"step": 830
},
{
"epoch": 0.35,
"grad_norm": 0.5544459223747253,
"learning_rate": 3.396984924623116e-05,
"loss": 0.3294,
"step": 831
},
{
"epoch": 0.35,
"grad_norm": 0.5161508917808533,
"learning_rate": 3.3768844221105525e-05,
"loss": 0.2428,
"step": 832
},
{
"epoch": 0.35,
"grad_norm": 0.5045228004455566,
"learning_rate": 3.35678391959799e-05,
"loss": 0.2672,
"step": 833
},
{
"epoch": 0.35,
"grad_norm": 0.33565956354141235,
"learning_rate": 3.336683417085427e-05,
"loss": 0.1272,
"step": 834
},
{
"epoch": 0.35,
"grad_norm": 0.27528029680252075,
"learning_rate": 3.3165829145728643e-05,
"loss": 0.1325,
"step": 835
},
{
"epoch": 0.35,
"grad_norm": 0.3082004189491272,
"learning_rate": 3.2964824120603016e-05,
"loss": 0.173,
"step": 836
},
{
"epoch": 0.35,
"grad_norm": 0.38152727484703064,
"learning_rate": 3.276381909547739e-05,
"loss": 0.1784,
"step": 837
},
{
"epoch": 0.35,
"grad_norm": 0.5074553489685059,
"learning_rate": 3.256281407035176e-05,
"loss": 0.2878,
"step": 838
},
{
"epoch": 0.36,
"grad_norm": 0.37047079205513,
"learning_rate": 3.2361809045226135e-05,
"loss": 0.167,
"step": 839
},
{
"epoch": 0.36,
"grad_norm": 0.4814290702342987,
"learning_rate": 3.21608040201005e-05,
"loss": 0.2293,
"step": 840
},
{
"epoch": 0.36,
"grad_norm": 0.37862345576286316,
"learning_rate": 3.1959798994974875e-05,
"loss": 0.2462,
"step": 841
},
{
"epoch": 0.36,
"grad_norm": 0.302799791097641,
"learning_rate": 3.175879396984925e-05,
"loss": 0.1738,
"step": 842
},
{
"epoch": 0.36,
"grad_norm": 0.35300469398498535,
"learning_rate": 3.155778894472362e-05,
"loss": 0.1652,
"step": 843
},
{
"epoch": 0.36,
"grad_norm": 0.45807746052742004,
"learning_rate": 3.1356783919597993e-05,
"loss": 0.2621,
"step": 844
},
{
"epoch": 0.36,
"grad_norm": 0.6076413989067078,
"learning_rate": 3.1155778894472366e-05,
"loss": 0.3297,
"step": 845
},
{
"epoch": 0.36,
"grad_norm": 0.35612595081329346,
"learning_rate": 3.095477386934674e-05,
"loss": 0.1883,
"step": 846
},
{
"epoch": 0.36,
"grad_norm": 0.28016719222068787,
"learning_rate": 3.075376884422111e-05,
"loss": 0.1264,
"step": 847
},
{
"epoch": 0.36,
"grad_norm": 0.4207148253917694,
"learning_rate": 3.055276381909548e-05,
"loss": 0.2308,
"step": 848
},
{
"epoch": 0.36,
"grad_norm": 0.3742695748806,
"learning_rate": 3.0351758793969855e-05,
"loss": 0.1903,
"step": 849
},
{
"epoch": 0.36,
"grad_norm": 0.5703909993171692,
"learning_rate": 3.015075376884422e-05,
"loss": 0.1784,
"step": 850
},
{
"epoch": 0.36,
"grad_norm": 0.3696439266204834,
"learning_rate": 2.994974874371859e-05,
"loss": 0.1743,
"step": 851
},
{
"epoch": 0.36,
"grad_norm": 0.557442843914032,
"learning_rate": 2.9748743718592964e-05,
"loss": 0.2909,
"step": 852
},
{
"epoch": 0.36,
"grad_norm": 0.6531166434288025,
"learning_rate": 2.9547738693467337e-05,
"loss": 0.2273,
"step": 853
},
{
"epoch": 0.36,
"grad_norm": 0.6506884098052979,
"learning_rate": 2.934673366834171e-05,
"loss": 0.2878,
"step": 854
},
{
"epoch": 0.36,
"grad_norm": 0.4518173933029175,
"learning_rate": 2.914572864321608e-05,
"loss": 0.2037,
"step": 855
},
{
"epoch": 0.36,
"grad_norm": 0.4443354606628418,
"learning_rate": 2.8944723618090452e-05,
"loss": 0.2299,
"step": 856
},
{
"epoch": 0.36,
"grad_norm": 0.23116129636764526,
"learning_rate": 2.8743718592964825e-05,
"loss": 0.1179,
"step": 857
},
{
"epoch": 0.36,
"grad_norm": 0.40902626514434814,
"learning_rate": 2.8542713567839198e-05,
"loss": 0.2197,
"step": 858
},
{
"epoch": 0.36,
"grad_norm": 0.5275218486785889,
"learning_rate": 2.8341708542713568e-05,
"loss": 0.2427,
"step": 859
},
{
"epoch": 0.36,
"grad_norm": 0.3060431480407715,
"learning_rate": 2.814070351758794e-05,
"loss": 0.162,
"step": 860
},
{
"epoch": 0.36,
"grad_norm": 0.6324546933174133,
"learning_rate": 2.7939698492462314e-05,
"loss": 0.2545,
"step": 861
},
{
"epoch": 0.37,
"grad_norm": 0.4083345830440521,
"learning_rate": 2.7738693467336686e-05,
"loss": 0.1776,
"step": 862
},
{
"epoch": 0.37,
"grad_norm": 0.7091811299324036,
"learning_rate": 2.7537688442211056e-05,
"loss": 0.335,
"step": 863
},
{
"epoch": 0.37,
"grad_norm": 0.3959479033946991,
"learning_rate": 2.733668341708543e-05,
"loss": 0.2122,
"step": 864
},
{
"epoch": 0.37,
"grad_norm": 0.37795379757881165,
"learning_rate": 2.7135678391959802e-05,
"loss": 0.1956,
"step": 865
},
{
"epoch": 0.37,
"grad_norm": 0.34550997614860535,
"learning_rate": 2.6934673366834175e-05,
"loss": 0.1586,
"step": 866
},
{
"epoch": 0.37,
"grad_norm": 0.3692917823791504,
"learning_rate": 2.6733668341708545e-05,
"loss": 0.2125,
"step": 867
},
{
"epoch": 0.37,
"grad_norm": 0.48720014095306396,
"learning_rate": 2.6532663316582917e-05,
"loss": 0.2308,
"step": 868
},
{
"epoch": 0.37,
"grad_norm": 0.6313906908035278,
"learning_rate": 2.633165829145729e-05,
"loss": 0.2026,
"step": 869
},
{
"epoch": 0.37,
"grad_norm": 0.3886338770389557,
"learning_rate": 2.613065326633166e-05,
"loss": 0.2053,
"step": 870
},
{
"epoch": 0.37,
"grad_norm": 0.6648421883583069,
"learning_rate": 2.5929648241206033e-05,
"loss": 0.385,
"step": 871
},
{
"epoch": 0.37,
"grad_norm": 0.5134801864624023,
"learning_rate": 2.5728643216080406e-05,
"loss": 0.1886,
"step": 872
},
{
"epoch": 0.37,
"grad_norm": 0.40226277709007263,
"learning_rate": 2.5527638190954772e-05,
"loss": 0.1778,
"step": 873
},
{
"epoch": 0.37,
"grad_norm": 0.3925773799419403,
"learning_rate": 2.5326633165829145e-05,
"loss": 0.2397,
"step": 874
},
{
"epoch": 0.37,
"grad_norm": 0.4807821214199066,
"learning_rate": 2.5125628140703518e-05,
"loss": 0.1851,
"step": 875
},
{
"epoch": 0.37,
"grad_norm": 0.47010698914527893,
"learning_rate": 2.492462311557789e-05,
"loss": 0.2635,
"step": 876
},
{
"epoch": 0.37,
"grad_norm": 0.43276745080947876,
"learning_rate": 2.4723618090452264e-05,
"loss": 0.1813,
"step": 877
},
{
"epoch": 0.37,
"grad_norm": 0.4495302736759186,
"learning_rate": 2.4522613065326637e-05,
"loss": 0.2207,
"step": 878
},
{
"epoch": 0.37,
"grad_norm": 0.30723971128463745,
"learning_rate": 2.4321608040201007e-05,
"loss": 0.2037,
"step": 879
},
{
"epoch": 0.37,
"grad_norm": 0.3359546661376953,
"learning_rate": 2.4120603015075376e-05,
"loss": 0.1761,
"step": 880
},
{
"epoch": 0.37,
"grad_norm": 1.9301133155822754,
"learning_rate": 2.391959798994975e-05,
"loss": 0.182,
"step": 881
},
{
"epoch": 0.37,
"grad_norm": 0.3476134240627289,
"learning_rate": 2.3718592964824122e-05,
"loss": 0.1796,
"step": 882
},
{
"epoch": 0.37,
"grad_norm": 0.28115808963775635,
"learning_rate": 2.351758793969849e-05,
"loss": 0.1378,
"step": 883
},
{
"epoch": 0.37,
"grad_norm": 0.46458545327186584,
"learning_rate": 2.3316582914572865e-05,
"loss": 0.2178,
"step": 884
},
{
"epoch": 0.37,
"grad_norm": 0.3719908595085144,
"learning_rate": 2.3115577889447238e-05,
"loss": 0.2225,
"step": 885
},
{
"epoch": 0.38,
"grad_norm": 0.4343881905078888,
"learning_rate": 2.291457286432161e-05,
"loss": 0.1423,
"step": 886
},
{
"epoch": 0.38,
"grad_norm": 0.3831157982349396,
"learning_rate": 2.271356783919598e-05,
"loss": 0.2008,
"step": 887
},
{
"epoch": 0.38,
"grad_norm": 0.3542693853378296,
"learning_rate": 2.2512562814070353e-05,
"loss": 0.239,
"step": 888
},
{
"epoch": 0.38,
"grad_norm": 0.3413107693195343,
"learning_rate": 2.2311557788944726e-05,
"loss": 0.1751,
"step": 889
},
{
"epoch": 0.38,
"grad_norm": 0.6149652004241943,
"learning_rate": 2.21105527638191e-05,
"loss": 0.239,
"step": 890
},
{
"epoch": 0.38,
"grad_norm": 0.39706915616989136,
"learning_rate": 2.190954773869347e-05,
"loss": 0.189,
"step": 891
},
{
"epoch": 0.38,
"grad_norm": 0.4143030345439911,
"learning_rate": 2.1708542713567838e-05,
"loss": 0.2113,
"step": 892
},
{
"epoch": 0.38,
"grad_norm": 0.46678248047828674,
"learning_rate": 2.150753768844221e-05,
"loss": 0.2433,
"step": 893
},
{
"epoch": 0.38,
"grad_norm": 0.5161128640174866,
"learning_rate": 2.1306532663316584e-05,
"loss": 0.2854,
"step": 894
},
{
"epoch": 0.38,
"grad_norm": 0.4151817560195923,
"learning_rate": 2.1105527638190957e-05,
"loss": 0.175,
"step": 895
},
{
"epoch": 0.38,
"grad_norm": 0.3842785954475403,
"learning_rate": 2.0904522613065327e-05,
"loss": 0.208,
"step": 896
},
{
"epoch": 0.38,
"grad_norm": 0.2840658724308014,
"learning_rate": 2.07035175879397e-05,
"loss": 0.1405,
"step": 897
},
{
"epoch": 0.38,
"grad_norm": 0.3122474253177643,
"learning_rate": 2.0502512562814073e-05,
"loss": 0.1946,
"step": 898
},
{
"epoch": 0.38,
"grad_norm": 0.48670870065689087,
"learning_rate": 2.0301507537688446e-05,
"loss": 0.2515,
"step": 899
},
{
"epoch": 0.38,
"grad_norm": 0.3537992238998413,
"learning_rate": 2.0100502512562815e-05,
"loss": 0.1731,
"step": 900
},
{
"epoch": 0.38,
"grad_norm": 0.894879937171936,
"learning_rate": 1.9899497487437188e-05,
"loss": 0.2873,
"step": 901
},
{
"epoch": 0.38,
"grad_norm": 0.820382297039032,
"learning_rate": 1.9698492462311558e-05,
"loss": 0.2662,
"step": 902
},
{
"epoch": 0.38,
"grad_norm": 0.4541124999523163,
"learning_rate": 1.949748743718593e-05,
"loss": 0.2322,
"step": 903
},
{
"epoch": 0.38,
"grad_norm": 0.2983895540237427,
"learning_rate": 1.92964824120603e-05,
"loss": 0.126,
"step": 904
},
{
"epoch": 0.38,
"grad_norm": 0.2186942845582962,
"learning_rate": 1.9095477386934673e-05,
"loss": 0.1206,
"step": 905
},
{
"epoch": 0.38,
"grad_norm": 0.6614108085632324,
"learning_rate": 1.8894472361809046e-05,
"loss": 0.2525,
"step": 906
},
{
"epoch": 0.38,
"grad_norm": 0.6433589458465576,
"learning_rate": 1.869346733668342e-05,
"loss": 0.2856,
"step": 907
},
{
"epoch": 0.38,
"grad_norm": 0.3145541548728943,
"learning_rate": 1.849246231155779e-05,
"loss": 0.1602,
"step": 908
},
{
"epoch": 0.38,
"grad_norm": 0.4058758318424225,
"learning_rate": 1.829145728643216e-05,
"loss": 0.2108,
"step": 909
},
{
"epoch": 0.39,
"grad_norm": 0.31698545813560486,
"learning_rate": 1.8090452261306535e-05,
"loss": 0.1923,
"step": 910
},
{
"epoch": 0.39,
"grad_norm": 0.2648496627807617,
"learning_rate": 1.7889447236180908e-05,
"loss": 0.1372,
"step": 911
},
{
"epoch": 0.39,
"grad_norm": 0.38380950689315796,
"learning_rate": 1.7688442211055277e-05,
"loss": 0.1771,
"step": 912
},
{
"epoch": 0.39,
"grad_norm": 0.6659491658210754,
"learning_rate": 1.748743718592965e-05,
"loss": 0.2835,
"step": 913
},
{
"epoch": 0.39,
"grad_norm": 0.5000067353248596,
"learning_rate": 1.728643216080402e-05,
"loss": 0.205,
"step": 914
},
{
"epoch": 0.39,
"grad_norm": 0.2938293516635895,
"learning_rate": 1.7085427135678393e-05,
"loss": 0.1422,
"step": 915
},
{
"epoch": 0.39,
"grad_norm": 0.2897784113883972,
"learning_rate": 1.6884422110552762e-05,
"loss": 0.1297,
"step": 916
},
{
"epoch": 0.39,
"grad_norm": 0.3621378242969513,
"learning_rate": 1.6683417085427135e-05,
"loss": 0.1713,
"step": 917
},
{
"epoch": 0.39,
"grad_norm": 0.25102704763412476,
"learning_rate": 1.6482412060301508e-05,
"loss": 0.112,
"step": 918
},
{
"epoch": 0.39,
"grad_norm": 0.33787477016448975,
"learning_rate": 1.628140703517588e-05,
"loss": 0.1573,
"step": 919
},
{
"epoch": 0.39,
"grad_norm": 0.5255904197692871,
"learning_rate": 1.608040201005025e-05,
"loss": 0.2663,
"step": 920
},
{
"epoch": 0.39,
"grad_norm": 0.43443411588668823,
"learning_rate": 1.5879396984924624e-05,
"loss": 0.1731,
"step": 921
},
{
"epoch": 0.39,
"grad_norm": 0.371311753988266,
"learning_rate": 1.5678391959798997e-05,
"loss": 0.194,
"step": 922
},
{
"epoch": 0.39,
"grad_norm": 0.46048715710639954,
"learning_rate": 1.547738693467337e-05,
"loss": 0.2677,
"step": 923
},
{
"epoch": 0.39,
"grad_norm": 0.4153316020965576,
"learning_rate": 1.527638190954774e-05,
"loss": 0.1633,
"step": 924
},
{
"epoch": 0.39,
"grad_norm": 0.4759661555290222,
"learning_rate": 1.507537688442211e-05,
"loss": 0.2009,
"step": 925
},
{
"epoch": 0.39,
"grad_norm": 0.44403836131095886,
"learning_rate": 1.4874371859296482e-05,
"loss": 0.213,
"step": 926
},
{
"epoch": 0.39,
"grad_norm": 0.5492647886276245,
"learning_rate": 1.4673366834170855e-05,
"loss": 0.2041,
"step": 927
},
{
"epoch": 0.39,
"grad_norm": 0.43692106008529663,
"learning_rate": 1.4472361809045226e-05,
"loss": 0.2253,
"step": 928
},
{
"epoch": 0.39,
"grad_norm": 0.46681973338127136,
"learning_rate": 1.4271356783919599e-05,
"loss": 0.2476,
"step": 929
},
{
"epoch": 0.39,
"grad_norm": 0.2761835753917694,
"learning_rate": 1.407035175879397e-05,
"loss": 0.1159,
"step": 930
},
{
"epoch": 0.39,
"grad_norm": 0.32756364345550537,
"learning_rate": 1.3869346733668343e-05,
"loss": 0.1649,
"step": 931
},
{
"epoch": 0.39,
"grad_norm": 0.26436007022857666,
"learning_rate": 1.3668341708542715e-05,
"loss": 0.1635,
"step": 932
},
{
"epoch": 0.4,
"grad_norm": 0.40596601366996765,
"learning_rate": 1.3467336683417087e-05,
"loss": 0.2097,
"step": 933
},
{
"epoch": 0.4,
"grad_norm": 0.2804524898529053,
"learning_rate": 1.3266331658291459e-05,
"loss": 0.1472,
"step": 934
},
{
"epoch": 0.4,
"grad_norm": 0.3760283589363098,
"learning_rate": 1.306532663316583e-05,
"loss": 0.1813,
"step": 935
},
{
"epoch": 0.4,
"grad_norm": 0.29557615518569946,
"learning_rate": 1.2864321608040203e-05,
"loss": 0.1399,
"step": 936
},
{
"epoch": 0.4,
"grad_norm": 0.6540164351463318,
"learning_rate": 1.2663316582914573e-05,
"loss": 0.2999,
"step": 937
},
{
"epoch": 0.4,
"grad_norm": 0.44547465443611145,
"learning_rate": 1.2462311557788946e-05,
"loss": 0.1994,
"step": 938
},
{
"epoch": 0.4,
"grad_norm": 0.6938953995704651,
"learning_rate": 1.2261306532663318e-05,
"loss": 0.3192,
"step": 939
},
{
"epoch": 0.4,
"grad_norm": 0.7034733891487122,
"learning_rate": 1.2060301507537688e-05,
"loss": 0.352,
"step": 940
},
{
"epoch": 0.4,
"grad_norm": 0.6876631379127502,
"learning_rate": 1.1859296482412061e-05,
"loss": 0.313,
"step": 941
},
{
"epoch": 0.4,
"grad_norm": 0.4050236642360687,
"learning_rate": 1.1658291457286432e-05,
"loss": 0.2126,
"step": 942
},
{
"epoch": 0.4,
"grad_norm": 0.24842841923236847,
"learning_rate": 1.1457286432160805e-05,
"loss": 0.1333,
"step": 943
},
{
"epoch": 0.4,
"grad_norm": 0.38990554213523865,
"learning_rate": 1.1256281407035177e-05,
"loss": 0.2205,
"step": 944
},
{
"epoch": 0.4,
"grad_norm": 0.34424301981925964,
"learning_rate": 1.105527638190955e-05,
"loss": 0.1987,
"step": 945
},
{
"epoch": 0.4,
"grad_norm": 0.3523971736431122,
"learning_rate": 1.0854271356783919e-05,
"loss": 0.1813,
"step": 946
},
{
"epoch": 0.4,
"grad_norm": 0.4130074083805084,
"learning_rate": 1.0653266331658292e-05,
"loss": 0.2409,
"step": 947
},
{
"epoch": 0.4,
"grad_norm": 0.614470899105072,
"learning_rate": 1.0452261306532663e-05,
"loss": 0.275,
"step": 948
},
{
"epoch": 0.4,
"grad_norm": 0.3006427586078644,
"learning_rate": 1.0251256281407036e-05,
"loss": 0.1483,
"step": 949
},
{
"epoch": 0.4,
"grad_norm": 0.4929478168487549,
"learning_rate": 1.0050251256281408e-05,
"loss": 0.2328,
"step": 950
},
{
"epoch": 0.4,
"grad_norm": 0.4946010112762451,
"learning_rate": 9.849246231155779e-06,
"loss": 0.2307,
"step": 951
},
{
"epoch": 0.4,
"grad_norm": 0.47001707553863525,
"learning_rate": 9.64824120603015e-06,
"loss": 0.2133,
"step": 952
},
{
"epoch": 0.4,
"grad_norm": 0.5728352069854736,
"learning_rate": 9.447236180904523e-06,
"loss": 0.2261,
"step": 953
},
{
"epoch": 0.4,
"grad_norm": 0.45893171429634094,
"learning_rate": 9.246231155778894e-06,
"loss": 0.257,
"step": 954
},
{
"epoch": 0.4,
"grad_norm": 0.4369294345378876,
"learning_rate": 9.045226130653267e-06,
"loss": 0.2485,
"step": 955
},
{
"epoch": 0.4,
"grad_norm": 0.3858097493648529,
"learning_rate": 8.844221105527639e-06,
"loss": 0.1506,
"step": 956
},
{
"epoch": 0.41,
"grad_norm": 0.4247424304485321,
"learning_rate": 8.64321608040201e-06,
"loss": 0.1968,
"step": 957
},
{
"epoch": 0.41,
"grad_norm": 0.6413017511367798,
"learning_rate": 8.442211055276381e-06,
"loss": 0.3137,
"step": 958
},
{
"epoch": 0.41,
"grad_norm": 0.47462546825408936,
"learning_rate": 8.241206030150754e-06,
"loss": 0.2323,
"step": 959
},
{
"epoch": 0.41,
"grad_norm": 0.4901590347290039,
"learning_rate": 8.040201005025125e-06,
"loss": 0.2167,
"step": 960
},
{
"epoch": 0.41,
"grad_norm": 0.46188369393348694,
"learning_rate": 7.839195979899498e-06,
"loss": 0.2188,
"step": 961
},
{
"epoch": 0.41,
"grad_norm": 0.5853116512298584,
"learning_rate": 7.63819095477387e-06,
"loss": 0.2758,
"step": 962
},
{
"epoch": 0.41,
"grad_norm": 0.5120633840560913,
"learning_rate": 7.437185929648241e-06,
"loss": 0.2467,
"step": 963
},
{
"epoch": 0.41,
"grad_norm": 0.525434672832489,
"learning_rate": 7.236180904522613e-06,
"loss": 0.2374,
"step": 964
},
{
"epoch": 0.41,
"grad_norm": 0.5119476914405823,
"learning_rate": 7.035175879396985e-06,
"loss": 0.2483,
"step": 965
},
{
"epoch": 0.41,
"grad_norm": 0.2736360728740692,
"learning_rate": 6.834170854271357e-06,
"loss": 0.1504,
"step": 966
},
{
"epoch": 0.41,
"grad_norm": 0.4709615111351013,
"learning_rate": 6.633165829145729e-06,
"loss": 0.2252,
"step": 967
},
{
"epoch": 0.41,
"grad_norm": 0.6076705455780029,
"learning_rate": 6.4321608040201015e-06,
"loss": 0.2818,
"step": 968
},
{
"epoch": 0.41,
"grad_norm": 0.29698169231414795,
"learning_rate": 6.231155778894473e-06,
"loss": 0.1461,
"step": 969
},
{
"epoch": 0.41,
"grad_norm": 0.26702263951301575,
"learning_rate": 6.030150753768844e-06,
"loss": 0.125,
"step": 970
},
{
"epoch": 0.41,
"grad_norm": 0.43181735277175903,
"learning_rate": 5.829145728643216e-06,
"loss": 0.1723,
"step": 971
},
{
"epoch": 0.41,
"grad_norm": 0.3504771888256073,
"learning_rate": 5.628140703517588e-06,
"loss": 0.1807,
"step": 972
},
{
"epoch": 0.41,
"grad_norm": 0.4474789798259735,
"learning_rate": 5.4271356783919595e-06,
"loss": 0.2144,
"step": 973
},
{
"epoch": 0.41,
"grad_norm": 0.29683947563171387,
"learning_rate": 5.226130653266332e-06,
"loss": 0.1741,
"step": 974
},
{
"epoch": 0.41,
"grad_norm": 0.461755633354187,
"learning_rate": 5.025125628140704e-06,
"loss": 0.1881,
"step": 975
},
{
"epoch": 0.41,
"grad_norm": 0.37622129917144775,
"learning_rate": 4.824120603015075e-06,
"loss": 0.2117,
"step": 976
},
{
"epoch": 0.41,
"grad_norm": 0.4308166801929474,
"learning_rate": 4.623115577889447e-06,
"loss": 0.2124,
"step": 977
},
{
"epoch": 0.41,
"grad_norm": 0.4249589741230011,
"learning_rate": 4.422110552763819e-06,
"loss": 0.1813,
"step": 978
},
{
"epoch": 0.41,
"grad_norm": 0.3398144245147705,
"learning_rate": 4.2211055276381906e-06,
"loss": 0.1889,
"step": 979
},
{
"epoch": 0.41,
"grad_norm": 0.3491975963115692,
"learning_rate": 4.020100502512563e-06,
"loss": 0.1784,
"step": 980
},
{
"epoch": 0.42,
"grad_norm": 0.3642324209213257,
"learning_rate": 3.819095477386935e-06,
"loss": 0.1502,
"step": 981
},
{
"epoch": 0.42,
"grad_norm": 0.31112533807754517,
"learning_rate": 3.6180904522613065e-06,
"loss": 0.1326,
"step": 982
},
{
"epoch": 0.42,
"grad_norm": 0.4723191261291504,
"learning_rate": 3.4170854271356786e-06,
"loss": 0.2128,
"step": 983
},
{
"epoch": 0.42,
"grad_norm": 0.3740854561328888,
"learning_rate": 3.2160804020100507e-06,
"loss": 0.1296,
"step": 984
},
{
"epoch": 0.42,
"grad_norm": 0.4681090712547302,
"learning_rate": 3.015075376884422e-06,
"loss": 0.2435,
"step": 985
},
{
"epoch": 0.42,
"grad_norm": 0.4755851924419403,
"learning_rate": 2.814070351758794e-06,
"loss": 0.192,
"step": 986
},
{
"epoch": 0.42,
"grad_norm": 0.7064125537872314,
"learning_rate": 2.613065326633166e-06,
"loss": 0.3695,
"step": 987
},
{
"epoch": 0.42,
"grad_norm": 0.3578234314918518,
"learning_rate": 2.4120603015075375e-06,
"loss": 0.1774,
"step": 988
},
{
"epoch": 0.42,
"grad_norm": 0.4586274027824402,
"learning_rate": 2.2110552763819096e-06,
"loss": 0.1662,
"step": 989
},
{
"epoch": 0.42,
"grad_norm": 0.9307977557182312,
"learning_rate": 2.0100502512562813e-06,
"loss": 0.3233,
"step": 990
},
{
"epoch": 0.42,
"grad_norm": 0.4612172842025757,
"learning_rate": 1.8090452261306533e-06,
"loss": 0.1946,
"step": 991
},
{
"epoch": 0.42,
"grad_norm": 0.35452717542648315,
"learning_rate": 1.6080402010050254e-06,
"loss": 0.2148,
"step": 992
},
{
"epoch": 0.42,
"grad_norm": 0.41886764764785767,
"learning_rate": 1.407035175879397e-06,
"loss": 0.2024,
"step": 993
},
{
"epoch": 0.42,
"grad_norm": 0.36347076296806335,
"learning_rate": 1.2060301507537688e-06,
"loss": 0.2238,
"step": 994
},
{
"epoch": 0.42,
"grad_norm": 0.30956777930259705,
"learning_rate": 1.0050251256281407e-06,
"loss": 0.1816,
"step": 995
},
{
"epoch": 0.42,
"grad_norm": 0.29292166233062744,
"learning_rate": 8.040201005025127e-07,
"loss": 0.1565,
"step": 996
},
{
"epoch": 0.42,
"grad_norm": 0.5314776301383972,
"learning_rate": 6.030150753768844e-07,
"loss": 0.2801,
"step": 997
},
{
"epoch": 0.42,
"grad_norm": 0.5279101133346558,
"learning_rate": 4.0201005025125634e-07,
"loss": 0.2125,
"step": 998
},
{
"epoch": 0.42,
"grad_norm": 0.5519235730171204,
"learning_rate": 2.0100502512562817e-07,
"loss": 0.2095,
"step": 999
},
{
"epoch": 0.42,
"grad_norm": 0.4091325104236603,
"learning_rate": 0.0,
"loss": 0.2152,
"step": 1000
}
],
"logging_steps": 1,
"max_steps": 1000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"total_flos": 6.759802199059661e+16,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}