{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.642857142857143,
"eval_steps": 500,
"global_step": 236,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01680672268907563,
"grad_norm": 1.71875,
"learning_rate": 2e-05,
"loss": 2.9247,
"step": 1
},
{
"epoch": 0.03361344537815126,
"grad_norm": 1.9375,
"learning_rate": 4e-05,
"loss": 2.8013,
"step": 2
},
{
"epoch": 0.05042016806722689,
"grad_norm": 1.6875,
"learning_rate": 6e-05,
"loss": 2.9511,
"step": 3
},
{
"epoch": 0.06722689075630252,
"grad_norm": 1.734375,
"learning_rate": 8e-05,
"loss": 2.91,
"step": 4
},
{
"epoch": 0.08403361344537816,
"grad_norm": 1.6171875,
"learning_rate": 0.0001,
"loss": 2.766,
"step": 5
},
{
"epoch": 0.10084033613445378,
"grad_norm": 0.54296875,
"learning_rate": 0.00012,
"loss": 2.6274,
"step": 6
},
{
"epoch": 0.11764705882352941,
"grad_norm": 0.19921875,
"learning_rate": 0.00014,
"loss": 2.3908,
"step": 7
},
{
"epoch": 0.13445378151260504,
"grad_norm": 0.279296875,
"learning_rate": 0.00016,
"loss": 2.544,
"step": 8
},
{
"epoch": 0.15126050420168066,
"grad_norm": 0.259765625,
"learning_rate": 0.00018,
"loss": 2.4026,
"step": 9
},
{
"epoch": 0.16806722689075632,
"grad_norm": 0.2578125,
"learning_rate": 0.0002,
"loss": 2.4927,
"step": 10
},
{
"epoch": 0.18487394957983194,
"grad_norm": 0.2177734375,
"learning_rate": 0.00019999033847063811,
"loss": 2.4668,
"step": 11
},
{
"epoch": 0.20168067226890757,
"grad_norm": 0.197265625,
"learning_rate": 0.00019996135574945544,
"loss": 2.3493,
"step": 12
},
{
"epoch": 0.2184873949579832,
"grad_norm": 0.189453125,
"learning_rate": 0.00019991305743680013,
"loss": 2.4891,
"step": 13
},
{
"epoch": 0.23529411764705882,
"grad_norm": 0.1845703125,
"learning_rate": 0.0001998454528653836,
"loss": 2.3903,
"step": 14
},
{
"epoch": 0.25210084033613445,
"grad_norm": 0.15625,
"learning_rate": 0.00019975855509847686,
"loss": 2.3877,
"step": 15
},
{
"epoch": 0.2689075630252101,
"grad_norm": 0.1748046875,
"learning_rate": 0.00019965238092738643,
"loss": 2.4276,
"step": 16
},
{
"epoch": 0.2857142857142857,
"grad_norm": 0.15625,
"learning_rate": 0.00019952695086820975,
"loss": 2.3165,
"step": 17
},
{
"epoch": 0.3025210084033613,
"grad_norm": 0.15234375,
"learning_rate": 0.0001993822891578708,
"loss": 2.3638,
"step": 18
},
{
"epoch": 0.31932773109243695,
"grad_norm": 0.1708984375,
"learning_rate": 0.0001992184237494368,
"loss": 2.2576,
"step": 19
},
{
"epoch": 0.33613445378151263,
"grad_norm": 0.154296875,
"learning_rate": 0.0001990353863067169,
"loss": 2.4158,
"step": 20
},
{
"epoch": 0.35294117647058826,
"grad_norm": 0.1552734375,
"learning_rate": 0.0001988332121981436,
"loss": 2.2791,
"step": 21
},
{
"epoch": 0.3697478991596639,
"grad_norm": 0.2041015625,
"learning_rate": 0.00019861194048993863,
"loss": 2.4497,
"step": 22
},
{
"epoch": 0.3865546218487395,
"grad_norm": 0.2353515625,
"learning_rate": 0.0001983716139385641,
"loss": 2.4217,
"step": 23
},
{
"epoch": 0.40336134453781514,
"grad_norm": 0.193359375,
"learning_rate": 0.0001981122789824607,
"loss": 2.3775,
"step": 24
},
{
"epoch": 0.42016806722689076,
"grad_norm": 0.201171875,
"learning_rate": 0.00019783398573307428,
"loss": 2.3257,
"step": 25
},
{
"epoch": 0.4369747899159664,
"grad_norm": 0.2265625,
"learning_rate": 0.00019753678796517282,
"loss": 2.3262,
"step": 26
},
{
"epoch": 0.453781512605042,
"grad_norm": 0.1552734375,
"learning_rate": 0.00019722074310645553,
"loss": 2.3464,
"step": 27
},
{
"epoch": 0.47058823529411764,
"grad_norm": 0.2197265625,
"learning_rate": 0.00019688591222645607,
"loss": 2.4651,
"step": 28
},
{
"epoch": 0.48739495798319327,
"grad_norm": 0.24609375,
"learning_rate": 0.000196532360024742,
"loss": 2.2998,
"step": 29
},
{
"epoch": 0.5042016806722689,
"grad_norm": 0.2138671875,
"learning_rate": 0.0001961601548184129,
"loss": 2.3678,
"step": 30
},
{
"epoch": 0.5210084033613446,
"grad_norm": 0.25390625,
"learning_rate": 0.00019576936852889936,
"loss": 2.285,
"step": 31
},
{
"epoch": 0.5378151260504201,
"grad_norm": 0.1865234375,
"learning_rate": 0.00019536007666806556,
"loss": 2.316,
"step": 32
},
{
"epoch": 0.5546218487394958,
"grad_norm": 0.2412109375,
"learning_rate": 0.0001949323583236181,
"loss": 2.3368,
"step": 33
},
{
"epoch": 0.5714285714285714,
"grad_norm": 0.28125,
"learning_rate": 0.0001944862961438239,
"loss": 2.2433,
"step": 34
},
{
"epoch": 0.5882352941176471,
"grad_norm": 0.1884765625,
"learning_rate": 0.00019402197632153992,
"loss": 2.4387,
"step": 35
},
{
"epoch": 0.6050420168067226,
"grad_norm": 0.25390625,
"learning_rate": 0.00019353948857755803,
"loss": 2.3809,
"step": 36
},
{
"epoch": 0.6218487394957983,
"grad_norm": 0.1630859375,
"learning_rate": 0.00019303892614326836,
"loss": 2.2351,
"step": 37
},
{
"epoch": 0.6386554621848739,
"grad_norm": 0.1806640625,
"learning_rate": 0.00019252038574264405,
"loss": 2.231,
"step": 38
},
{
"epoch": 0.6554621848739496,
"grad_norm": 0.181640625,
"learning_rate": 0.00019198396757355118,
"loss": 2.3298,
"step": 39
},
{
"epoch": 0.6722689075630253,
"grad_norm": 0.1708984375,
"learning_rate": 0.00019142977528838762,
"loss": 2.3444,
"step": 40
},
{
"epoch": 0.6890756302521008,
"grad_norm": 0.2080078125,
"learning_rate": 0.00019085791597405404,
"loss": 2.2819,
"step": 41
},
{
"epoch": 0.7058823529411765,
"grad_norm": 0.169921875,
"learning_rate": 0.00019026850013126157,
"loss": 2.2556,
"step": 42
},
{
"epoch": 0.7226890756302521,
"grad_norm": 0.1572265625,
"learning_rate": 0.00018966164165317966,
"loss": 2.3512,
"step": 43
},
{
"epoch": 0.7394957983193278,
"grad_norm": 0.2431640625,
"learning_rate": 0.00018903745780342839,
"loss": 2.242,
"step": 44
},
{
"epoch": 0.7563025210084033,
"grad_norm": 0.177734375,
"learning_rate": 0.0001883960691934196,
"loss": 2.2857,
"step": 45
},
{
"epoch": 0.773109243697479,
"grad_norm": 0.25390625,
"learning_rate": 0.00018773759975905098,
"loss": 2.3035,
"step": 46
},
{
"epoch": 0.7899159663865546,
"grad_norm": 0.1630859375,
"learning_rate": 0.00018706217673675811,
"loss": 2.3832,
"step": 47
},
{
"epoch": 0.8067226890756303,
"grad_norm": 0.2333984375,
"learning_rate": 0.0001863699306389282,
"loss": 2.2734,
"step": 48
},
{
"epoch": 0.8235294117647058,
"grad_norm": 0.1845703125,
"learning_rate": 0.00018566099522868119,
"loss": 2.3121,
"step": 49
},
{
"epoch": 0.8403361344537815,
"grad_norm": 0.16796875,
"learning_rate": 0.00018493550749402278,
"loss": 2.3118,
"step": 50
},
{
"epoch": 0.8571428571428571,
"grad_norm": 0.1689453125,
"learning_rate": 0.00018419360762137395,
"loss": 2.2884,
"step": 51
},
{
"epoch": 0.8739495798319328,
"grad_norm": 0.1875,
"learning_rate": 0.00018343543896848273,
"loss": 2.2719,
"step": 52
},
{
"epoch": 0.8907563025210085,
"grad_norm": 0.1845703125,
"learning_rate": 0.00018266114803672318,
"loss": 2.275,
"step": 53
},
{
"epoch": 0.907563025210084,
"grad_norm": 0.1806640625,
"learning_rate": 0.00018187088444278674,
"loss": 2.2271,
"step": 54
},
{
"epoch": 0.9243697478991597,
"grad_norm": 0.1669921875,
"learning_rate": 0.00018106480088977172,
"loss": 2.2866,
"step": 55
},
{
"epoch": 0.9411764705882353,
"grad_norm": 0.1728515625,
"learning_rate": 0.00018024305313767646,
"loss": 2.2417,
"step": 56
},
{
"epoch": 0.957983193277311,
"grad_norm": 0.2333984375,
"learning_rate": 0.00017940579997330165,
"loss": 2.327,
"step": 57
},
{
"epoch": 0.9747899159663865,
"grad_norm": 0.1728515625,
"learning_rate": 0.00017855320317956784,
"loss": 2.2002,
"step": 58
},
{
"epoch": 0.9915966386554622,
"grad_norm": 0.2060546875,
"learning_rate": 0.00017768542750425426,
"loss": 2.3351,
"step": 59
},
{
"epoch": 1.0084033613445378,
"grad_norm": 0.17578125,
"learning_rate": 0.0001768026406281642,
"loss": 2.2484,
"step": 60
},
{
"epoch": 1.0252100840336134,
"grad_norm": 0.2021484375,
"learning_rate": 0.00017590501313272415,
"loss": 2.3226,
"step": 61
},
{
"epoch": 1.0420168067226891,
"grad_norm": 0.2080078125,
"learning_rate": 0.00017499271846702213,
"loss": 2.3126,
"step": 62
},
{
"epoch": 1.0588235294117647,
"grad_norm": 0.16796875,
"learning_rate": 0.00017406593291429217,
"loss": 2.3496,
"step": 63
},
{
"epoch": 1.0756302521008403,
"grad_norm": 0.21875,
"learning_rate": 0.00017312483555785086,
"loss": 2.3221,
"step": 64
},
{
"epoch": 1.0924369747899159,
"grad_norm": 0.171875,
"learning_rate": 0.00017216960824649303,
"loss": 2.2812,
"step": 65
},
{
"epoch": 1.0084033613445378,
"grad_norm": 0.18359375,
"learning_rate": 0.00017120043555935298,
"loss": 2.1682,
"step": 66
},
{
"epoch": 1.0252100840336134,
"grad_norm": 0.267578125,
"learning_rate": 0.0001702175047702382,
"loss": 2.1584,
"step": 67
},
{
"epoch": 1.0420168067226891,
"grad_norm": 0.20703125,
"learning_rate": 0.00016922100581144228,
"loss": 2.1734,
"step": 68
},
{
"epoch": 1.0588235294117647,
"grad_norm": 0.21875,
"learning_rate": 0.00016821113123704424,
"loss": 2.3534,
"step": 69
},
{
"epoch": 1.0756302521008403,
"grad_norm": 0.318359375,
"learning_rate": 0.00016718807618570106,
"loss": 2.2248,
"step": 70
},
{
"epoch": 1.092436974789916,
"grad_norm": 0.181640625,
"learning_rate": 0.00016615203834294119,
"loss": 2.2215,
"step": 71
},
{
"epoch": 1.1092436974789917,
"grad_norm": 0.25,
"learning_rate": 0.00016510321790296525,
"loss": 2.2602,
"step": 72
},
{
"epoch": 1.1260504201680672,
"grad_norm": 0.248046875,
"learning_rate": 0.00016404181752996289,
"loss": 2.2532,
"step": 73
},
{
"epoch": 1.1428571428571428,
"grad_norm": 0.185546875,
"learning_rate": 0.00016296804231895142,
"loss": 2.2066,
"step": 74
},
{
"epoch": 1.1596638655462184,
"grad_norm": 0.30078125,
"learning_rate": 0.00016188209975614542,
"loss": 2.1352,
"step": 75
},
{
"epoch": 1.1764705882352942,
"grad_norm": 0.2001953125,
"learning_rate": 0.00016078419967886402,
"loss": 2.256,
"step": 76
},
{
"epoch": 1.1932773109243697,
"grad_norm": 0.2080078125,
"learning_rate": 0.00015967455423498387,
"loss": 2.2871,
"step": 77
},
{
"epoch": 1.2100840336134453,
"grad_norm": 0.2119140625,
"learning_rate": 0.00015855337784194577,
"loss": 2.2594,
"step": 78
},
{
"epoch": 1.226890756302521,
"grad_norm": 0.2021484375,
"learning_rate": 0.00015742088714532247,
"loss": 2.2358,
"step": 79
},
{
"epoch": 1.2436974789915967,
"grad_norm": 0.21875,
"learning_rate": 0.00015627730097695638,
"loss": 2.2143,
"step": 80
},
{
"epoch": 1.2605042016806722,
"grad_norm": 0.318359375,
"learning_rate": 0.00015512284031267437,
"loss": 2.1646,
"step": 81
},
{
"epoch": 1.2773109243697478,
"grad_norm": 0.2119140625,
"learning_rate": 0.00015395772822958845,
"loss": 2.1203,
"step": 82
},
{
"epoch": 1.2941176470588236,
"grad_norm": 0.28125,
"learning_rate": 0.00015278218986299074,
"loss": 2.0921,
"step": 83
},
{
"epoch": 1.3109243697478992,
"grad_norm": 0.291015625,
"learning_rate": 0.0001515964523628501,
"loss": 2.1823,
"step": 84
},
{
"epoch": 1.3277310924369747,
"grad_norm": 0.197265625,
"learning_rate": 0.00015040074484992,
"loss": 2.1761,
"step": 85
},
{
"epoch": 1.3445378151260505,
"grad_norm": 0.29296875,
"learning_rate": 0.00014919529837146528,
"loss": 2.1554,
"step": 86
},
{
"epoch": 1.361344537815126,
"grad_norm": 0.201171875,
"learning_rate": 0.00014798034585661695,
"loss": 2.222,
"step": 87
},
{
"epoch": 1.3781512605042017,
"grad_norm": 0.1884765625,
"learning_rate": 0.0001467561220713628,
"loss": 2.0991,
"step": 88
},
{
"epoch": 1.3949579831932772,
"grad_norm": 0.283203125,
"learning_rate": 0.0001455228635731839,
"loss": 2.2474,
"step": 89
},
{
"epoch": 1.4117647058823528,
"grad_norm": 0.197265625,
"learning_rate": 0.00014428080866534396,
"loss": 2.049,
"step": 90
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.2119140625,
"learning_rate": 0.00014303019735084226,
"loss": 2.2173,
"step": 91
},
{
"epoch": 1.4453781512605042,
"grad_norm": 0.2265625,
"learning_rate": 0.00014177127128603745,
"loss": 2.1714,
"step": 92
},
{
"epoch": 1.46218487394958,
"grad_norm": 0.271484375,
"learning_rate": 0.0001405042737339524,
"loss": 2.1857,
"step": 93
},
{
"epoch": 1.4789915966386555,
"grad_norm": 0.232421875,
"learning_rate": 0.0001392294495172681,
"loss": 2.2088,
"step": 94
},
{
"epoch": 1.495798319327731,
"grad_norm": 0.2109375,
"learning_rate": 0.00013794704497101655,
"loss": 2.0494,
"step": 95
},
{
"epoch": 1.5126050420168067,
"grad_norm": 0.328125,
"learning_rate": 0.0001366573078949813,
"loss": 2.2291,
"step": 96
},
{
"epoch": 1.5294117647058822,
"grad_norm": 0.2158203125,
"learning_rate": 0.00013536048750581494,
"loss": 2.1513,
"step": 97
},
{
"epoch": 1.5462184873949578,
"grad_norm": 0.251953125,
"learning_rate": 0.00013405683438888282,
"loss": 2.2065,
"step": 98
},
{
"epoch": 1.5630252100840336,
"grad_norm": 0.2734375,
"learning_rate": 0.00013274660044984224,
"loss": 2.1891,
"step": 99
},
{
"epoch": 1.5798319327731094,
"grad_norm": 0.220703125,
"learning_rate": 0.00013143003886596669,
"loss": 2.1263,
"step": 100
},
{
"epoch": 1.596638655462185,
"grad_norm": 0.279296875,
"learning_rate": 0.0001301074040372242,
"loss": 2.1815,
"step": 101
},
{
"epoch": 1.6134453781512605,
"grad_norm": 0.275390625,
"learning_rate": 0.00012877895153711935,
"loss": 2.1458,
"step": 102
},
{
"epoch": 1.6302521008403361,
"grad_norm": 0.22265625,
"learning_rate": 0.0001274449380633089,
"loss": 2.2204,
"step": 103
},
{
"epoch": 1.6470588235294117,
"grad_norm": 0.2099609375,
"learning_rate": 0.00012610562138799978,
"loss": 2.2575,
"step": 104
},
{
"epoch": 1.6638655462184873,
"grad_norm": 0.33984375,
"learning_rate": 0.00012476126030813963,
"loss": 2.1376,
"step": 105
},
{
"epoch": 1.680672268907563,
"grad_norm": 0.259765625,
"learning_rate": 0.0001234121145954094,
"loss": 2.1891,
"step": 106
},
{
"epoch": 1.6974789915966386,
"grad_norm": 0.23828125,
"learning_rate": 0.0001220584449460274,
"loss": 2.2351,
"step": 107
},
{
"epoch": 1.7142857142857144,
"grad_norm": 0.314453125,
"learning_rate": 0.00012070051293037492,
"loss": 2.2202,
"step": 108
},
{
"epoch": 1.73109243697479,
"grad_norm": 0.248046875,
"learning_rate": 0.00011933858094245281,
"loss": 2.1824,
"step": 109
},
{
"epoch": 1.7478991596638656,
"grad_norm": 0.25390625,
"learning_rate": 0.00011797291214917881,
"loss": 2.1288,
"step": 110
},
{
"epoch": 1.7647058823529411,
"grad_norm": 0.248046875,
"learning_rate": 0.00011660377043953588,
"loss": 2.1954,
"step": 111
},
{
"epoch": 1.7815126050420167,
"grad_norm": 0.2275390625,
"learning_rate": 0.0001152314203735805,
"loss": 2.224,
"step": 112
},
{
"epoch": 1.7983193277310925,
"grad_norm": 0.2080078125,
"learning_rate": 0.0001138561271313219,
"loss": 2.2759,
"step": 113
},
{
"epoch": 1.815126050420168,
"grad_norm": 0.234375,
"learning_rate": 0.00011247815646148087,
"loss": 2.0882,
"step": 114
},
{
"epoch": 1.8319327731092439,
"grad_norm": 0.2294921875,
"learning_rate": 0.00011109777463013915,
"loss": 2.0767,
"step": 115
},
{
"epoch": 1.8487394957983194,
"grad_norm": 0.3125,
"learning_rate": 0.0001097152483692886,
"loss": 2.2182,
"step": 116
},
{
"epoch": 1.865546218487395,
"grad_norm": 0.236328125,
"learning_rate": 0.00010833084482529048,
"loss": 2.1451,
"step": 117
},
{
"epoch": 1.8823529411764706,
"grad_norm": 0.2119140625,
"learning_rate": 0.00010694483150725458,
"loss": 2.1774,
"step": 118
},
{
"epoch": 1.8991596638655461,
"grad_norm": 0.24609375,
"learning_rate": 0.00010555747623534831,
"loss": 2.1748,
"step": 119
},
{
"epoch": 1.9159663865546217,
"grad_norm": 0.279296875,
"learning_rate": 0.00010416904708904548,
"loss": 2.1665,
"step": 120
},
{
"epoch": 1.9327731092436975,
"grad_norm": 0.2490234375,
"learning_rate": 0.00010277981235532541,
"loss": 2.1585,
"step": 121
},
{
"epoch": 1.949579831932773,
"grad_norm": 0.248046875,
"learning_rate": 0.00010139004047683151,
"loss": 2.1784,
"step": 122
},
{
"epoch": 1.9663865546218489,
"grad_norm": 0.296875,
"learning_rate": 0.0001,
"loss": 2.2399,
"step": 123
},
{
"epoch": 1.9831932773109244,
"grad_norm": 0.22265625,
"learning_rate": 9.860995952316851e-05,
"loss": 2.2187,
"step": 124
},
{
"epoch": 2.0,
"grad_norm": 0.2294921875,
"learning_rate": 9.722018764467461e-05,
"loss": 2.2007,
"step": 125
},
{
"epoch": 2.0168067226890756,
"grad_norm": 0.236328125,
"learning_rate": 9.583095291095453e-05,
"loss": 2.2312,
"step": 126
},
{
"epoch": 2.033613445378151,
"grad_norm": 0.220703125,
"learning_rate": 9.444252376465171e-05,
"loss": 2.2207,
"step": 127
},
{
"epoch": 2.0504201680672267,
"grad_norm": 0.234375,
"learning_rate": 9.305516849274541e-05,
"loss": 2.2537,
"step": 128
},
{
"epoch": 2.0672268907563023,
"grad_norm": 0.2265625,
"learning_rate": 9.166915517470953e-05,
"loss": 2.2015,
"step": 129
},
{
"epoch": 2.084033613445378,
"grad_norm": 0.248046875,
"learning_rate": 9.028475163071141e-05,
"loss": 2.2012,
"step": 130
},
{
"epoch": 2.100840336134454,
"grad_norm": 0.2255859375,
"learning_rate": 8.890222536986085e-05,
"loss": 2.1247,
"step": 131
},
{
"epoch": 2.008403361344538,
"grad_norm": 0.27734375,
"learning_rate": 8.752184353851916e-05,
"loss": 2.1723,
"step": 132
},
{
"epoch": 2.0252100840336134,
"grad_norm": 0.306640625,
"learning_rate": 8.614387286867814e-05,
"loss": 2.083,
"step": 133
},
{
"epoch": 2.042016806722689,
"grad_norm": 0.2255859375,
"learning_rate": 8.47685796264195e-05,
"loss": 2.014,
"step": 134
},
{
"epoch": 2.0588235294117645,
"grad_norm": 0.275390625,
"learning_rate": 8.339622956046417e-05,
"loss": 2.0522,
"step": 135
},
{
"epoch": 2.0756302521008405,
"grad_norm": 0.271484375,
"learning_rate": 8.202708785082121e-05,
"loss": 2.0255,
"step": 136
},
{
"epoch": 2.092436974789916,
"grad_norm": 0.265625,
"learning_rate": 8.066141905754723e-05,
"loss": 2.0615,
"step": 137
},
{
"epoch": 2.1092436974789917,
"grad_norm": 0.2578125,
"learning_rate": 7.929948706962508e-05,
"loss": 2.0913,
"step": 138
},
{
"epoch": 2.1260504201680672,
"grad_norm": 0.25,
"learning_rate": 7.794155505397261e-05,
"loss": 2.0726,
"step": 139
},
{
"epoch": 2.142857142857143,
"grad_norm": 0.29296875,
"learning_rate": 7.658788540459062e-05,
"loss": 2.0546,
"step": 140
},
{
"epoch": 2.1596638655462184,
"grad_norm": 0.271484375,
"learning_rate": 7.523873969186039e-05,
"loss": 2.0894,
"step": 141
},
{
"epoch": 2.176470588235294,
"grad_norm": 0.2490234375,
"learning_rate": 7.389437861200024e-05,
"loss": 1.9735,
"step": 142
},
{
"epoch": 2.19327731092437,
"grad_norm": 0.267578125,
"learning_rate": 7.25550619366911e-05,
"loss": 2.0254,
"step": 143
},
{
"epoch": 2.2100840336134455,
"grad_norm": 0.265625,
"learning_rate": 7.122104846288064e-05,
"loss": 2.0967,
"step": 144
},
{
"epoch": 2.226890756302521,
"grad_norm": 0.279296875,
"learning_rate": 6.989259596277582e-05,
"loss": 2.0115,
"step": 145
},
{
"epoch": 2.2436974789915967,
"grad_norm": 0.306640625,
"learning_rate": 6.85699611340333e-05,
"loss": 2.1694,
"step": 146
},
{
"epoch": 2.2605042016806722,
"grad_norm": 0.28125,
"learning_rate": 6.725339955015777e-05,
"loss": 2.0343,
"step": 147
},
{
"epoch": 2.277310924369748,
"grad_norm": 0.265625,
"learning_rate": 6.594316561111724e-05,
"loss": 2.0645,
"step": 148
},
{
"epoch": 2.2941176470588234,
"grad_norm": 0.30078125,
"learning_rate": 6.46395124941851e-05,
"loss": 2.0,
"step": 149
},
{
"epoch": 2.310924369747899,
"grad_norm": 0.29296875,
"learning_rate": 6.334269210501875e-05,
"loss": 2.0821,
"step": 150
},
{
"epoch": 2.327731092436975,
"grad_norm": 0.314453125,
"learning_rate": 6.205295502898348e-05,
"loss": 2.031,
"step": 151
},
{
"epoch": 2.3445378151260505,
"grad_norm": 0.330078125,
"learning_rate": 6.0770550482731924e-05,
"loss": 2.0395,
"step": 152
},
{
"epoch": 2.361344537815126,
"grad_norm": 0.30078125,
"learning_rate": 5.9495726266047605e-05,
"loss": 2.1277,
"step": 153
},
{
"epoch": 2.3781512605042017,
"grad_norm": 0.287109375,
"learning_rate": 5.8228728713962543e-05,
"loss": 2.0844,
"step": 154
},
{
"epoch": 2.3949579831932772,
"grad_norm": 0.3046875,
"learning_rate": 5.696980264915777e-05,
"loss": 2.0771,
"step": 155
},
{
"epoch": 2.411764705882353,
"grad_norm": 0.302734375,
"learning_rate": 5.571919133465605e-05,
"loss": 2.0715,
"step": 156
},
{
"epoch": 2.4285714285714284,
"grad_norm": 0.30859375,
"learning_rate": 5.447713642681612e-05,
"loss": 2.0176,
"step": 157
},
{
"epoch": 2.4453781512605044,
"grad_norm": 0.298828125,
"learning_rate": 5.324387792863719e-05,
"loss": 2.0616,
"step": 158
},
{
"epoch": 2.46218487394958,
"grad_norm": 0.279296875,
"learning_rate": 5.201965414338308e-05,
"loss": 2.1075,
"step": 159
},
{
"epoch": 2.4789915966386555,
"grad_norm": 0.291015625,
"learning_rate": 5.080470162853472e-05,
"loss": 2.1146,
"step": 160
},
{
"epoch": 2.495798319327731,
"grad_norm": 0.3203125,
"learning_rate": 4.959925515008002e-05,
"loss": 2.0432,
"step": 161
},
{
"epoch": 2.5126050420168067,
"grad_norm": 0.2890625,
"learning_rate": 4.840354763714991e-05,
"loss": 2.0304,
"step": 162
},
{
"epoch": 2.5294117647058822,
"grad_norm": 0.279296875,
"learning_rate": 4.7217810137009274e-05,
"loss": 2.128,
"step": 163
},
{
"epoch": 2.546218487394958,
"grad_norm": 0.318359375,
"learning_rate": 4.604227177041156e-05,
"loss": 2.1879,
"step": 164
},
{
"epoch": 2.5630252100840334,
"grad_norm": 0.310546875,
"learning_rate": 4.487715968732568e-05,
"loss": 2.0896,
"step": 165
},
{
"epoch": 2.5798319327731094,
"grad_norm": 0.318359375,
"learning_rate": 4.372269902304363e-05,
"loss": 2.1105,
"step": 166
},
{
"epoch": 2.596638655462185,
"grad_norm": 0.29296875,
"learning_rate": 4.257911285467754e-05,
"loss": 2.0194,
"step": 167
},
{
"epoch": 2.6134453781512605,
"grad_norm": 0.28515625,
"learning_rate": 4.144662215805426e-05,
"loss": 2.1294,
"step": 168
},
{
"epoch": 2.630252100840336,
"grad_norm": 0.30078125,
"learning_rate": 4.0325445765016145e-05,
"loss": 2.0898,
"step": 169
},
{
"epoch": 2.6470588235294117,
"grad_norm": 0.2890625,
"learning_rate": 3.921580032113602e-05,
"loss": 2.0419,
"step": 170
},
{
"epoch": 2.6638655462184873,
"grad_norm": 0.314453125,
"learning_rate": 3.8117900243854595e-05,
"loss": 2.0747,
"step": 171
},
{
"epoch": 2.6806722689075633,
"grad_norm": 0.291015625,
"learning_rate": 3.7031957681048604e-05,
"loss": 2.1093,
"step": 172
},
{
"epoch": 2.697478991596639,
"grad_norm": 0.294921875,
"learning_rate": 3.595818247003713e-05,
"loss": 2.0736,
"step": 173
},
{
"epoch": 2.7142857142857144,
"grad_norm": 0.296875,
"learning_rate": 3.489678209703475e-05,
"loss": 2.1207,
"step": 174
},
{
"epoch": 2.73109243697479,
"grad_norm": 0.28125,
"learning_rate": 3.3847961657058845e-05,
"loss": 2.1366,
"step": 175
},
{
"epoch": 2.7478991596638656,
"grad_norm": 0.31640625,
"learning_rate": 3.281192381429894e-05,
"loss": 2.1676,
"step": 176
},
{
"epoch": 2.764705882352941,
"grad_norm": 0.291015625,
"learning_rate": 3.178886876295578e-05,
"loss": 2.0748,
"step": 177
},
{
"epoch": 2.7815126050420167,
"grad_norm": 0.296875,
"learning_rate": 3.077899418855772e-05,
"loss": 2.0757,
"step": 178
},
{
"epoch": 2.7983193277310923,
"grad_norm": 0.291015625,
"learning_rate": 2.9782495229761808e-05,
"loss": 2.106,
"step": 179
},
{
"epoch": 2.815126050420168,
"grad_norm": 0.291015625,
"learning_rate": 2.879956444064703e-05,
"loss": 2.0526,
"step": 180
},
{
"epoch": 2.831932773109244,
"grad_norm": 0.333984375,
"learning_rate": 2.783039175350699e-05,
"loss": 2.1291,
"step": 181
},
{
"epoch": 2.8487394957983194,
"grad_norm": 0.3203125,
"learning_rate": 2.6875164442149147e-05,
"loss": 2.116,
"step": 182
},
{
"epoch": 2.865546218487395,
"grad_norm": 0.3046875,
"learning_rate": 2.5934067085707834e-05,
"loss": 2.0595,
"step": 183
},
{
"epoch": 2.8823529411764706,
"grad_norm": 0.29296875,
"learning_rate": 2.500728153297788e-05,
"loss": 2.0402,
"step": 184
},
{
"epoch": 2.899159663865546,
"grad_norm": 0.306640625,
"learning_rate": 2.409498686727587e-05,
"loss": 2.1448,
"step": 185
},
{
"epoch": 2.9159663865546217,
"grad_norm": 0.302734375,
"learning_rate": 2.3197359371835802e-05,
"loss": 2.0672,
"step": 186
},
{
"epoch": 2.9327731092436977,
"grad_norm": 0.291015625,
"learning_rate": 2.2314572495745746e-05,
"loss": 2.1085,
"step": 187
},
{
"epoch": 2.9495798319327733,
"grad_norm": 0.302734375,
"learning_rate": 2.1446796820432167e-05,
"loss": 2.0861,
"step": 188
},
{
"epoch": 2.966386554621849,
"grad_norm": 0.3046875,
"learning_rate": 2.0594200026698363e-05,
"loss": 2.1058,
"step": 189
},
{
"epoch": 2.9831932773109244,
"grad_norm": 0.298828125,
"learning_rate": 1.9756946862323535e-05,
"loss": 2.1034,
"step": 190
},
{
"epoch": 3.0,
"grad_norm": 0.30859375,
"learning_rate": 1.8935199110228275e-05,
"loss": 2.112,
"step": 191
},
{
"epoch": 3.0168067226890756,
"grad_norm": 0.32421875,
"learning_rate": 1.8129115557213262e-05,
"loss": 2.0627,
"step": 192
},
{
"epoch": 3.033613445378151,
"grad_norm": 0.287109375,
"learning_rate": 1.7338851963276825e-05,
"loss": 2.0705,
"step": 193
},
{
"epoch": 3.0504201680672267,
"grad_norm": 0.291015625,
"learning_rate": 1.656456103151728e-05,
"loss": 2.0702,
"step": 194
},
{
"epoch": 3.0672268907563023,
"grad_norm": 0.283203125,
"learning_rate": 1.580639237862608e-05,
"loss": 2.0607,
"step": 195
},
{
"epoch": 3.084033613445378,
"grad_norm": 0.28515625,
"learning_rate": 1.5064492505977234e-05,
"loss": 2.0568,
"step": 196
},
{
"epoch": 3.100840336134454,
"grad_norm": 0.287109375,
"learning_rate": 1.433900477131882e-05,
"loss": 2.0892,
"step": 197
},
{
"epoch": 3.004201680672269,
"grad_norm": 0.68359375,
"learning_rate": 1.363006936107183e-05,
"loss": 2.092,
"step": 198
},
{
"epoch": 3.0210084033613445,
"grad_norm": 0.279296875,
"learning_rate": 1.29378232632419e-05,
"loss": 2.0288,
"step": 199
},
{
"epoch": 3.03781512605042,
"grad_norm": 0.283203125,
"learning_rate": 1.2262400240949023e-05,
"loss": 2.0081,
"step": 200
},
{
"epoch": 3.0546218487394956,
"grad_norm": 0.3125,
"learning_rate": 1.1603930806580444e-05,
"loss": 2.1497,
"step": 201
},
{
"epoch": 3.0714285714285716,
"grad_norm": 0.302734375,
"learning_rate": 1.0962542196571634e-05,
"loss": 2.0169,
"step": 202
},
{
"epoch": 3.088235294117647,
"grad_norm": 0.296875,
"learning_rate": 1.0338358346820353e-05,
"loss": 2.0305,
"step": 203
},
{
"epoch": 3.1050420168067228,
"grad_norm": 0.2890625,
"learning_rate": 9.731499868738447e-06,
"loss": 2.1016,
"step": 204
},
{
"epoch": 3.1218487394957983,
"grad_norm": 0.3046875,
"learning_rate": 9.142084025945984e-06,
"loss": 2.0196,
"step": 205
},
{
"epoch": 3.138655462184874,
"grad_norm": 0.298828125,
"learning_rate": 8.570224711612385e-06,
"loss": 2.0644,
"step": 206
},
{
"epoch": 3.1554621848739495,
"grad_norm": 0.291015625,
"learning_rate": 8.016032426448817e-06,
"loss": 2.0463,
"step": 207
},
{
"epoch": 3.172268907563025,
"grad_norm": 0.310546875,
"learning_rate": 7.479614257355971e-06,
"loss": 2.043,
"step": 208
},
{
"epoch": 3.189075630252101,
"grad_norm": 0.30078125,
"learning_rate": 6.961073856731648e-06,
"loss": 2.0055,
"step": 209
},
{
"epoch": 3.2058823529411766,
"grad_norm": 0.298828125,
"learning_rate": 6.460511422441984e-06,
"loss": 2.0479,
"step": 210
},
{
"epoch": 3.222689075630252,
"grad_norm": 0.30859375,
"learning_rate": 5.978023678460099e-06,
"loss": 2.0462,
"step": 211
},
{
"epoch": 3.2394957983193278,
"grad_norm": 0.3203125,
"learning_rate": 5.5137038561761115e-06,
"loss": 1.9518,
"step": 212
},
{
"epoch": 3.2563025210084033,
"grad_norm": 0.29296875,
"learning_rate": 5.067641676381918e-06,
"loss": 2.0281,
"step": 213
},
{
"epoch": 3.273109243697479,
"grad_norm": 0.302734375,
"learning_rate": 4.639923331934471e-06,
"loss": 2.0816,
"step": 214
},
{
"epoch": 3.2899159663865545,
"grad_norm": 0.29296875,
"learning_rate": 4.230631471100655e-06,
"loss": 1.9686,
"step": 215
},
{
"epoch": 3.30672268907563,
"grad_norm": 0.30078125,
"learning_rate": 3.839845181587098e-06,
"loss": 2.0643,
"step": 216
},
{
"epoch": 3.323529411764706,
"grad_norm": 0.3125,
"learning_rate": 3.467639975257997e-06,
"loss": 1.9867,
"step": 217
},
{
"epoch": 3.3403361344537816,
"grad_norm": 0.298828125,
"learning_rate": 3.1140877735439387e-06,
"loss": 1.9972,
"step": 218
},
{
"epoch": 3.357142857142857,
"grad_norm": 0.3046875,
"learning_rate": 2.7792568935444796e-06,
"loss": 2.0394,
"step": 219
},
{
"epoch": 3.3739495798319328,
"grad_norm": 0.287109375,
"learning_rate": 2.4632120348272003e-06,
"loss": 1.945,
"step": 220
},
{
"epoch": 3.3907563025210083,
"grad_norm": 0.3046875,
"learning_rate": 2.166014266925731e-06,
"loss": 2.0512,
"step": 221
},
{
"epoch": 3.407563025210084,
"grad_norm": 0.298828125,
"learning_rate": 1.88772101753929e-06,
"loss": 1.991,
"step": 222
},
{
"epoch": 3.4243697478991595,
"grad_norm": 0.296875,
"learning_rate": 1.6283860614358936e-06,
"loss": 1.9694,
"step": 223
},
{
"epoch": 3.4411764705882355,
"grad_norm": 0.333984375,
"learning_rate": 1.3880595100613792e-06,
"loss": 2.0378,
"step": 224
},
{
"epoch": 3.457983193277311,
"grad_norm": 0.294921875,
"learning_rate": 1.1667878018564171e-06,
"loss": 1.9876,
"step": 225
},
{
"epoch": 3.4747899159663866,
"grad_norm": 0.294921875,
"learning_rate": 9.64613693283123e-07,
"loss": 2.0086,
"step": 226
},
{
"epoch": 3.491596638655462,
"grad_norm": 0.294921875,
"learning_rate": 7.815762505632096e-07,
"loss": 1.9774,
"step": 227
},
{
"epoch": 3.508403361344538,
"grad_norm": 0.29296875,
"learning_rate": 6.177108421292266e-07,
"loss": 2.0522,
"step": 228
},
{
"epoch": 3.5252100840336134,
"grad_norm": 0.296875,
"learning_rate": 4.7304913179025965e-07,
"loss": 2.0401,
"step": 229
},
{
"epoch": 3.542016806722689,
"grad_norm": 0.34375,
"learning_rate": 3.4761907261356976e-07,
"loss": 2.1176,
"step": 230
},
{
"epoch": 3.5588235294117645,
"grad_norm": 0.306640625,
"learning_rate": 2.414449015231357e-07,
"loss": 2.0368,
"step": 231
},
{
"epoch": 3.57563025210084,
"grad_norm": 0.298828125,
"learning_rate": 1.545471346164007e-07,
"loss": 2.0378,
"step": 232
},
{
"epoch": 3.592436974789916,
"grad_norm": 0.283203125,
"learning_rate": 8.694256319987659e-08,
"loss": 2.0836,
"step": 233
},
{
"epoch": 3.6092436974789917,
"grad_norm": 0.296875,
"learning_rate": 3.8644250544594975e-08,
"loss": 1.9831,
"step": 234
},
{
"epoch": 3.6260504201680672,
"grad_norm": 0.3046875,
"learning_rate": 9.661529361892907e-09,
"loss": 2.014,
"step": 235
},
{
"epoch": 3.642857142857143,
"grad_norm": 0.298828125,
"learning_rate": 0.0,
"loss": 1.9648,
"step": 236
}
],
"logging_steps": 1,
"max_steps": 236,
"num_input_tokens_seen": 0,
"num_train_epochs": 4,
"save_steps": 30,
"total_flos": 3.519304308086538e+17,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}