VQA-Streaming-Scorer / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 263,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0038022813688212928,
"grad_norm": 5.838922807698357,
"learning_rate": 1.25e-06,
"loss": 0.4415,
"step": 1
},
{
"epoch": 0.0076045627376425855,
"grad_norm": 6.450188921175257,
"learning_rate": 2.5e-06,
"loss": 0.2714,
"step": 2
},
{
"epoch": 0.011406844106463879,
"grad_norm": 6.328807457221606,
"learning_rate": 3.7500000000000005e-06,
"loss": 0.2392,
"step": 3
},
{
"epoch": 0.015209125475285171,
"grad_norm": 3.1900170578862403,
"learning_rate": 5e-06,
"loss": 0.1902,
"step": 4
},
{
"epoch": 0.019011406844106463,
"grad_norm": 4.396787628400217,
"learning_rate": 6.25e-06,
"loss": 0.2287,
"step": 5
},
{
"epoch": 0.022813688212927757,
"grad_norm": 2.328876214447226,
"learning_rate": 7.500000000000001e-06,
"loss": 0.1073,
"step": 6
},
{
"epoch": 0.026615969581749048,
"grad_norm": 1.9893066003363478,
"learning_rate": 8.750000000000001e-06,
"loss": 0.0892,
"step": 7
},
{
"epoch": 0.030418250950570342,
"grad_norm": 3.4491162158646227,
"learning_rate": 1e-05,
"loss": 0.1859,
"step": 8
},
{
"epoch": 0.034220532319391636,
"grad_norm": 3.391007553648945,
"learning_rate": 9.999620550574155e-06,
"loss": 0.1162,
"step": 9
},
{
"epoch": 0.03802281368821293,
"grad_norm": 5.243929471727456,
"learning_rate": 9.99848225988936e-06,
"loss": 0.2052,
"step": 10
},
{
"epoch": 0.04182509505703422,
"grad_norm": 2.556605157640159,
"learning_rate": 9.996585300715117e-06,
"loss": 0.1279,
"step": 11
},
{
"epoch": 0.045627376425855515,
"grad_norm": 2.3229950077008232,
"learning_rate": 9.99392996097145e-06,
"loss": 0.2329,
"step": 12
},
{
"epoch": 0.049429657794676805,
"grad_norm": 3.4538715709146754,
"learning_rate": 9.990516643685222e-06,
"loss": 0.0859,
"step": 13
},
{
"epoch": 0.053231939163498096,
"grad_norm": 2.144930306306445,
"learning_rate": 9.98634586692894e-06,
"loss": 0.1541,
"step": 14
},
{
"epoch": 0.057034220532319393,
"grad_norm": 1.6344989568819797,
"learning_rate": 9.981418263742148e-06,
"loss": 0.1433,
"step": 15
},
{
"epoch": 0.060836501901140684,
"grad_norm": 1.2978801538360458,
"learning_rate": 9.975734582035323e-06,
"loss": 0.0885,
"step": 16
},
{
"epoch": 0.06463878326996197,
"grad_norm": 2.0950541376792366,
"learning_rate": 9.96929568447637e-06,
"loss": 0.1319,
"step": 17
},
{
"epoch": 0.06844106463878327,
"grad_norm": 1.0425316398679343,
"learning_rate": 9.96210254835968e-06,
"loss": 0.1279,
"step": 18
},
{
"epoch": 0.07224334600760456,
"grad_norm": 1.7860823823073846,
"learning_rate": 9.954156265457801e-06,
"loss": 0.1857,
"step": 19
},
{
"epoch": 0.07604562737642585,
"grad_norm": 1.7415782346194582,
"learning_rate": 9.945458041855732e-06,
"loss": 0.0717,
"step": 20
},
{
"epoch": 0.07984790874524715,
"grad_norm": 1.0879866640913596,
"learning_rate": 9.936009197767847e-06,
"loss": 0.1016,
"step": 21
},
{
"epoch": 0.08365019011406843,
"grad_norm": 2.2439360478661645,
"learning_rate": 9.925811167337533e-06,
"loss": 0.0935,
"step": 22
},
{
"epoch": 0.08745247148288973,
"grad_norm": 1.8496647469351228,
"learning_rate": 9.91486549841951e-06,
"loss": 0.1117,
"step": 23
},
{
"epoch": 0.09125475285171103,
"grad_norm": 1.1499446513004061,
"learning_rate": 9.903173852344889e-06,
"loss": 0.0786,
"step": 24
},
{
"epoch": 0.09505703422053231,
"grad_norm": 1.0813157058702787,
"learning_rate": 9.890738003669029e-06,
"loss": 0.0466,
"step": 25
},
{
"epoch": 0.09885931558935361,
"grad_norm": 1.1206071268561641,
"learning_rate": 9.877559839902185e-06,
"loss": 0.0473,
"step": 26
},
{
"epoch": 0.10266159695817491,
"grad_norm": 1.2183071418887779,
"learning_rate": 9.863641361223025e-06,
"loss": 0.0625,
"step": 27
},
{
"epoch": 0.10646387832699619,
"grad_norm": 1.7316764015595658,
"learning_rate": 9.848984680175049e-06,
"loss": 0.0485,
"step": 28
},
{
"epoch": 0.11026615969581749,
"grad_norm": 0.6990239085195359,
"learning_rate": 9.833592021345938e-06,
"loss": 0.0573,
"step": 29
},
{
"epoch": 0.11406844106463879,
"grad_norm": 2.2785738442682413,
"learning_rate": 9.817465721029916e-06,
"loss": 0.0796,
"step": 30
},
{
"epoch": 0.11787072243346007,
"grad_norm": 1.0584228821124018,
"learning_rate": 9.800608226873143e-06,
"loss": 0.0403,
"step": 31
},
{
"epoch": 0.12167300380228137,
"grad_norm": 2.5879877206601636,
"learning_rate": 9.783022097502204e-06,
"loss": 0.1062,
"step": 32
},
{
"epoch": 0.12547528517110265,
"grad_norm": 2.5381277344322433,
"learning_rate": 9.764710002135784e-06,
"loss": 0.0769,
"step": 33
},
{
"epoch": 0.12927756653992395,
"grad_norm": 1.0747071356178863,
"learning_rate": 9.745674720179507e-06,
"loss": 0.0544,
"step": 34
},
{
"epoch": 0.13307984790874525,
"grad_norm": 3.102732036616969,
"learning_rate": 9.7259191408041e-06,
"loss": 0.0926,
"step": 35
},
{
"epoch": 0.13688212927756654,
"grad_norm": 1.0789665373993582,
"learning_rate": 9.705446262506858e-06,
"loss": 0.041,
"step": 36
},
{
"epoch": 0.14068441064638784,
"grad_norm": 1.872380809657841,
"learning_rate": 9.684259192656554e-06,
"loss": 0.0987,
"step": 37
},
{
"epoch": 0.1444866920152091,
"grad_norm": 1.6725384234341496,
"learning_rate": 9.66236114702178e-06,
"loss": 0.0664,
"step": 38
},
{
"epoch": 0.1482889733840304,
"grad_norm": 1.3155529867243292,
"learning_rate": 9.639755449282874e-06,
"loss": 0.0471,
"step": 39
},
{
"epoch": 0.1520912547528517,
"grad_norm": 0.7375858425847438,
"learning_rate": 9.616445530527448e-06,
"loss": 0.0542,
"step": 40
},
{
"epoch": 0.155893536121673,
"grad_norm": 2.5798050001278865,
"learning_rate": 9.592434928729617e-06,
"loss": 0.0851,
"step": 41
},
{
"epoch": 0.1596958174904943,
"grad_norm": 0.9563326747806046,
"learning_rate": 9.567727288213005e-06,
"loss": 0.0701,
"step": 42
},
{
"epoch": 0.1634980988593156,
"grad_norm": 1.2201713738820383,
"learning_rate": 9.542326359097619e-06,
"loss": 0.0483,
"step": 43
},
{
"epoch": 0.16730038022813687,
"grad_norm": 1.5349602348665532,
"learning_rate": 9.516235996730645e-06,
"loss": 0.0493,
"step": 44
},
{
"epoch": 0.17110266159695817,
"grad_norm": 1.2129366778348925,
"learning_rate": 9.489460161101291e-06,
"loss": 0.0423,
"step": 45
},
{
"epoch": 0.17490494296577946,
"grad_norm": 1.5701039195877315,
"learning_rate": 9.46200291623974e-06,
"loss": 0.0594,
"step": 46
},
{
"epoch": 0.17870722433460076,
"grad_norm": 1.7001361570347544,
"learning_rate": 9.43386842960031e-06,
"loss": 0.056,
"step": 47
},
{
"epoch": 0.18250950570342206,
"grad_norm": 0.8564787838977387,
"learning_rate": 9.405060971428924e-06,
"loss": 0.0498,
"step": 48
},
{
"epoch": 0.18631178707224336,
"grad_norm": 2.4376315388128056,
"learning_rate": 9.375584914114963e-06,
"loss": 0.0717,
"step": 49
},
{
"epoch": 0.19011406844106463,
"grad_norm": 3.0710870034344704,
"learning_rate": 9.345444731527642e-06,
"loss": 0.0597,
"step": 50
},
{
"epoch": 0.19391634980988592,
"grad_norm": 1.1595171258313646,
"learning_rate": 9.31464499833695e-06,
"loss": 0.0484,
"step": 51
},
{
"epoch": 0.19771863117870722,
"grad_norm": 2.036685842781906,
"learning_rate": 9.283190389319315e-06,
"loss": 0.0533,
"step": 52
},
{
"epoch": 0.20152091254752852,
"grad_norm": 1.6358538836833496,
"learning_rate": 9.251085678648072e-06,
"loss": 0.063,
"step": 53
},
{
"epoch": 0.20532319391634982,
"grad_norm": 2.341827886552907,
"learning_rate": 9.218335739168833e-06,
"loss": 0.0614,
"step": 54
},
{
"epoch": 0.20912547528517111,
"grad_norm": 1.6561642430586991,
"learning_rate": 9.18494554165989e-06,
"loss": 0.0335,
"step": 55
},
{
"epoch": 0.21292775665399238,
"grad_norm": 1.395413976072295,
"learning_rate": 9.150920154077753e-06,
"loss": 0.0536,
"step": 56
},
{
"epoch": 0.21673003802281368,
"grad_norm": 2.657218435113838,
"learning_rate": 9.116264740787937e-06,
"loss": 0.0697,
"step": 57
},
{
"epoch": 0.22053231939163498,
"grad_norm": 3.3032959131412634,
"learning_rate": 9.08098456178111e-06,
"loss": 0.0833,
"step": 58
},
{
"epoch": 0.22433460076045628,
"grad_norm": 1.8964099123844205,
"learning_rate": 9.045084971874738e-06,
"loss": 0.0568,
"step": 59
},
{
"epoch": 0.22813688212927757,
"grad_norm": 1.5300857159970949,
"learning_rate": 9.008571419900334e-06,
"loss": 0.0599,
"step": 60
},
{
"epoch": 0.23193916349809887,
"grad_norm": 1.5891594516335554,
"learning_rate": 8.97144944787643e-06,
"loss": 0.0454,
"step": 61
},
{
"epoch": 0.23574144486692014,
"grad_norm": 1.3527045916224263,
"learning_rate": 8.933724690167417e-06,
"loss": 0.0429,
"step": 62
},
{
"epoch": 0.23954372623574144,
"grad_norm": 3.4176684375694446,
"learning_rate": 8.895402872628352e-06,
"loss": 0.0535,
"step": 63
},
{
"epoch": 0.24334600760456274,
"grad_norm": 0.9708842431312176,
"learning_rate": 8.856489811735904e-06,
"loss": 0.05,
"step": 64
},
{
"epoch": 0.24714828897338403,
"grad_norm": 1.4542670605196113,
"learning_rate": 8.816991413705515e-06,
"loss": 0.0553,
"step": 65
},
{
"epoch": 0.2509505703422053,
"grad_norm": 1.3407992355297549,
"learning_rate": 8.776913673594968e-06,
"loss": 0.0337,
"step": 66
},
{
"epoch": 0.25475285171102663,
"grad_norm": 2.282768165416241,
"learning_rate": 8.736262674394455e-06,
"loss": 0.054,
"step": 67
},
{
"epoch": 0.2585551330798479,
"grad_norm": 2.120541919990877,
"learning_rate": 8.695044586103297e-06,
"loss": 0.0528,
"step": 68
},
{
"epoch": 0.2623574144486692,
"grad_norm": 2.340243548218491,
"learning_rate": 8.653265664793466e-06,
"loss": 0.0559,
"step": 69
},
{
"epoch": 0.2661596958174905,
"grad_norm": 2.6043724991846915,
"learning_rate": 8.610932251660046e-06,
"loss": 0.0788,
"step": 70
},
{
"epoch": 0.26996197718631176,
"grad_norm": 1.981130327340551,
"learning_rate": 8.568050772058763e-06,
"loss": 0.0631,
"step": 71
},
{
"epoch": 0.2737642585551331,
"grad_norm": 3.0833275950321535,
"learning_rate": 8.524627734530738e-06,
"loss": 0.066,
"step": 72
},
{
"epoch": 0.27756653992395436,
"grad_norm": 2.2997642624470354,
"learning_rate": 8.480669729814635e-06,
"loss": 0.056,
"step": 73
},
{
"epoch": 0.2813688212927757,
"grad_norm": 1.4420430072448926,
"learning_rate": 8.436183429846314e-06,
"loss": 0.0425,
"step": 74
},
{
"epoch": 0.28517110266159695,
"grad_norm": 1.6013511726718204,
"learning_rate": 8.39117558674617e-06,
"loss": 0.0501,
"step": 75
},
{
"epoch": 0.2889733840304182,
"grad_norm": 1.4338725585293788,
"learning_rate": 8.345653031794292e-06,
"loss": 0.0422,
"step": 76
},
{
"epoch": 0.29277566539923955,
"grad_norm": 3.890683595146634,
"learning_rate": 8.299622674393615e-06,
"loss": 0.1042,
"step": 77
},
{
"epoch": 0.2965779467680608,
"grad_norm": 2.8479412113795406,
"learning_rate": 8.25309150102121e-06,
"loss": 0.065,
"step": 78
},
{
"epoch": 0.30038022813688214,
"grad_norm": 1.431054324105267,
"learning_rate": 8.206066574167893e-06,
"loss": 0.0458,
"step": 79
},
{
"epoch": 0.3041825095057034,
"grad_norm": 0.8730610840802391,
"learning_rate": 8.158555031266255e-06,
"loss": 0.04,
"step": 80
},
{
"epoch": 0.30798479087452474,
"grad_norm": 2.448239293063525,
"learning_rate": 8.110564083607371e-06,
"loss": 0.0759,
"step": 81
},
{
"epoch": 0.311787072243346,
"grad_norm": 1.8456070372447848,
"learning_rate": 8.06210101524625e-06,
"loss": 0.0576,
"step": 82
},
{
"epoch": 0.3155893536121673,
"grad_norm": 1.834734351428776,
"learning_rate": 8.013173181896283e-06,
"loss": 0.06,
"step": 83
},
{
"epoch": 0.3193916349809886,
"grad_norm": 1.563283754929812,
"learning_rate": 7.963788009812775e-06,
"loss": 0.0513,
"step": 84
},
{
"epoch": 0.3231939163498099,
"grad_norm": 1.1475605231367754,
"learning_rate": 7.913952994665805e-06,
"loss": 0.0543,
"step": 85
},
{
"epoch": 0.3269961977186312,
"grad_norm": 1.4206944686043848,
"learning_rate": 7.863675700402527e-06,
"loss": 0.0374,
"step": 86
},
{
"epoch": 0.33079847908745247,
"grad_norm": 40.37566470088141,
"learning_rate": 7.812963758099118e-06,
"loss": 0.2393,
"step": 87
},
{
"epoch": 0.33460076045627374,
"grad_norm": 0.781171019162742,
"learning_rate": 7.76182486480253e-06,
"loss": 0.0439,
"step": 88
},
{
"epoch": 0.33840304182509506,
"grad_norm": 1.2238520077458774,
"learning_rate": 7.710266782362248e-06,
"loss": 0.0363,
"step": 89
},
{
"epoch": 0.34220532319391633,
"grad_norm": 3.0641582178280307,
"learning_rate": 7.658297336252181e-06,
"loss": 0.0662,
"step": 90
},
{
"epoch": 0.34600760456273766,
"grad_norm": 1.965932071811284,
"learning_rate": 7.605924414382926e-06,
"loss": 0.0472,
"step": 91
},
{
"epoch": 0.34980988593155893,
"grad_norm": 1.755513579015423,
"learning_rate": 7.553155965904535e-06,
"loss": 0.0355,
"step": 92
},
{
"epoch": 0.35361216730038025,
"grad_norm": 1.7758852391959217,
"learning_rate": 7.500000000000001e-06,
"loss": 0.0439,
"step": 93
},
{
"epoch": 0.3574144486692015,
"grad_norm": 1.3555739799872628,
"learning_rate": 7.4464645846696186e-06,
"loss": 0.0503,
"step": 94
},
{
"epoch": 0.3612167300380228,
"grad_norm": 1.4562802703339102,
"learning_rate": 7.392557845506433e-06,
"loss": 0.0451,
"step": 95
},
{
"epoch": 0.3650190114068441,
"grad_norm": 1.242339996611598,
"learning_rate": 7.3382879644629345e-06,
"loss": 0.0372,
"step": 96
},
{
"epoch": 0.3688212927756654,
"grad_norm": 1.7658082693457402,
"learning_rate": 7.283663178609204e-06,
"loss": 0.0516,
"step": 97
},
{
"epoch": 0.3726235741444867,
"grad_norm": 2.308009029051408,
"learning_rate": 7.2286917788826926e-06,
"loss": 0.049,
"step": 98
},
{
"epoch": 0.376425855513308,
"grad_norm": 1.5974409986244866,
"learning_rate": 7.173382108829826e-06,
"loss": 0.052,
"step": 99
},
{
"epoch": 0.38022813688212925,
"grad_norm": 0.8994670069496103,
"learning_rate": 7.117742563339622e-06,
"loss": 0.0256,
"step": 100
},
{
"epoch": 0.3840304182509506,
"grad_norm": 2.4678061096370465,
"learning_rate": 7.061781587369518e-06,
"loss": 0.0666,
"step": 101
},
{
"epoch": 0.38783269961977185,
"grad_norm": 0.866063867737264,
"learning_rate": 7.005507674663594e-06,
"loss": 0.0404,
"step": 102
},
{
"epoch": 0.3916349809885932,
"grad_norm": 1.2929123997330905,
"learning_rate": 6.948929366463397e-06,
"loss": 0.0416,
"step": 103
},
{
"epoch": 0.39543726235741444,
"grad_norm": 1.6302913701612327,
"learning_rate": 6.892055250211552e-06,
"loss": 0.0444,
"step": 104
},
{
"epoch": 0.39923954372623577,
"grad_norm": 2.728101647559586,
"learning_rate": 6.834893958248361e-06,
"loss": 0.0475,
"step": 105
},
{
"epoch": 0.40304182509505704,
"grad_norm": 2.2488039758111915,
"learning_rate": 6.77745416650159e-06,
"loss": 0.0659,
"step": 106
},
{
"epoch": 0.4068441064638783,
"grad_norm": 1.977313537391127,
"learning_rate": 6.719744593169642e-06,
"loss": 0.052,
"step": 107
},
{
"epoch": 0.41064638783269963,
"grad_norm": 5.706465703956973,
"learning_rate": 6.6617739973982985e-06,
"loss": 0.2998,
"step": 108
},
{
"epoch": 0.4144486692015209,
"grad_norm": 3.8000626899961825,
"learning_rate": 6.6035511779512764e-06,
"loss": 0.1164,
"step": 109
},
{
"epoch": 0.41825095057034223,
"grad_norm": 10.502568472372902,
"learning_rate": 6.545084971874738e-06,
"loss": 0.123,
"step": 110
},
{
"epoch": 0.4220532319391635,
"grad_norm": 1.8313837197728655,
"learning_rate": 6.486384253156014e-06,
"loss": 0.1134,
"step": 111
},
{
"epoch": 0.42585551330798477,
"grad_norm": 2.0327190996550177,
"learning_rate": 6.427457931376712e-06,
"loss": 0.0888,
"step": 112
},
{
"epoch": 0.4296577946768061,
"grad_norm": 1.7161432735601356,
"learning_rate": 6.368314950360416e-06,
"loss": 0.0902,
"step": 113
},
{
"epoch": 0.43346007604562736,
"grad_norm": 1.074310172184068,
"learning_rate": 6.308964286815203e-06,
"loss": 0.0581,
"step": 114
},
{
"epoch": 0.4372623574144487,
"grad_norm": 1.6451467195844793,
"learning_rate": 6.249414948971154e-06,
"loss": 0.0503,
"step": 115
},
{
"epoch": 0.44106463878326996,
"grad_norm": 4.814077108982163,
"learning_rate": 6.189675975213094e-06,
"loss": 0.1629,
"step": 116
},
{
"epoch": 0.4448669201520912,
"grad_norm": 1.4504882892841855,
"learning_rate": 6.129756432708739e-06,
"loss": 0.077,
"step": 117
},
{
"epoch": 0.44866920152091255,
"grad_norm": 2.5745840097932517,
"learning_rate": 6.0696654160324875e-06,
"loss": 0.0785,
"step": 118
},
{
"epoch": 0.4524714828897338,
"grad_norm": 1.2717081467380555,
"learning_rate": 6.009412045785051e-06,
"loss": 0.0721,
"step": 119
},
{
"epoch": 0.45627376425855515,
"grad_norm": 1.3500755816952386,
"learning_rate": 5.9490054672091305e-06,
"loss": 0.0585,
"step": 120
},
{
"epoch": 0.4600760456273764,
"grad_norm": 0.6582503184547626,
"learning_rate": 5.888454848801345e-06,
"loss": 0.0647,
"step": 121
},
{
"epoch": 0.46387832699619774,
"grad_norm": 2.057266551706436,
"learning_rate": 5.82776938092065e-06,
"loss": 0.1195,
"step": 122
},
{
"epoch": 0.467680608365019,
"grad_norm": 1.717649125812939,
"learning_rate": 5.766958274393428e-06,
"loss": 0.0762,
"step": 123
},
{
"epoch": 0.4714828897338403,
"grad_norm": 1.0979465765609746,
"learning_rate": 5.706030759115458e-06,
"loss": 0.074,
"step": 124
},
{
"epoch": 0.4752851711026616,
"grad_norm": 2.4576710255834184,
"learning_rate": 5.644996082651018e-06,
"loss": 0.09,
"step": 125
},
{
"epoch": 0.4790874524714829,
"grad_norm": 1.4141031874389902,
"learning_rate": 5.583863508829281e-06,
"loss": 0.0712,
"step": 126
},
{
"epoch": 0.4828897338403042,
"grad_norm": 1.3722960078778967,
"learning_rate": 5.522642316338268e-06,
"loss": 0.0774,
"step": 127
},
{
"epoch": 0.4866920152091255,
"grad_norm": 1.9899235244969848,
"learning_rate": 5.46134179731651e-06,
"loss": 0.0727,
"step": 128
},
{
"epoch": 0.49049429657794674,
"grad_norm": 0.9857281384665181,
"learning_rate": 5.399971255942708e-06,
"loss": 0.0709,
"step": 129
},
{
"epoch": 0.49429657794676807,
"grad_norm": 1.873178689272728,
"learning_rate": 5.338540007023538e-06,
"loss": 0.086,
"step": 130
},
{
"epoch": 0.49809885931558934,
"grad_norm": 1.2985642993407172,
"learning_rate": 5.27705737457985e-06,
"loss": 0.0729,
"step": 131
},
{
"epoch": 0.5019011406844106,
"grad_norm": 2.00845296448955,
"learning_rate": 5.2155326904314795e-06,
"loss": 0.0796,
"step": 132
},
{
"epoch": 0.5057034220532319,
"grad_norm": 1.3854160082227878,
"learning_rate": 5.153975292780852e-06,
"loss": 0.0655,
"step": 133
},
{
"epoch": 0.5095057034220533,
"grad_norm": 0.8110311967262005,
"learning_rate": 5.09239452479565e-06,
"loss": 0.0832,
"step": 134
},
{
"epoch": 0.5133079847908745,
"grad_norm": 1.349215941618463,
"learning_rate": 5.030799733190694e-06,
"loss": 0.0734,
"step": 135
},
{
"epoch": 0.5171102661596958,
"grad_norm": 1.7298488968449564,
"learning_rate": 4.9692002668093075e-06,
"loss": 0.0854,
"step": 136
},
{
"epoch": 0.5209125475285171,
"grad_norm": 1.9286164063763356,
"learning_rate": 4.907605475204352e-06,
"loss": 0.0967,
"step": 137
},
{
"epoch": 0.5247148288973384,
"grad_norm": 1.5511875987133978,
"learning_rate": 4.846024707219149e-06,
"loss": 0.0895,
"step": 138
},
{
"epoch": 0.5285171102661597,
"grad_norm": 1.8297476277504392,
"learning_rate": 4.784467309568524e-06,
"loss": 0.0861,
"step": 139
},
{
"epoch": 0.532319391634981,
"grad_norm": 1.4103839066534454,
"learning_rate": 4.7229426254201504e-06,
"loss": 0.0783,
"step": 140
},
{
"epoch": 0.5361216730038023,
"grad_norm": 2.1074351624344136,
"learning_rate": 4.661459992976463e-06,
"loss": 0.0822,
"step": 141
},
{
"epoch": 0.5399239543726235,
"grad_norm": 2.0662466538476827,
"learning_rate": 4.6000287440572925e-06,
"loss": 0.0923,
"step": 142
},
{
"epoch": 0.5437262357414449,
"grad_norm": 1.9436401310479368,
"learning_rate": 4.53865820268349e-06,
"loss": 0.0973,
"step": 143
},
{
"epoch": 0.5475285171102662,
"grad_norm": 1.6678903520906458,
"learning_rate": 4.477357683661734e-06,
"loss": 0.0877,
"step": 144
},
{
"epoch": 0.5513307984790875,
"grad_norm": 0.809599618210663,
"learning_rate": 4.41613649117072e-06,
"loss": 0.0786,
"step": 145
},
{
"epoch": 0.5551330798479087,
"grad_norm": 1.0977795425796129,
"learning_rate": 4.355003917348985e-06,
"loss": 0.0831,
"step": 146
},
{
"epoch": 0.55893536121673,
"grad_norm": 1.8349191027751834,
"learning_rate": 4.293969240884545e-06,
"loss": 0.0546,
"step": 147
},
{
"epoch": 0.5627376425855514,
"grad_norm": 1.2008364879121194,
"learning_rate": 4.233041725606573e-06,
"loss": 0.0614,
"step": 148
},
{
"epoch": 0.5665399239543726,
"grad_norm": 0.732201424861496,
"learning_rate": 4.17223061907935e-06,
"loss": 0.0734,
"step": 149
},
{
"epoch": 0.5703422053231939,
"grad_norm": 1.225050397564832,
"learning_rate": 4.111545151198657e-06,
"loss": 0.0651,
"step": 150
},
{
"epoch": 0.5741444866920152,
"grad_norm": 1.2302003045619856,
"learning_rate": 4.050994532790871e-06,
"loss": 0.0837,
"step": 151
},
{
"epoch": 0.5779467680608364,
"grad_norm": 1.8397382613360544,
"learning_rate": 3.99058795421495e-06,
"loss": 0.0607,
"step": 152
},
{
"epoch": 0.5817490494296578,
"grad_norm": 1.4093018088954288,
"learning_rate": 3.930334583967514e-06,
"loss": 0.0575,
"step": 153
},
{
"epoch": 0.5855513307984791,
"grad_norm": 1.4975102213218823,
"learning_rate": 3.870243567291263e-06,
"loss": 0.0634,
"step": 154
},
{
"epoch": 0.5893536121673004,
"grad_norm": 1.8275993786721776,
"learning_rate": 3.8103240247869077e-06,
"loss": 0.0477,
"step": 155
},
{
"epoch": 0.5931558935361216,
"grad_norm": 1.5246149628230938,
"learning_rate": 3.7505850510288455e-06,
"loss": 0.0927,
"step": 156
},
{
"epoch": 0.596958174904943,
"grad_norm": 1.07378251359328,
"learning_rate": 3.6910357131847986e-06,
"loss": 0.0798,
"step": 157
},
{
"epoch": 0.6007604562737643,
"grad_norm": 1.2099308034216252,
"learning_rate": 3.6316850496395863e-06,
"loss": 0.0724,
"step": 158
},
{
"epoch": 0.6045627376425855,
"grad_norm": 1.0732935946375615,
"learning_rate": 3.5725420686232903e-06,
"loss": 0.0773,
"step": 159
},
{
"epoch": 0.6083650190114068,
"grad_norm": 0.9225394975341887,
"learning_rate": 3.513615746843987e-06,
"loss": 0.0635,
"step": 160
},
{
"epoch": 0.6121673003802282,
"grad_norm": 2.6158603947076577,
"learning_rate": 3.4549150281252635e-06,
"loss": 0.1059,
"step": 161
},
{
"epoch": 0.6159695817490495,
"grad_norm": 0.9294415514571752,
"learning_rate": 3.3964488220487252e-06,
"loss": 0.0709,
"step": 162
},
{
"epoch": 0.6197718631178707,
"grad_norm": 1.9222737047405196,
"learning_rate": 3.3382260026017027e-06,
"loss": 0.062,
"step": 163
},
{
"epoch": 0.623574144486692,
"grad_norm": 1.906035535415949,
"learning_rate": 3.2802554068303595e-06,
"loss": 0.0967,
"step": 164
},
{
"epoch": 0.6273764258555133,
"grad_norm": 1.7676981737731525,
"learning_rate": 3.22254583349841e-06,
"loss": 0.0961,
"step": 165
},
{
"epoch": 0.6311787072243346,
"grad_norm": 1.605378292764449,
"learning_rate": 3.16510604175164e-06,
"loss": 0.0507,
"step": 166
},
{
"epoch": 0.6349809885931559,
"grad_norm": 1.1486017436180254,
"learning_rate": 3.107944749788449e-06,
"loss": 0.0653,
"step": 167
},
{
"epoch": 0.6387832699619772,
"grad_norm": 1.9815488934112822,
"learning_rate": 3.0510706335366034e-06,
"loss": 0.0687,
"step": 168
},
{
"epoch": 0.6425855513307985,
"grad_norm": 1.747055284984615,
"learning_rate": 2.9944923253364066e-06,
"loss": 0.0753,
"step": 169
},
{
"epoch": 0.6463878326996197,
"grad_norm": 1.5044113934612662,
"learning_rate": 2.9382184126304834e-06,
"loss": 0.0751,
"step": 170
},
{
"epoch": 0.6501901140684411,
"grad_norm": 1.4456809835756477,
"learning_rate": 2.8822574366603804e-06,
"loss": 0.0857,
"step": 171
},
{
"epoch": 0.6539923954372624,
"grad_norm": 0.8421771448658065,
"learning_rate": 2.8266178911701757e-06,
"loss": 0.0754,
"step": 172
},
{
"epoch": 0.6577946768060836,
"grad_norm": 1.6606796114651394,
"learning_rate": 2.771308221117309e-06,
"loss": 0.0633,
"step": 173
},
{
"epoch": 0.6615969581749049,
"grad_norm": 1.0518360600901147,
"learning_rate": 2.7163368213907975e-06,
"loss": 0.0595,
"step": 174
},
{
"epoch": 0.6653992395437263,
"grad_norm": 0.8118249216729664,
"learning_rate": 2.6617120355370667e-06,
"loss": 0.0721,
"step": 175
},
{
"epoch": 0.6692015209125475,
"grad_norm": 2.735849675959911,
"learning_rate": 2.607442154493568e-06,
"loss": 0.0981,
"step": 176
},
{
"epoch": 0.6730038022813688,
"grad_norm": 1.5818010804365756,
"learning_rate": 2.5535354153303827e-06,
"loss": 0.0944,
"step": 177
},
{
"epoch": 0.6768060836501901,
"grad_norm": 2.106035867055531,
"learning_rate": 2.5000000000000015e-06,
"loss": 0.0563,
"step": 178
},
{
"epoch": 0.6806083650190115,
"grad_norm": 1.1144497943115816,
"learning_rate": 2.4468440340954664e-06,
"loss": 0.08,
"step": 179
},
{
"epoch": 0.6844106463878327,
"grad_norm": 0.6703641595119413,
"learning_rate": 2.3940755856170744e-06,
"loss": 0.0681,
"step": 180
},
{
"epoch": 0.688212927756654,
"grad_norm": 1.5423718248019078,
"learning_rate": 2.341702663747819e-06,
"loss": 0.0934,
"step": 181
},
{
"epoch": 0.6920152091254753,
"grad_norm": 2.5974586850504173,
"learning_rate": 2.289733217637753e-06,
"loss": 0.0727,
"step": 182
},
{
"epoch": 0.6958174904942965,
"grad_norm": 1.6582686566551548,
"learning_rate": 2.238175135197471e-06,
"loss": 0.0708,
"step": 183
},
{
"epoch": 0.6996197718631179,
"grad_norm": 1.5364491430874354,
"learning_rate": 2.1870362419008844e-06,
"loss": 0.0896,
"step": 184
},
{
"epoch": 0.7034220532319392,
"grad_norm": 0.8883156220926425,
"learning_rate": 2.136324299597474e-06,
"loss": 0.0805,
"step": 185
},
{
"epoch": 0.7072243346007605,
"grad_norm": 0.722419818120629,
"learning_rate": 2.0860470053341957e-06,
"loss": 0.0677,
"step": 186
},
{
"epoch": 0.7110266159695817,
"grad_norm": 0.8687543599479524,
"learning_rate": 2.0362119901872262e-06,
"loss": 0.0743,
"step": 187
},
{
"epoch": 0.714828897338403,
"grad_norm": 1.1765271610095978,
"learning_rate": 1.9868268181037186e-06,
"loss": 0.073,
"step": 188
},
{
"epoch": 0.7186311787072244,
"grad_norm": 1.57171034016372,
"learning_rate": 1.937898984753751e-06,
"loss": 0.0774,
"step": 189
},
{
"epoch": 0.7224334600760456,
"grad_norm": 2.5653255183794843,
"learning_rate": 1.8894359163926312e-06,
"loss": 0.0845,
"step": 190
},
{
"epoch": 0.7262357414448669,
"grad_norm": 1.8207532400192823,
"learning_rate": 1.8414449687337467e-06,
"loss": 0.0866,
"step": 191
},
{
"epoch": 0.7300380228136882,
"grad_norm": 2.494001462793861,
"learning_rate": 1.7939334258321094e-06,
"loss": 0.0745,
"step": 192
},
{
"epoch": 0.7338403041825095,
"grad_norm": 1.2902772325556104,
"learning_rate": 1.746908498978791e-06,
"loss": 0.0755,
"step": 193
},
{
"epoch": 0.7376425855513308,
"grad_norm": 1.6350962042870758,
"learning_rate": 1.7003773256063882e-06,
"loss": 0.0734,
"step": 194
},
{
"epoch": 0.7414448669201521,
"grad_norm": 0.9676259463093386,
"learning_rate": 1.6543469682057105e-06,
"loss": 0.0816,
"step": 195
},
{
"epoch": 0.7452471482889734,
"grad_norm": 0.5610374914180468,
"learning_rate": 1.60882441325383e-06,
"loss": 0.0754,
"step": 196
},
{
"epoch": 0.7490494296577946,
"grad_norm": 0.7804394462668183,
"learning_rate": 1.5638165701536866e-06,
"loss": 0.0747,
"step": 197
},
{
"epoch": 0.752851711026616,
"grad_norm": 1.550496983980984,
"learning_rate": 1.5193302701853674e-06,
"loss": 0.0791,
"step": 198
},
{
"epoch": 0.7566539923954373,
"grad_norm": 1.9010955783137817,
"learning_rate": 1.475372265469265e-06,
"loss": 0.0732,
"step": 199
},
{
"epoch": 0.7604562737642585,
"grad_norm": 1.6079174758382067,
"learning_rate": 1.4319492279412388e-06,
"loss": 0.0708,
"step": 200
},
{
"epoch": 0.7642585551330798,
"grad_norm": 0.6607128276682519,
"learning_rate": 1.389067748339954e-06,
"loss": 0.0715,
"step": 201
},
{
"epoch": 0.7680608365019012,
"grad_norm": 1.3595868708508696,
"learning_rate": 1.3467343352065349e-06,
"loss": 0.0781,
"step": 202
},
{
"epoch": 0.7718631178707225,
"grad_norm": 2.419173076809107,
"learning_rate": 1.3049554138967052e-06,
"loss": 0.0739,
"step": 203
},
{
"epoch": 0.7756653992395437,
"grad_norm": 1.8044702216287307,
"learning_rate": 1.2637373256055445e-06,
"loss": 0.0825,
"step": 204
},
{
"epoch": 0.779467680608365,
"grad_norm": 0.9717656143179705,
"learning_rate": 1.2230863264050308e-06,
"loss": 0.0777,
"step": 205
},
{
"epoch": 0.7832699619771863,
"grad_norm": 1.181299656152716,
"learning_rate": 1.1830085862944851e-06,
"loss": 0.0726,
"step": 206
},
{
"epoch": 0.7870722433460076,
"grad_norm": 0.9219123710812074,
"learning_rate": 1.1435101882640964e-06,
"loss": 0.0832,
"step": 207
},
{
"epoch": 0.7908745247148289,
"grad_norm": 0.6914642768758692,
"learning_rate": 1.1045971273716476e-06,
"loss": 0.0793,
"step": 208
},
{
"epoch": 0.7946768060836502,
"grad_norm": 2.273763477220486,
"learning_rate": 1.066275309832584e-06,
"loss": 0.0715,
"step": 209
},
{
"epoch": 0.7984790874524715,
"grad_norm": 1.6498287914408991,
"learning_rate": 1.02855055212357e-06,
"loss": 0.0669,
"step": 210
},
{
"epoch": 0.8022813688212928,
"grad_norm": 1.102115306685056,
"learning_rate": 9.91428580099667e-07,
"loss": 0.0775,
"step": 211
},
{
"epoch": 0.8060836501901141,
"grad_norm": 2.4914475016820967,
"learning_rate": 9.549150281252633e-07,
"loss": 0.0677,
"step": 212
},
{
"epoch": 0.8098859315589354,
"grad_norm": 0.6217527292902375,
"learning_rate": 9.190154382188921e-07,
"loss": 0.0704,
"step": 213
},
{
"epoch": 0.8136882129277566,
"grad_norm": 0.776872411148349,
"learning_rate": 8.837352592120646e-07,
"loss": 0.0683,
"step": 214
},
{
"epoch": 0.8174904942965779,
"grad_norm": 1.3830390856962926,
"learning_rate": 8.490798459222477e-07,
"loss": 0.0719,
"step": 215
},
{
"epoch": 0.8212927756653993,
"grad_norm": 0.9016535949900154,
"learning_rate": 8.150544583401116e-07,
"loss": 0.0706,
"step": 216
},
{
"epoch": 0.8250950570342205,
"grad_norm": 1.2565895090887536,
"learning_rate": 7.816642608311692e-07,
"loss": 0.0691,
"step": 217
},
{
"epoch": 0.8288973384030418,
"grad_norm": 0.4685806197849472,
"learning_rate": 7.489143213519301e-07,
"loss": 0.0749,
"step": 218
},
{
"epoch": 0.8326996197718631,
"grad_norm": 1.8324078968655844,
"learning_rate": 7.168096106806871e-07,
"loss": 0.0842,
"step": 219
},
{
"epoch": 0.8365019011406845,
"grad_norm": 0.615073356028593,
"learning_rate": 6.853550016630517e-07,
"loss": 0.0697,
"step": 220
},
{
"epoch": 0.8403041825095057,
"grad_norm": 2.4222883575922154,
"learning_rate": 6.545552684723583e-07,
"loss": 0.0803,
"step": 221
},
{
"epoch": 0.844106463878327,
"grad_norm": 2.325809289721842,
"learning_rate": 6.244150858850368e-07,
"loss": 0.0969,
"step": 222
},
{
"epoch": 0.8479087452471483,
"grad_norm": 1.176358839322873,
"learning_rate": 5.949390285710777e-07,
"loss": 0.0599,
"step": 223
},
{
"epoch": 0.8517110266159695,
"grad_norm": 0.7047528759218481,
"learning_rate": 5.661315703996905e-07,
"loss": 0.0807,
"step": 224
},
{
"epoch": 0.8555133079847909,
"grad_norm": 1.1938640871812451,
"learning_rate": 5.379970837602611e-07,
"loss": 0.0712,
"step": 225
},
{
"epoch": 0.8593155893536122,
"grad_norm": 2.0073147773750635,
"learning_rate": 5.105398388987098e-07,
"loss": 0.0723,
"step": 226
},
{
"epoch": 0.8631178707224335,
"grad_norm": 1.910913263898643,
"learning_rate": 4.837640032693558e-07,
"loss": 0.0795,
"step": 227
},
{
"epoch": 0.8669201520912547,
"grad_norm": 1.8337713325079474,
"learning_rate": 4.576736409023813e-07,
"loss": 0.068,
"step": 228
},
{
"epoch": 0.870722433460076,
"grad_norm": 1.2290049663038816,
"learning_rate": 4.322727117869951e-07,
"loss": 0.0768,
"step": 229
},
{
"epoch": 0.8745247148288974,
"grad_norm": 1.4789730966126406,
"learning_rate": 4.0756507127038494e-07,
"loss": 0.0767,
"step": 230
},
{
"epoch": 0.8783269961977186,
"grad_norm": 1.6910881638069963,
"learning_rate": 3.8355446947255293e-07,
"loss": 0.0665,
"step": 231
},
{
"epoch": 0.8821292775665399,
"grad_norm": 1.033604422841365,
"learning_rate": 3.602445507171276e-07,
"loss": 0.0778,
"step": 232
},
{
"epoch": 0.8859315589353612,
"grad_norm": 0.8013760374315584,
"learning_rate": 3.3763885297822153e-07,
"loss": 0.0657,
"step": 233
},
{
"epoch": 0.8897338403041825,
"grad_norm": 2.429431976291132,
"learning_rate": 3.1574080734344757e-07,
"loss": 0.0644,
"step": 234
},
{
"epoch": 0.8935361216730038,
"grad_norm": 0.9711502835652512,
"learning_rate": 2.9455373749314285e-07,
"loss": 0.0706,
"step": 235
},
{
"epoch": 0.8973384030418251,
"grad_norm": 1.275326544398461,
"learning_rate": 2.7408085919590265e-07,
"loss": 0.0679,
"step": 236
},
{
"epoch": 0.9011406844106464,
"grad_norm": 1.2439206544006953,
"learning_rate": 2.5432527982049424e-07,
"loss": 0.0686,
"step": 237
},
{
"epoch": 0.9049429657794676,
"grad_norm": 1.1151619016488201,
"learning_rate": 2.3528999786421758e-07,
"loss": 0.067,
"step": 238
},
{
"epoch": 0.908745247148289,
"grad_norm": 1.5637158385664465,
"learning_rate": 2.1697790249779638e-07,
"loss": 0.0653,
"step": 239
},
{
"epoch": 0.9125475285171103,
"grad_norm": 0.4492873956448081,
"learning_rate": 1.9939177312685963e-07,
"loss": 0.0782,
"step": 240
},
{
"epoch": 0.9163498098859315,
"grad_norm": 1.1564997677392477,
"learning_rate": 1.825342789700846e-07,
"loss": 0.0608,
"step": 241
},
{
"epoch": 0.9201520912547528,
"grad_norm": 1.8601658350989132,
"learning_rate": 1.664079786540629e-07,
"loss": 0.0853,
"step": 242
},
{
"epoch": 0.9239543726235742,
"grad_norm": 1.227671794720867,
"learning_rate": 1.510153198249531e-07,
"loss": 0.0681,
"step": 243
},
{
"epoch": 0.9277566539923955,
"grad_norm": 1.318746672359122,
"learning_rate": 1.363586387769761e-07,
"loss": 0.0716,
"step": 244
},
{
"epoch": 0.9315589353612167,
"grad_norm": 1.6242713478117103,
"learning_rate": 1.22440160097817e-07,
"loss": 0.0585,
"step": 245
},
{
"epoch": 0.935361216730038,
"grad_norm": 2.4521843943377455,
"learning_rate": 1.0926199633097156e-07,
"loss": 0.0748,
"step": 246
},
{
"epoch": 0.9391634980988594,
"grad_norm": 1.624621392645855,
"learning_rate": 9.682614765511134e-08,
"loss": 0.0778,
"step": 247
},
{
"epoch": 0.9429657794676806,
"grad_norm": 0.4946954920558412,
"learning_rate": 8.513450158049109e-08,
"loss": 0.0787,
"step": 248
},
{
"epoch": 0.9467680608365019,
"grad_norm": 0.5126439315912313,
"learning_rate": 7.418883266246734e-08,
"loss": 0.0779,
"step": 249
},
{
"epoch": 0.9505703422053232,
"grad_norm": 1.9296492595340622,
"learning_rate": 6.399080223215503e-08,
"loss": 0.071,
"step": 250
},
{
"epoch": 0.9543726235741445,
"grad_norm": 2.381450276806091,
"learning_rate": 5.454195814427021e-08,
"loss": 0.0571,
"step": 251
},
{
"epoch": 0.9581749049429658,
"grad_norm": 1.6084565235987887,
"learning_rate": 4.584373454219859e-08,
"loss": 0.0895,
"step": 252
},
{
"epoch": 0.9619771863117871,
"grad_norm": 0.8689683317083515,
"learning_rate": 3.7897451640321326e-08,
"loss": 0.0666,
"step": 253
},
{
"epoch": 0.9657794676806084,
"grad_norm": 1.3522488546526146,
"learning_rate": 3.0704315523631956e-08,
"loss": 0.0611,
"step": 254
},
{
"epoch": 0.9695817490494296,
"grad_norm": 2.2795049559749754,
"learning_rate": 2.426541796467785e-08,
"loss": 0.0536,
"step": 255
},
{
"epoch": 0.973384030418251,
"grad_norm": 1.087776806550556,
"learning_rate": 1.8581736257852756e-08,
"loss": 0.0602,
"step": 256
},
{
"epoch": 0.9771863117870723,
"grad_norm": 1.1862726730320716,
"learning_rate": 1.3654133071059894e-08,
"loss": 0.0722,
"step": 257
},
{
"epoch": 0.9809885931558935,
"grad_norm": 0.7704198670078842,
"learning_rate": 9.48335631477948e-09,
"loss": 0.0639,
"step": 258
},
{
"epoch": 0.9847908745247148,
"grad_norm": 1.0439502260084565,
"learning_rate": 6.070039028550634e-09,
"loss": 0.0623,
"step": 259
},
{
"epoch": 0.9885931558935361,
"grad_norm": 0.9401759231345554,
"learning_rate": 3.41469928488547e-09,
"loss": 0.0735,
"step": 260
},
{
"epoch": 0.9923954372623575,
"grad_norm": 1.8274467631316753,
"learning_rate": 1.5177401106419853e-09,
"loss": 0.0714,
"step": 261
},
{
"epoch": 0.9961977186311787,
"grad_norm": 1.347810152487735,
"learning_rate": 3.7944942584688947e-10,
"loss": 0.0698,
"step": 262
},
{
"epoch": 1.0,
"grad_norm": 1.4596446787851405,
"learning_rate": 0.0,
"loss": 0.0874,
"step": 263
},
{
"epoch": 1.0,
"step": 263,
"total_flos": 8314083559424.0,
"train_loss": 0.04440210763889121,
"train_runtime": 1286.9653,
"train_samples_per_second": 1.632,
"train_steps_per_second": 0.204
}
],
"logging_steps": 1.0,
"max_steps": 263,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 8314083559424.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}