{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9996678844237795,
  "eval_steps": 500,
  "global_step": 1505,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006642311524410495,
      "grad_norm": 1.015625,
      "learning_rate": 1.3245033112582782e-06,
      "loss": 1.8253,
      "step": 1
    },
    {
      "epoch": 0.0033211557622052474,
      "grad_norm": 0.7421875,
      "learning_rate": 6.622516556291391e-06,
      "loss": 1.838,
      "step": 5
    },
    {
      "epoch": 0.006642311524410495,
      "grad_norm": 0.87109375,
      "learning_rate": 1.3245033112582782e-05,
      "loss": 1.823,
      "step": 10
    },
    {
      "epoch": 0.009963467286615742,
      "grad_norm": 0.58984375,
      "learning_rate": 1.9867549668874173e-05,
      "loss": 1.7917,
      "step": 15
    },
    {
      "epoch": 0.01328462304882099,
      "grad_norm": 0.353515625,
      "learning_rate": 2.6490066225165565e-05,
      "loss": 1.7849,
      "step": 20
    },
    {
      "epoch": 0.016605778811026237,
      "grad_norm": 0.375,
      "learning_rate": 3.311258278145696e-05,
      "loss": 1.7819,
      "step": 25
    },
    {
      "epoch": 0.019926934573231483,
      "grad_norm": 0.4609375,
      "learning_rate": 3.9735099337748346e-05,
      "loss": 1.7091,
      "step": 30
    },
    {
      "epoch": 0.023248090335436733,
      "grad_norm": 0.490234375,
      "learning_rate": 4.635761589403974e-05,
      "loss": 1.6498,
      "step": 35
    },
    {
      "epoch": 0.02656924609764198,
      "grad_norm": 2.0,
      "learning_rate": 5.298013245033113e-05,
      "loss": 1.6126,
      "step": 40
    },
    {
      "epoch": 0.029890401859847225,
      "grad_norm": 0.5078125,
      "learning_rate": 5.960264900662252e-05,
      "loss": 1.5847,
      "step": 45
    },
    {
      "epoch": 0.033211557622052475,
      "grad_norm": 0.515625,
      "learning_rate": 6.622516556291392e-05,
      "loss": 1.4876,
      "step": 50
    },
    {
      "epoch": 0.036532713384257724,
      "grad_norm": 0.33203125,
      "learning_rate": 7.284768211920529e-05,
      "loss": 1.4771,
      "step": 55
    },
    {
      "epoch": 0.03985386914646297,
      "grad_norm": 0.234375,
      "learning_rate": 7.947019867549669e-05,
      "loss": 1.4546,
      "step": 60
    },
    {
      "epoch": 0.043175024908668216,
      "grad_norm": 0.6015625,
      "learning_rate": 8.609271523178808e-05,
      "loss": 1.4066,
      "step": 65
    },
    {
      "epoch": 0.046496180670873466,
      "grad_norm": 0.1611328125,
      "learning_rate": 9.271523178807948e-05,
      "loss": 1.4112,
      "step": 70
    },
    {
      "epoch": 0.04981733643307871,
      "grad_norm": 0.189453125,
      "learning_rate": 9.933774834437086e-05,
      "loss": 1.3762,
      "step": 75
    },
    {
      "epoch": 0.05313849219528396,
      "grad_norm": 0.150390625,
      "learning_rate": 0.00010596026490066226,
      "loss": 1.3657,
      "step": 80
    },
    {
      "epoch": 0.05645964795748921,
      "grad_norm": 0.1484375,
      "learning_rate": 0.00011258278145695364,
      "loss": 1.3309,
      "step": 85
    },
    {
      "epoch": 0.05978080371969445,
      "grad_norm": 0.11865234375,
      "learning_rate": 0.00011920529801324504,
      "loss": 1.3321,
      "step": 90
    },
    {
      "epoch": 0.0631019594818997,
      "grad_norm": 0.1083984375,
      "learning_rate": 0.00012582781456953643,
      "loss": 1.3179,
      "step": 95
    },
    {
      "epoch": 0.06642311524410495,
      "grad_norm": 0.095703125,
      "learning_rate": 0.00013245033112582784,
      "loss": 1.3108,
      "step": 100
    },
    {
      "epoch": 0.0697442710063102,
      "grad_norm": 0.09033203125,
      "learning_rate": 0.0001390728476821192,
      "loss": 1.2985,
      "step": 105
    },
    {
      "epoch": 0.07306542676851545,
      "grad_norm": 0.09814453125,
      "learning_rate": 0.00014569536423841059,
      "loss": 1.2727,
      "step": 110
    },
    {
      "epoch": 0.07638658253072068,
      "grad_norm": 0.11328125,
      "learning_rate": 0.000152317880794702,
      "loss": 1.2735,
      "step": 115
    },
    {
      "epoch": 0.07970773829292593,
      "grad_norm": 0.06787109375,
      "learning_rate": 0.00015894039735099338,
      "loss": 1.2625,
      "step": 120
    },
    {
      "epoch": 0.08302889405513118,
      "grad_norm": 0.08056640625,
      "learning_rate": 0.00016556291390728477,
      "loss": 1.2354,
      "step": 125
    },
    {
      "epoch": 0.08635004981733643,
      "grad_norm": 0.08837890625,
      "learning_rate": 0.00017218543046357615,
      "loss": 1.2534,
      "step": 130
    },
    {
      "epoch": 0.08967120557954168,
      "grad_norm": 0.0888671875,
      "learning_rate": 0.00017880794701986757,
      "loss": 1.2281,
      "step": 135
    },
    {
      "epoch": 0.09299236134174693,
      "grad_norm": 0.146484375,
      "learning_rate": 0.00018543046357615895,
      "loss": 1.2436,
      "step": 140
    },
    {
      "epoch": 0.09631351710395218,
      "grad_norm": 0.0986328125,
      "learning_rate": 0.00019205298013245034,
      "loss": 1.2399,
      "step": 145
    },
    {
      "epoch": 0.09963467286615742,
      "grad_norm": 0.1142578125,
      "learning_rate": 0.00019867549668874172,
      "loss": 1.2157,
      "step": 150
    },
    {
      "epoch": 0.10295582862836267,
      "grad_norm": 0.107421875,
      "learning_rate": 0.00019999569325372926,
      "loss": 1.2191,
      "step": 155
    },
    {
      "epoch": 0.10627698439056792,
      "grad_norm": 0.08154296875,
      "learning_rate": 0.0001999781977327807,
      "loss": 1.2421,
      "step": 160
    },
    {
      "epoch": 0.10959814015277317,
      "grad_norm": 0.0927734375,
      "learning_rate": 0.00019994724661834065,
      "loss": 1.2209,
      "step": 165
    },
    {
      "epoch": 0.11291929591497842,
      "grad_norm": 0.11572265625,
      "learning_rate": 0.0001999028440759734,
      "loss": 1.1945,
      "step": 170
    },
    {
      "epoch": 0.11624045167718366,
      "grad_norm": 0.11083984375,
      "learning_rate": 0.00019984499608160746,
      "loss": 1.2276,
      "step": 175
    },
    {
      "epoch": 0.1195616074393889,
      "grad_norm": 0.07177734375,
      "learning_rate": 0.00019977371042073115,
      "loss": 1.2141,
      "step": 180
    },
    {
      "epoch": 0.12288276320159415,
      "grad_norm": 0.126953125,
      "learning_rate": 0.00019968899668734502,
      "loss": 1.2088,
      "step": 185
    },
    {
      "epoch": 0.1262039189637994,
      "grad_norm": 0.083984375,
      "learning_rate": 0.00019959086628267033,
      "loss": 1.2116,
      "step": 190
    },
    {
      "epoch": 0.12952507472600466,
      "grad_norm": 0.09033203125,
      "learning_rate": 0.00019947933241361483,
      "loss": 1.2059,
      "step": 195
    },
    {
      "epoch": 0.1328462304882099,
      "grad_norm": 0.0859375,
      "learning_rate": 0.00019935441009099524,
      "loss": 1.2049,
      "step": 200
    },
    {
      "epoch": 0.13616738625041513,
      "grad_norm": 0.130859375,
      "learning_rate": 0.00019921611612751708,
      "loss": 1.1862,
      "step": 205
    },
    {
      "epoch": 0.1394885420126204,
      "grad_norm": 0.091796875,
      "learning_rate": 0.00019906446913551173,
      "loss": 1.1948,
      "step": 210
    },
    {
      "epoch": 0.14280969777482563,
      "grad_norm": 0.0830078125,
      "learning_rate": 0.00019889948952443173,
      "loss": 1.1951,
      "step": 215
    },
    {
      "epoch": 0.1461308535370309,
      "grad_norm": 0.07763671875,
      "learning_rate": 0.0001987211994981039,
      "loss": 1.1761,
      "step": 220
    },
    {
      "epoch": 0.14945200929923613,
      "grad_norm": 0.08056640625,
      "learning_rate": 0.00019852962305174078,
      "loss": 1.2026,
      "step": 225
    },
    {
      "epoch": 0.15277316506144137,
      "grad_norm": 0.2421875,
      "learning_rate": 0.00019832478596871166,
      "loss": 1.1896,
      "step": 230
    },
    {
      "epoch": 0.15609432082364663,
      "grad_norm": 0.0888671875,
      "learning_rate": 0.00019810671581707223,
      "loss": 1.1745,
      "step": 235
    },
    {
      "epoch": 0.15941547658585187,
      "grad_norm": 0.09228515625,
      "learning_rate": 0.00019787544194585434,
      "loss": 1.1878,
      "step": 240
    },
    {
      "epoch": 0.16273663234805713,
      "grad_norm": 0.08203125,
      "learning_rate": 0.00019763099548111615,
      "loss": 1.1851,
      "step": 245
    },
    {
      "epoch": 0.16605778811026237,
      "grad_norm": 0.10009765625,
      "learning_rate": 0.00019737340932175296,
      "loss": 1.1765,
      "step": 250
    },
    {
      "epoch": 0.16937894387246763,
      "grad_norm": 0.1044921875,
      "learning_rate": 0.00019710271813506953,
      "loss": 1.1818,
      "step": 255
    },
    {
      "epoch": 0.17270009963467287,
      "grad_norm": 0.09521484375,
      "learning_rate": 0.00019681895835211438,
      "loss": 1.1489,
      "step": 260
    },
    {
      "epoch": 0.1760212553968781,
      "grad_norm": 0.134765625,
      "learning_rate": 0.00019652216816277656,
      "loss": 1.1565,
      "step": 265
    },
    {
      "epoch": 0.17934241115908336,
      "grad_norm": 0.09912109375,
      "learning_rate": 0.0001962123875106462,
      "loss": 1.1663,
      "step": 270
    },
    {
      "epoch": 0.1826635669212886,
      "grad_norm": 0.1005859375,
      "learning_rate": 0.0001958896580876383,
      "loss": 1.187,
      "step": 275
    },
    {
      "epoch": 0.18598472268349386,
      "grad_norm": 0.0791015625,
      "learning_rate": 0.00019555402332838178,
      "loss": 1.1769,
      "step": 280
    },
    {
      "epoch": 0.1893058784456991,
      "grad_norm": 0.0791015625,
      "learning_rate": 0.00019520552840437394,
      "loss": 1.1765,
      "step": 285
    },
    {
      "epoch": 0.19262703420790436,
      "grad_norm": 0.1005859375,
      "learning_rate": 0.00019484422021790083,
      "loss": 1.1769,
      "step": 290
    },
    {
      "epoch": 0.1959481899701096,
      "grad_norm": 0.1123046875,
      "learning_rate": 0.00019447014739572502,
      "loss": 1.1654,
      "step": 295
    },
    {
      "epoch": 0.19926934573231483,
      "grad_norm": 0.1396484375,
      "learning_rate": 0.0001940833602825411,
      "loss": 1.1695,
      "step": 300
    },
    {
      "epoch": 0.2025905014945201,
      "grad_norm": 0.11376953125,
      "learning_rate": 0.00019368391093420003,
      "loss": 1.1685,
      "step": 305
    },
    {
      "epoch": 0.20591165725672533,
      "grad_norm": 0.09423828125,
      "learning_rate": 0.0001932718531107033,
      "loss": 1.1502,
      "step": 310
    },
    {
      "epoch": 0.2092328130189306,
      "grad_norm": 0.09716796875,
      "learning_rate": 0.00019284724226896737,
      "loss": 1.1694,
      "step": 315
    },
    {
      "epoch": 0.21255396878113583,
      "grad_norm": 0.12158203125,
      "learning_rate": 0.0001924101355553603,
      "loss": 1.1701,
      "step": 320
    },
    {
      "epoch": 0.2158751245433411,
      "grad_norm": 0.0908203125,
      "learning_rate": 0.0001919605917980104,
      "loss": 1.1501,
      "step": 325
    },
    {
      "epoch": 0.21919628030554633,
      "grad_norm": 0.1572265625,
      "learning_rate": 0.00019149867149888906,
      "loss": 1.1645,
      "step": 330
    },
    {
      "epoch": 0.22251743606775157,
      "grad_norm": 0.0908203125,
      "learning_rate": 0.00019102443682566793,
      "loss": 1.1432,
      "step": 335
    },
    {
      "epoch": 0.22583859182995683,
      "grad_norm": 0.126953125,
      "learning_rate": 0.00019053795160335214,
      "loss": 1.1632,
      "step": 340
    },
    {
      "epoch": 0.22915974759216207,
      "grad_norm": 0.09423828125,
      "learning_rate": 0.00019003928130569039,
      "loss": 1.1464,
      "step": 345
    },
    {
      "epoch": 0.23248090335436733,
      "grad_norm": 0.1064453125,
      "learning_rate": 0.00018952849304636307,
      "loss": 1.1568,
      "step": 350
    },
    {
      "epoch": 0.23580205911657257,
      "grad_norm": 0.107421875,
      "learning_rate": 0.00018900565556994984,
      "loss": 1.156,
      "step": 355
    },
    {
      "epoch": 0.2391232148787778,
      "grad_norm": 0.1123046875,
      "learning_rate": 0.0001884708392426776,
      "loss": 1.1471,
      "step": 360
    },
    {
      "epoch": 0.24244437064098306,
      "grad_norm": 0.09521484375,
      "learning_rate": 0.00018792411604295014,
      "loss": 1.1302,
      "step": 365
    },
    {
      "epoch": 0.2457655264031883,
      "grad_norm": 0.10107421875,
      "learning_rate": 0.0001873655595516611,
      "loss": 1.1276,
      "step": 370
    },
    {
      "epoch": 0.24908668216539356,
      "grad_norm": 0.279296875,
      "learning_rate": 0.0001867952449422909,
      "loss": 1.165,
      "step": 375
    },
    {
      "epoch": 0.2524078379275988,
      "grad_norm": 0.19921875,
      "learning_rate": 0.0001862132489707895,
      "loss": 1.1524,
      "step": 380
    },
    {
      "epoch": 0.25572899368980406,
      "grad_norm": 0.435546875,
      "learning_rate": 0.00018561964996524627,
      "loss": 1.1459,
      "step": 385
    },
    {
      "epoch": 0.2590501494520093,
      "grad_norm": 0.08935546875,
      "learning_rate": 0.00018501452781534812,
      "loss": 1.1375,
      "step": 390
    },
    {
      "epoch": 0.26237130521421453,
      "grad_norm": 0.1083984375,
      "learning_rate": 0.00018439796396162755,
      "loss": 1.1394,
      "step": 395
    },
    {
      "epoch": 0.2656924609764198,
      "grad_norm": 0.109375,
      "learning_rate": 0.00018377004138450196,
      "loss": 1.146,
      "step": 400
    },
    {
      "epoch": 0.26901361673862506,
      "grad_norm": 0.0947265625,
      "learning_rate": 0.00018313084459310568,
      "loss": 1.1391,
      "step": 405
    },
    {
      "epoch": 0.27233477250083027,
      "grad_norm": 0.421875,
      "learning_rate": 0.0001824804596139163,
      "loss": 1.1368,
      "step": 410
    },
    {
      "epoch": 0.27565592826303553,
      "grad_norm": 0.095703125,
      "learning_rate": 0.00018181897397917673,
      "loss": 1.1573,
      "step": 415
    },
    {
      "epoch": 0.2789770840252408,
      "grad_norm": 0.09521484375,
      "learning_rate": 0.00018114647671511473,
      "loss": 1.1502,
      "step": 420
    },
    {
      "epoch": 0.282298239787446,
      "grad_norm": 0.09765625,
      "learning_rate": 0.00018046305832996126,
      "loss": 1.1461,
      "step": 425
    },
    {
      "epoch": 0.28561939554965127,
      "grad_norm": 0.1044921875,
      "learning_rate": 0.00017976881080176938,
      "loss": 1.163,
      "step": 430
    },
    {
      "epoch": 0.28894055131185653,
      "grad_norm": 0.10546875,
      "learning_rate": 0.00017906382756603534,
      "loss": 1.155,
      "step": 435
    },
    {
      "epoch": 0.2922617070740618,
      "grad_norm": 0.1123046875,
      "learning_rate": 0.0001783482035031236,
      "loss": 1.1576,
      "step": 440
    },
    {
      "epoch": 0.295582862836267,
      "grad_norm": 0.1142578125,
      "learning_rate": 0.00017762203492549728,
      "loss": 1.1548,
      "step": 445
    },
    {
      "epoch": 0.29890401859847227,
      "grad_norm": 0.1259765625,
      "learning_rate": 0.000176885419564756,
      "loss": 1.1501,
      "step": 450
    },
    {
      "epoch": 0.30222517436067753,
      "grad_norm": 0.0869140625,
      "learning_rate": 0.00017613845655848248,
      "loss": 1.1265,
      "step": 455
    },
    {
      "epoch": 0.30554633012288274,
      "grad_norm": 0.0966796875,
      "learning_rate": 0.00017538124643690033,
      "loss": 1.1424,
      "step": 460
    },
    {
      "epoch": 0.308867485885088,
      "grad_norm": 0.12158203125,
      "learning_rate": 0.00017461389110934382,
      "loss": 1.1632,
      "step": 465
    },
    {
      "epoch": 0.31218864164729326,
      "grad_norm": 0.1279296875,
      "learning_rate": 0.00017383649385054276,
      "loss": 1.1293,
      "step": 470
    },
    {
      "epoch": 0.3155097974094985,
      "grad_norm": 0.103515625,
      "learning_rate": 0.000173049159286723,
      "loss": 1.1472,
      "step": 475
    },
    {
      "epoch": 0.31883095317170373,
      "grad_norm": 0.1025390625,
      "learning_rate": 0.0001722519933815253,
      "loss": 1.1435,
      "step": 480
    },
    {
      "epoch": 0.322152108933909,
      "grad_norm": 0.0927734375,
      "learning_rate": 0.0001714451034217443,
      "loss": 1.1612,
      "step": 485
    },
    {
      "epoch": 0.32547326469611426,
      "grad_norm": 0.0859375,
      "learning_rate": 0.0001706285980028892,
      "loss": 1.1242,
      "step": 490
    },
    {
      "epoch": 0.32879442045831947,
      "grad_norm": 0.09228515625,
      "learning_rate": 0.00016980258701456845,
      "loss": 1.1492,
      "step": 495
    },
    {
      "epoch": 0.33211557622052473,
      "grad_norm": 0.08642578125,
      "learning_rate": 0.00016896718162570009,
      "loss": 1.1566,
      "step": 500
    },
    {
      "epoch": 0.33543673198273,
      "grad_norm": 0.09912109375,
      "learning_rate": 0.00016812249426955032,
      "loss": 1.1261,
      "step": 505
    },
    {
      "epoch": 0.33875788774493526,
      "grad_norm": 0.125,
      "learning_rate": 0.00016726863862860146,
      "loss": 1.1389,
      "step": 510
    },
    {
      "epoch": 0.34207904350714047,
      "grad_norm": 0.09130859375,
      "learning_rate": 0.00016640572961925182,
      "loss": 1.1283,
      "step": 515
    },
    {
      "epoch": 0.34540019926934573,
      "grad_norm": 0.091796875,
      "learning_rate": 0.00016553388337635,
      "loss": 1.1372,
      "step": 520
    },
    {
      "epoch": 0.348721355031551,
      "grad_norm": 0.09130859375,
      "learning_rate": 0.00016465321723756464,
      "loss": 1.1392,
      "step": 525
    },
    {
      "epoch": 0.3520425107937562,
      "grad_norm": 0.09619140625,
      "learning_rate": 0.00016376384972759236,
      "loss": 1.1359,
      "step": 530
    },
    {
      "epoch": 0.35536366655596147,
      "grad_norm": 0.08837890625,
      "learning_rate": 0.00016286590054220642,
      "loss": 1.129,
      "step": 535
    },
    {
      "epoch": 0.35868482231816673,
      "grad_norm": 0.10400390625,
      "learning_rate": 0.00016195949053214709,
      "loss": 1.1348,
      "step": 540
    },
    {
      "epoch": 0.362005978080372,
      "grad_norm": 0.1005859375,
      "learning_rate": 0.00016104474168685725,
      "loss": 1.1489,
      "step": 545
    },
    {
      "epoch": 0.3653271338425772,
      "grad_norm": 0.1005859375,
      "learning_rate": 0.000160121777118064,
      "loss": 1.1367,
      "step": 550
    },
    {
      "epoch": 0.36864828960478246,
      "grad_norm": 0.09375,
      "learning_rate": 0.0001591907210432102,
      "loss": 1.1236,
      "step": 555
    },
    {
      "epoch": 0.3719694453669877,
      "grad_norm": 0.0888671875,
      "learning_rate": 0.00015825169876873592,
      "loss": 1.1313,
      "step": 560
    },
    {
      "epoch": 0.37529060112919294,
      "grad_norm": 0.1328125,
      "learning_rate": 0.0001573048366732147,
      "loss": 1.1452,
      "step": 565
    },
    {
      "epoch": 0.3786117568913982,
      "grad_norm": 0.1572265625,
      "learning_rate": 0.00015635026219034445,
      "loss": 1.1364,
      "step": 570
    },
    {
      "epoch": 0.38193291265360346,
      "grad_norm": 0.09326171875,
      "learning_rate": 0.00015538810379179694,
      "loss": 1.1348,
      "step": 575
    },
    {
      "epoch": 0.3852540684158087,
      "grad_norm": 0.10595703125,
      "learning_rate": 0.0001544184909699272,
      "loss": 1.1367,
      "step": 580
    },
    {
      "epoch": 0.38857522417801393,
      "grad_norm": 0.12451171875,
      "learning_rate": 0.00015344155422034609,
      "loss": 1.1194,
      "step": 585
    },
    {
      "epoch": 0.3918963799402192,
      "grad_norm": 0.11376953125,
      "learning_rate": 0.00015245742502435712,
      "loss": 1.1346,
      "step": 590
    },
    {
      "epoch": 0.39521753570242446,
      "grad_norm": 0.0986328125,
      "learning_rate": 0.00015146623583126134,
      "loss": 1.1223,
      "step": 595
    },
    {
      "epoch": 0.39853869146462967,
      "grad_norm": 0.0869140625,
      "learning_rate": 0.00015046812004053134,
      "loss": 1.1088,
      "step": 600
    },
    {
      "epoch": 0.40185984722683493,
      "grad_norm": 0.08935546875,
      "learning_rate": 0.00014946321198385795,
      "loss": 1.1381,
      "step": 605
    },
    {
      "epoch": 0.4051810029890402,
      "grad_norm": 0.09375,
      "learning_rate": 0.00014845164690707087,
      "loss": 1.1354,
      "step": 610
    },
    {
      "epoch": 0.40850215875124546,
      "grad_norm": 0.10400390625,
      "learning_rate": 0.00014743356095193666,
      "loss": 1.1204,
      "step": 615
    },
    {
      "epoch": 0.41182331451345067,
      "grad_norm": 0.1533203125,
      "learning_rate": 0.00014640909113783622,
      "loss": 1.1322,
      "step": 620
    },
    {
      "epoch": 0.41514447027565593,
      "grad_norm": 0.10693359375,
      "learning_rate": 0.00014537837534332385,
      "loss": 1.1271,
      "step": 625
    },
    {
      "epoch": 0.4184656260378612,
      "grad_norm": 0.09228515625,
      "learning_rate": 0.0001443415522875708,
      "loss": 1.1458,
      "step": 630
    },
    {
      "epoch": 0.4217867818000664,
      "grad_norm": 0.091796875,
      "learning_rate": 0.0001432987615116958,
      "loss": 1.133,
      "step": 635
    },
    {
      "epoch": 0.42510793756227166,
      "grad_norm": 0.08349609375,
      "learning_rate": 0.00014225014335998492,
      "loss": 1.1346,
      "step": 640
    },
    {
      "epoch": 0.42842909332447693,
      "grad_norm": 0.0869140625,
      "learning_rate": 0.0001411958389610031,
      "loss": 1.1061,
      "step": 645
    },
    {
      "epoch": 0.4317502490866822,
      "grad_norm": 0.09912109375,
      "learning_rate": 0.00014013599020860047,
      "loss": 1.1341,
      "step": 650
    },
    {
      "epoch": 0.4350714048488874,
      "grad_norm": 0.08837890625,
      "learning_rate": 0.00013907073974281562,
      "loss": 1.1186,
      "step": 655
    },
    {
      "epoch": 0.43839256061109266,
      "grad_norm": 0.08642578125,
      "learning_rate": 0.00013800023093067814,
      "loss": 1.1225,
      "step": 660
    },
    {
      "epoch": 0.4417137163732979,
      "grad_norm": 0.10400390625,
      "learning_rate": 0.00013692460784691356,
      "loss": 1.125,
      "step": 665
    },
    {
      "epoch": 0.44503487213550313,
      "grad_norm": 0.09375,
      "learning_rate": 0.0001358440152545533,
      "loss": 1.1509,
      "step": 670
    },
    {
      "epoch": 0.4483560278977084,
      "grad_norm": 0.08837890625,
      "learning_rate": 0.0001347585985854512,
      "loss": 1.1305,
      "step": 675
    },
    {
      "epoch": 0.45167718365991366,
      "grad_norm": 0.08642578125,
      "learning_rate": 0.000133668503920711,
      "loss": 1.1143,
      "step": 680
    },
    {
      "epoch": 0.4549983394221189,
      "grad_norm": 0.0927734375,
      "learning_rate": 0.0001325738779710257,
      "loss": 1.1207,
      "step": 685
    },
    {
      "epoch": 0.45831949518432413,
      "grad_norm": 0.091796875,
      "learning_rate": 0.00013147486805693256,
      "loss": 1.1355,
      "step": 690
    },
    {
      "epoch": 0.4616406509465294,
      "grad_norm": 0.10205078125,
      "learning_rate": 0.0001303716220889859,
      "loss": 1.1294,
      "step": 695
    },
    {
      "epoch": 0.46496180670873466,
      "grad_norm": 0.09326171875,
      "learning_rate": 0.00012926428854785052,
      "loss": 1.1368,
      "step": 700
    },
    {
      "epoch": 0.46828296247093987,
      "grad_norm": 0.08984375,
      "learning_rate": 0.00012815301646431845,
      "loss": 1.1347,
      "step": 705
    },
    {
      "epoch": 0.47160411823314513,
      "grad_norm": 0.10302734375,
      "learning_rate": 0.00012703795539925142,
      "loss": 1.129,
      "step": 710
    },
    {
      "epoch": 0.4749252739953504,
      "grad_norm": 0.10791015625,
      "learning_rate": 0.00012591925542345243,
      "loss": 1.1162,
      "step": 715
    },
    {
      "epoch": 0.4782464297575556,
      "grad_norm": 0.1025390625,
      "learning_rate": 0.0001247970670974682,
      "loss": 1.147,
      "step": 720
    },
    {
      "epoch": 0.48156758551976087,
      "grad_norm": 0.10546875,
      "learning_rate": 0.00012367154145132608,
      "loss": 1.1165,
      "step": 725
    },
    {
      "epoch": 0.48488874128196613,
      "grad_norm": 0.1103515625,
      "learning_rate": 0.00012254282996420753,
      "loss": 1.1372,
      "step": 730
    },
    {
      "epoch": 0.4882098970441714,
      "grad_norm": 0.107421875,
      "learning_rate": 0.0001214110845440613,
      "loss": 1.1144,
      "step": 735
    },
    {
      "epoch": 0.4915310528063766,
      "grad_norm": 0.1044921875,
      "learning_rate": 0.00012027645750715892,
      "loss": 1.1397,
      "step": 740
    },
    {
      "epoch": 0.49485220856858186,
      "grad_norm": 0.1025390625,
      "learning_rate": 0.0001191391015575951,
      "loss": 1.1364,
      "step": 745
    },
    {
      "epoch": 0.4981733643307871,
      "grad_norm": 0.087890625,
      "learning_rate": 0.00011799916976673589,
      "loss": 1.1234,
      "step": 750
    },
    {
      "epoch": 0.5014945200929923,
      "grad_norm": 0.111328125,
      "learning_rate": 0.00011685681555261786,
      "loss": 1.1223,
      "step": 755
    },
    {
      "epoch": 0.5048156758551976,
      "grad_norm": 0.0966796875,
      "learning_rate": 0.00011571219265929998,
      "loss": 1.1357,
      "step": 760
    },
    {
      "epoch": 0.5081368316174029,
      "grad_norm": 0.0947265625,
      "learning_rate": 0.00011456545513617198,
      "loss": 1.133,
      "step": 765
    },
    {
      "epoch": 0.5114579873796081,
      "grad_norm": 0.12890625,
      "learning_rate": 0.00011341675731722175,
      "loss": 1.1346,
      "step": 770
    },
    {
      "epoch": 0.5147791431418134,
      "grad_norm": 0.1611328125,
      "learning_rate": 0.00011226625380026407,
      "loss": 1.1094,
      "step": 775
    },
    {
      "epoch": 0.5181002989040187,
      "grad_norm": 0.1376953125,
      "learning_rate": 0.0001111140994261341,
      "loss": 1.1246,
      "step": 780
    },
    {
      "epoch": 0.5214214546662238,
      "grad_norm": 0.09521484375,
      "learning_rate": 0.00010996044925784806,
      "loss": 1.0992,
      "step": 785
    },
    {
      "epoch": 0.5247426104284291,
      "grad_norm": 0.1025390625,
      "learning_rate": 0.00010880545855973405,
      "loss": 1.1271,
      "step": 790
    },
    {
      "epoch": 0.5280637661906343,
      "grad_norm": 0.087890625,
      "learning_rate": 0.00010764928277653576,
      "loss": 1.1272,
      "step": 795
    },
    {
      "epoch": 0.5313849219528396,
      "grad_norm": 0.0927734375,
      "learning_rate": 0.00010649207751249188,
      "loss": 1.1211,
      "step": 800
    },
    {
      "epoch": 0.5347060777150449,
      "grad_norm": 0.09716796875,
      "learning_rate": 0.0001053339985103941,
      "loss": 1.1108,
      "step": 805
    },
    {
      "epoch": 0.5380272334772501,
      "grad_norm": 0.09814453125,
      "learning_rate": 0.00010417520163062626,
      "loss": 1.1259,
      "step": 810
    },
    {
      "epoch": 0.5413483892394554,
      "grad_norm": 0.0849609375,
      "learning_rate": 0.00010301584283018813,
      "loss": 1.1131,
      "step": 815
    },
    {
      "epoch": 0.5446695450016605,
      "grad_norm": 0.09228515625,
      "learning_rate": 0.00010185607814170561,
      "loss": 1.1278,
      "step": 820
    },
    {
      "epoch": 0.5479907007638658,
      "grad_norm": 0.09765625,
      "learning_rate": 0.00010069606365243123,
      "loss": 1.1259,
      "step": 825
    },
    {
      "epoch": 0.5513118565260711,
      "grad_norm": 0.08349609375,
      "learning_rate": 9.953595548323699e-05,
      "loss": 1.1277,
      "step": 830
    },
    {
      "epoch": 0.5546330122882763,
      "grad_norm": 0.11083984375,
      "learning_rate": 9.837590976760282e-05,
      "loss": 1.125,
      "step": 835
    },
    {
      "epoch": 0.5579541680504816,
      "grad_norm": 0.10302734375,
      "learning_rate": 9.721608263060341e-05,
      "loss": 1.1217,
      "step": 840
    },
    {
      "epoch": 0.5612753238126869,
      "grad_norm": 0.109375,
      "learning_rate": 9.605663016789583e-05,
      "loss": 1.1234,
      "step": 845
    },
    {
      "epoch": 0.564596479574892,
      "grad_norm": 0.08544921875,
      "learning_rate": 9.489770842471159e-05,
      "loss": 1.1118,
      "step": 850
    },
    {
      "epoch": 0.5679176353370973,
      "grad_norm": 0.1005859375,
      "learning_rate": 9.373947337485522e-05,
      "loss": 1.1207,
      "step": 855
    },
    {
      "epoch": 0.5712387910993025,
      "grad_norm": 0.2119140625,
      "learning_rate": 9.258208089971232e-05,
      "loss": 1.1324,
      "step": 860
    },
    {
      "epoch": 0.5745599468615078,
      "grad_norm": 0.09423828125,
      "learning_rate": 9.142568676727043e-05,
      "loss": 1.1263,
      "step": 865
    },
    {
      "epoch": 0.5778811026237131,
      "grad_norm": 0.10498046875,
      "learning_rate": 9.027044661115485e-05,
      "loss": 1.1263,
      "step": 870
    },
    {
      "epoch": 0.5812022583859183,
      "grad_norm": 0.10498046875,
      "learning_rate": 8.911651590968259e-05,
      "loss": 1.1125,
      "step": 875
    },
    {
      "epoch": 0.5845234141481236,
      "grad_norm": 0.08740234375,
      "learning_rate": 8.79640499649374e-05,
      "loss": 1.1256,
      "step": 880
    },
    {
      "epoch": 0.5878445699103287,
      "grad_norm": 0.0888671875,
      "learning_rate": 8.68132038818684e-05,
      "loss": 1.1185,
      "step": 885
    },
    {
      "epoch": 0.591165725672534,
      "grad_norm": 0.08447265625,
      "learning_rate": 8.5664132547415e-05,
      "loss": 1.146,
      "step": 890
    },
    {
      "epoch": 0.5944868814347393,
      "grad_norm": 0.0791015625,
      "learning_rate": 8.451699060966173e-05,
      "loss": 1.105,
      "step": 895
    },
    {
      "epoch": 0.5978080371969445,
      "grad_norm": 0.1630859375,
      "learning_rate": 8.33719324570247e-05,
      "loss": 1.1334,
      "step": 900
    },
    {
      "epoch": 0.6011291929591498,
      "grad_norm": 0.0888671875,
      "learning_rate": 8.222911219747316e-05,
      "loss": 1.1373,
      "step": 905
    },
    {
      "epoch": 0.6044503487213551,
      "grad_norm": 0.1044921875,
      "learning_rate": 8.108868363778894e-05,
      "loss": 1.1089,
      "step": 910
    },
    {
      "epoch": 0.6077715044835603,
      "grad_norm": 0.1240234375,
      "learning_rate": 7.995080026286631e-05,
      "loss": 1.1296,
      "step": 915
    },
    {
      "epoch": 0.6110926602457655,
      "grad_norm": 0.0986328125,
      "learning_rate": 7.881561521505514e-05,
      "loss": 1.1318,
      "step": 920
    },
    {
      "epoch": 0.6144138160079707,
      "grad_norm": 0.09521484375,
      "learning_rate": 7.768328127355007e-05,
      "loss": 1.1231,
      "step": 925
    },
    {
      "epoch": 0.617734971770176,
      "grad_norm": 0.0859375,
      "learning_rate": 7.655395083382899e-05,
      "loss": 1.13,
      "step": 930
    },
    {
      "epoch": 0.6210561275323813,
      "grad_norm": 0.0966796875,
      "learning_rate": 7.542777588714256e-05,
      "loss": 1.1109,
      "step": 935
    },
    {
      "epoch": 0.6243772832945865,
      "grad_norm": 0.09423828125,
      "learning_rate": 7.430490800005854e-05,
      "loss": 1.1166,
      "step": 940
    },
    {
      "epoch": 0.6276984390567918,
      "grad_norm": 0.09619140625,
      "learning_rate": 7.318549829406318e-05,
      "loss": 1.1117,
      "step": 945
    },
    {
      "epoch": 0.631019594818997,
      "grad_norm": 0.08740234375,
      "learning_rate": 7.206969742522252e-05,
      "loss": 1.121,
      "step": 950
    },
    {
      "epoch": 0.6343407505812022,
      "grad_norm": 0.08740234375,
      "learning_rate": 7.095765556390606e-05,
      "loss": 1.1349,
      "step": 955
    },
    {
      "epoch": 0.6376619063434075,
      "grad_norm": 0.08349609375,
      "learning_rate": 6.984952237457647e-05,
      "loss": 1.0884,
      "step": 960
    },
    {
      "epoch": 0.6409830621056127,
      "grad_norm": 0.08642578125,
      "learning_rate": 6.874544699564663e-05,
      "loss": 1.1405,
      "step": 965
    },
    {
      "epoch": 0.644304217867818,
      "grad_norm": 0.203125,
      "learning_rate": 6.764557801940771e-05,
      "loss": 1.1103,
      "step": 970
    },
    {
      "epoch": 0.6476253736300233,
      "grad_norm": 0.0927734375,
      "learning_rate": 6.655006347203128e-05,
      "loss": 1.1222,
      "step": 975
    },
    {
      "epoch": 0.6509465293922285,
      "grad_norm": 0.0927734375,
      "learning_rate": 6.545905079364678e-05,
      "loss": 1.1323,
      "step": 980
    },
    {
      "epoch": 0.6542676851544338,
      "grad_norm": 0.08544921875,
      "learning_rate": 6.437268681849825e-05,
      "loss": 1.1185,
      "step": 985
    },
    {
      "epoch": 0.6575888409166389,
      "grad_norm": 0.09033203125,
      "learning_rate": 6.329111775518284e-05,
      "loss": 1.1153,
      "step": 990
    },
    {
      "epoch": 0.6609099966788442,
      "grad_norm": 0.09375,
      "learning_rate": 6.221448916697323e-05,
      "loss": 1.1356,
      "step": 995
    },
    {
      "epoch": 0.6642311524410495,
      "grad_norm": 0.09716796875,
      "learning_rate": 6.114294595222667e-05,
      "loss": 1.1316,
      "step": 1000
    },
    {
      "epoch": 0.6675523082032547,
      "grad_norm": 0.1484375,
      "learning_rate": 6.0076632324884184e-05,
      "loss": 1.1045,
      "step": 1005
    },
    {
      "epoch": 0.67087346396546,
      "grad_norm": 0.083984375,
      "learning_rate": 5.9015691795061276e-05,
      "loss": 1.1136,
      "step": 1010
    },
    {
      "epoch": 0.6741946197276653,
      "grad_norm": 0.08740234375,
      "learning_rate": 5.796026714973358e-05,
      "loss": 1.1233,
      "step": 1015
    },
    {
      "epoch": 0.6775157754898705,
      "grad_norm": 0.083984375,
      "learning_rate": 5.691050043352e-05,
      "loss": 1.1125,
      "step": 1020
    },
    {
      "epoch": 0.6808369312520757,
      "grad_norm": 0.08837890625,
      "learning_rate": 5.586653292956536e-05,
      "loss": 1.1257,
      "step": 1025
    },
    {
      "epoch": 0.6841580870142809,
      "grad_norm": 0.0947265625,
      "learning_rate": 5.482850514052593e-05,
      "loss": 1.112,
      "step": 1030
    },
    {
      "epoch": 0.6874792427764862,
      "grad_norm": 0.0966796875,
      "learning_rate": 5.379655676965985e-05,
      "loss": 1.1022,
      "step": 1035
    },
    {
      "epoch": 0.6908003985386915,
      "grad_norm": 0.0986328125,
      "learning_rate": 5.2770826702025024e-05,
      "loss": 1.1031,
      "step": 1040
    },
    {
      "epoch": 0.6941215543008967,
      "grad_norm": 0.09326171875,
      "learning_rate": 5.1751452985787294e-05,
      "loss": 1.1314,
      "step": 1045
    },
    {
      "epoch": 0.697442710063102,
      "grad_norm": 0.09326171875,
      "learning_rate": 5.073857281364116e-05,
      "loss": 1.115,
      "step": 1050
    },
    {
      "epoch": 0.7007638658253073,
      "grad_norm": 0.087890625,
      "learning_rate": 4.9732322504345784e-05,
      "loss": 1.1162,
      "step": 1055
    },
    {
      "epoch": 0.7040850215875124,
      "grad_norm": 0.08740234375,
      "learning_rate": 4.873283748437832e-05,
      "loss": 1.1398,
      "step": 1060
    },
    {
      "epoch": 0.7074061773497177,
      "grad_norm": 0.0849609375,
      "learning_rate": 4.7740252269707545e-05,
      "loss": 1.1165,
      "step": 1065
    },
    {
      "epoch": 0.7107273331119229,
      "grad_norm": 0.09521484375,
      "learning_rate": 4.6754700447690145e-05,
      "loss": 1.1134,
      "step": 1070
    },
    {
      "epoch": 0.7140484888741282,
      "grad_norm": 0.087890625,
      "learning_rate": 4.57763146590916e-05,
      "loss": 1.1101,
      "step": 1075
    },
    {
      "epoch": 0.7173696446363335,
      "grad_norm": 0.08056640625,
      "learning_rate": 4.4805226580234794e-05,
      "loss": 1.1058,
      "step": 1080
    },
    {
      "epoch": 0.7206908003985387,
      "grad_norm": 0.10009765625,
      "learning_rate": 4.384156690527842e-05,
      "loss": 1.1228,
      "step": 1085
    },
    {
      "epoch": 0.724011956160744,
      "grad_norm": 0.08740234375,
      "learning_rate": 4.2885465328627275e-05,
      "loss": 1.1173,
      "step": 1090
    },
    {
      "epoch": 0.7273331119229491,
      "grad_norm": 0.10498046875,
      "learning_rate": 4.193705052747737e-05,
      "loss": 1.1233,
      "step": 1095
    },
    {
      "epoch": 0.7306542676851544,
      "grad_norm": 0.08251953125,
      "learning_rate": 4.0996450144498044e-05,
      "loss": 1.1163,
      "step": 1100
    },
    {
      "epoch": 0.7339754234473597,
      "grad_norm": 0.08349609375,
      "learning_rate": 4.0063790770652876e-05,
      "loss": 1.1264,
      "step": 1105
    },
    {
      "epoch": 0.7372965792095649,
      "grad_norm": 0.078125,
      "learning_rate": 3.913919792816252e-05,
      "loss": 1.1334,
      "step": 1110
    },
    {
      "epoch": 0.7406177349717702,
      "grad_norm": 0.076171875,
      "learning_rate": 3.822279605361138e-05,
      "loss": 1.1133,
      "step": 1115
    },
    {
      "epoch": 0.7439388907339755,
      "grad_norm": 0.0810546875,
      "learning_rate": 3.731470848120005e-05,
      "loss": 1.1068,
      "step": 1120
    },
    {
      "epoch": 0.7472600464961807,
      "grad_norm": 0.0947265625,
      "learning_rate": 3.64150574261465e-05,
      "loss": 1.1204,
      "step": 1125
    },
    {
      "epoch": 0.7505812022583859,
      "grad_norm": 0.0869140625,
      "learning_rate": 3.552396396823774e-05,
      "loss": 1.1446,
      "step": 1130
    },
    {
      "epoch": 0.7539023580205911,
      "grad_norm": 0.0986328125,
      "learning_rate": 3.464154803553408e-05,
      "loss": 1.0895,
      "step": 1135
    },
    {
      "epoch": 0.7572235137827964,
      "grad_norm": 0.09326171875,
      "learning_rate": 3.376792838822873e-05,
      "loss": 1.1094,
      "step": 1140
    },
    {
      "epoch": 0.7605446695450017,
      "grad_norm": 0.08544921875,
      "learning_rate": 3.2903222602664464e-05,
      "loss": 1.1178,
      "step": 1145
    },
    {
      "epoch": 0.7638658253072069,
      "grad_norm": 0.10205078125,
      "learning_rate": 3.20475470555094e-05,
      "loss": 1.1149,
      "step": 1150
    },
    {
      "epoch": 0.7671869810694122,
      "grad_norm": 0.08349609375,
      "learning_rate": 3.1201016908094514e-05,
      "loss": 1.1282,
      "step": 1155
    },
    {
      "epoch": 0.7705081368316175,
      "grad_norm": 0.0986328125,
      "learning_rate": 3.0363746090914724e-05,
      "loss": 1.1392,
      "step": 1160
    },
    {
      "epoch": 0.7738292925938226,
      "grad_norm": 0.08251953125,
      "learning_rate": 2.9535847288295325e-05,
      "loss": 1.0927,
      "step": 1165
    },
    {
      "epoch": 0.7771504483560279,
      "grad_norm": 0.08642578125,
      "learning_rate": 2.8717431923226422e-05,
      "loss": 1.1235,
      "step": 1170
    },
    {
      "epoch": 0.7804716041182331,
      "grad_norm": 0.08740234375,
      "learning_rate": 2.7908610142367143e-05,
      "loss": 1.109,
      "step": 1175
    },
    {
      "epoch": 0.7837927598804384,
      "grad_norm": 0.10009765625,
      "learning_rate": 2.7109490801221327e-05,
      "loss": 1.1214,
      "step": 1180
    },
    {
      "epoch": 0.7871139156426437,
      "grad_norm": 0.08935546875,
      "learning_rate": 2.632018144948727e-05,
      "loss": 1.1099,
      "step": 1185
    },
    {
      "epoch": 0.7904350714048489,
      "grad_norm": 0.0830078125,
      "learning_rate": 2.5540788316583207e-05,
      "loss": 1.1275,
      "step": 1190
    },
    {
      "epoch": 0.7937562271670542,
      "grad_norm": 0.10205078125,
      "learning_rate": 2.4771416297350247e-05,
      "loss": 1.1306,
      "step": 1195
    },
    {
      "epoch": 0.7970773829292593,
      "grad_norm": 0.1630859375,
      "learning_rate": 2.4012168937935096e-05,
      "loss": 1.0969,
      "step": 1200
    },
    {
      "epoch": 0.8003985386914646,
      "grad_norm": 0.0810546875,
      "learning_rate": 2.326314842185443e-05,
      "loss": 1.1186,
      "step": 1205
    },
    {
      "epoch": 0.8037196944536699,
      "grad_norm": 0.0830078125,
      "learning_rate": 2.252445555624225e-05,
      "loss": 1.1222,
      "step": 1210
    },
    {
      "epoch": 0.8070408502158751,
      "grad_norm": 0.0966796875,
      "learning_rate": 2.1796189758282915e-05,
      "loss": 1.1112,
      "step": 1215
    },
    {
      "epoch": 0.8103620059780804,
      "grad_norm": 0.08251953125,
      "learning_rate": 2.107844904183096e-05,
      "loss": 1.0992,
      "step": 1220
    },
    {
      "epoch": 0.8136831617402857,
      "grad_norm": 0.08935546875,
      "learning_rate": 2.0371330004219967e-05,
      "loss": 1.1067,
      "step": 1225
    },
    {
      "epoch": 0.8170043175024909,
      "grad_norm": 0.0859375,
      "learning_rate": 1.9674927813261856e-05,
      "loss": 1.135,
      "step": 1230
    },
    {
      "epoch": 0.8203254732646961,
      "grad_norm": 0.0849609375,
      "learning_rate": 1.8989336194438757e-05,
      "loss": 1.1272,
      "step": 1235
    },
    {
      "epoch": 0.8236466290269013,
      "grad_norm": 0.09228515625,
      "learning_rate": 1.831464741828903e-05,
      "loss": 1.11,
      "step": 1240
    },
    {
      "epoch": 0.8269677847891066,
      "grad_norm": 0.08154296875,
      "learning_rate": 1.7650952287988864e-05,
      "loss": 1.123,
      "step": 1245
    },
    {
      "epoch": 0.8302889405513119,
      "grad_norm": 0.08203125,
      "learning_rate": 1.699834012713155e-05,
      "loss": 1.1179,
      "step": 1250
    },
    {
      "epoch": 0.8336100963135171,
      "grad_norm": 0.08447265625,
      "learning_rate": 1.6356898767705954e-05,
      "loss": 1.1216,
      "step": 1255
    },
    {
      "epoch": 0.8369312520757224,
      "grad_norm": 0.078125,
      "learning_rate": 1.5726714538275423e-05,
      "loss": 1.1349,
      "step": 1260
    },
    {
      "epoch": 0.8402524078379277,
      "grad_norm": 0.0791015625,
      "learning_rate": 1.5107872252359389e-05,
      "loss": 1.1137,
      "step": 1265
    },
    {
      "epoch": 0.8435735636001328,
      "grad_norm": 0.09033203125,
      "learning_rate": 1.4500455197018703e-05,
      "loss": 1.1134,
      "step": 1270
    },
    {
      "epoch": 0.8468947193623381,
      "grad_norm": 0.08056640625,
      "learning_rate": 1.3904545121646317e-05,
      "loss": 1.1245,
      "step": 1275
    },
    {
      "epoch": 0.8502158751245433,
      "grad_norm": 0.08056640625,
      "learning_rate": 1.3320222226965118e-05,
      "loss": 1.1231,
      "step": 1280
    },
    {
      "epoch": 0.8535370308867486,
      "grad_norm": 0.08349609375,
      "learning_rate": 1.2747565154234109e-05,
      "loss": 1.1352,
      "step": 1285
    },
    {
      "epoch": 0.8568581866489539,
      "grad_norm": 0.0859375,
      "learning_rate": 1.2186650974664337e-05,
      "loss": 1.1065,
      "step": 1290
    },
    {
      "epoch": 0.8601793424111591,
      "grad_norm": 0.0869140625,
      "learning_rate": 1.1637555179046345e-05,
      "loss": 1.1235,
      "step": 1295
    },
    {
      "epoch": 0.8635004981733644,
      "grad_norm": 0.08447265625,
      "learning_rate": 1.1100351667590204e-05,
      "loss": 1.1458,
      "step": 1300
    },
    {
      "epoch": 0.8668216539355695,
      "grad_norm": 0.080078125,
      "learning_rate": 1.057511273997962e-05,
      "loss": 1.1097,
      "step": 1305
    },
    {
      "epoch": 0.8701428096977748,
      "grad_norm": 0.08203125,
      "learning_rate": 1.0061909085641396e-05,
      "loss": 1.1038,
      "step": 1310
    },
    {
      "epoch": 0.8734639654599801,
      "grad_norm": 0.10107421875,
      "learning_rate": 9.560809774231871e-06,
      "loss": 1.1226,
      "step": 1315
    },
    {
      "epoch": 0.8767851212221853,
      "grad_norm": 0.08251953125,
      "learning_rate": 9.0718822463409e-06,
      "loss": 1.1157,
      "step": 1320
    },
    {
      "epoch": 0.8801062769843906,
      "grad_norm": 0.08447265625,
      "learning_rate": 8.595192304415534e-06,
      "loss": 1.0975,
      "step": 1325
    },
    {
      "epoch": 0.8834274327465959,
      "grad_norm": 0.0888671875,
      "learning_rate": 8.130804103903956e-06,
      "loss": 1.1056,
      "step": 1330
    },
    {
      "epoch": 0.8867485885088011,
      "grad_norm": 0.123046875,
      "learning_rate": 7.678780144620956e-06,
      "loss": 1.1328,
      "step": 1335
    },
    {
      "epoch": 0.8900697442710063,
      "grad_norm": 0.083984375,
      "learning_rate": 7.2391812623366025e-06,
      "loss": 1.1226,
      "step": 1340
    },
    {
      "epoch": 0.8933909000332115,
      "grad_norm": 0.10400390625,
      "learning_rate": 6.812066620588519e-06,
      "loss": 1.1112,
      "step": 1345
    },
    {
      "epoch": 0.8967120557954168,
      "grad_norm": 0.0888671875,
      "learning_rate": 6.397493702719226e-06,
      "loss": 1.121,
      "step": 1350
    },
    {
      "epoch": 0.9000332115576221,
      "grad_norm": 0.08544921875,
      "learning_rate": 5.995518304139991e-06,
      "loss": 1.1104,
      "step": 1355
    },
    {
      "epoch": 0.9033543673198273,
      "grad_norm": 0.08984375,
      "learning_rate": 5.606194524821429e-06,
      "loss": 1.1058,
      "step": 1360
    },
    {
      "epoch": 0.9066755230820326,
      "grad_norm": 0.08935546875,
      "learning_rate": 5.229574762012379e-06,
      "loss": 1.1069,
      "step": 1365
    },
    {
      "epoch": 0.9099966788442378,
      "grad_norm": 0.08984375,
      "learning_rate": 4.865709703188193e-06,
      "loss": 1.1414,
      "step": 1370
    },
    {
      "epoch": 0.913317834606443,
      "grad_norm": 0.08251953125,
      "learning_rate": 4.514648319228798e-06,
      "loss": 1.1161,
      "step": 1375
    },
    {
      "epoch": 0.9166389903686483,
      "grad_norm": 0.07763671875,
      "learning_rate": 4.1764378578279865e-06,
      "loss": 1.1083,
      "step": 1380
    },
    {
      "epoch": 0.9199601461308535,
      "grad_norm": 0.08447265625,
      "learning_rate": 3.8511238371345845e-06,
      "loss": 1.1135,
      "step": 1385
    },
    {
      "epoch": 0.9232813018930588,
      "grad_norm": 0.08544921875,
      "learning_rate": 3.5387500396263483e-06,
      "loss": 1.1137,
      "step": 1390
    },
    {
      "epoch": 0.9266024576552641,
      "grad_norm": 0.09228515625,
      "learning_rate": 3.2393585062175493e-06,
      "loss": 1.1097,
      "step": 1395
    },
    {
      "epoch": 0.9299236134174693,
      "grad_norm": 0.08349609375,
      "learning_rate": 2.9529895306007804e-06,
      "loss": 1.1307,
      "step": 1400
    },
    {
      "epoch": 0.9332447691796745,
      "grad_norm": 0.09619140625,
      "learning_rate": 2.6796816538241065e-06,
      "loss": 1.1214,
      "step": 1405
    },
    {
      "epoch": 0.9365659249418797,
      "grad_norm": 0.083984375,
      "learning_rate": 2.419471659104e-06,
      "loss": 1.1157,
      "step": 1410
    },
    {
      "epoch": 0.939887080704085,
      "grad_norm": 0.08544921875,
      "learning_rate": 2.1723945668748247e-06,
      "loss": 1.1047,
      "step": 1415
    },
    {
      "epoch": 0.9432082364662903,
      "grad_norm": 0.10205078125,
      "learning_rate": 1.9384836300755937e-06,
      "loss": 1.1367,
      "step": 1420
    },
    {
      "epoch": 0.9465293922284955,
      "grad_norm": 0.08837890625,
      "learning_rate": 1.717770329674684e-06,
      "loss": 1.0773,
      "step": 1425
    },
    {
      "epoch": 0.9498505479907008,
      "grad_norm": 0.0771484375,
      "learning_rate": 1.5102843704328684e-06,
      "loss": 1.1348,
      "step": 1430
    },
    {
      "epoch": 0.953171703752906,
      "grad_norm": 0.080078125,
      "learning_rate": 1.3160536769055708e-06,
      "loss": 1.1134,
      "step": 1435
    },
    {
      "epoch": 0.9564928595151112,
      "grad_norm": 0.08544921875,
      "learning_rate": 1.1351043896846047e-06,
      "loss": 1.1216,
      "step": 1440
    },
    {
      "epoch": 0.9598140152773165,
      "grad_norm": 0.09375,
      "learning_rate": 9.674608618799985e-07,
      "loss": 1.1085,
      "step": 1445
    },
    {
      "epoch": 0.9631351710395217,
      "grad_norm": 0.0830078125,
      "learning_rate": 8.131456558424622e-07,
      "loss": 1.1218,
      "step": 1450
    },
    {
      "epoch": 0.966456326801727,
      "grad_norm": 0.09326171875,
      "learning_rate": 6.721795401268493e-07,
      "loss": 1.1021,
      "step": 1455
    },
    {
      "epoch": 0.9697774825639323,
      "grad_norm": 0.1259765625,
      "learning_rate": 5.445814866969712e-07,
      "loss": 1.1071,
      "step": 1460
    },
    {
      "epoch": 0.9730986383261375,
      "grad_norm": 0.08544921875,
      "learning_rate": 4.3036866837224964e-07,
      "loss": 1.0951,
      "step": 1465
    },
    {
      "epoch": 0.9764197940883428,
      "grad_norm": 0.08837890625,
      "learning_rate": 3.295564565165554e-07,
      "loss": 1.1135,
      "step": 1470
    },
    {
      "epoch": 0.9797409498505479,
      "grad_norm": 0.0869140625,
      "learning_rate": 2.4215841896938487e-07,
      "loss": 1.1155,
      "step": 1475
    },
    {
      "epoch": 0.9830621056127532,
      "grad_norm": 0.1533203125,
      "learning_rate": 1.681863182198984e-07,
      "loss": 1.1137,
      "step": 1480
    },
    {
      "epoch": 0.9863832613749585,
      "grad_norm": 0.083984375,
      "learning_rate": 1.0765010982378699e-07,
      "loss": 1.1328,
      "step": 1485
    },
    {
      "epoch": 0.9897044171371637,
      "grad_norm": 0.0810546875,
      "learning_rate": 6.055794106347712e-08,
      "loss": 1.1114,
      "step": 1490
    },
    {
      "epoch": 0.993025572899369,
      "grad_norm": 0.0869140625,
      "learning_rate": 2.6916149851563543e-08,
      "loss": 1.1065,
      "step": 1495
    },
    {
      "epoch": 0.9963467286615743,
      "grad_norm": 0.08251953125,
      "learning_rate": 6.729263877847114e-09,
      "loss": 1.1432,
      "step": 1500
    },
    {
      "epoch": 0.9996678844237795,
      "grad_norm": 0.08203125,
      "learning_rate": 0.0,
      "loss": 1.1166,
      "step": 1505
    },
    {
      "epoch": 0.9996678844237795,
      "eval_loss": 1.3468068838119507,
      "eval_runtime": 2438.0312,
      "eval_samples_per_second": 5.613,
      "eval_steps_per_second": 5.613,
      "step": 1505
    },
    {
      "epoch": 0.9996678844237795,
      "step": 1505,
      "total_flos": 4.73786206333698e+18,
      "train_loss": 0.5388378948072262,
      "train_runtime": 65741.8056,
      "train_samples_per_second": 2.931,
      "train_steps_per_second": 0.023
    }
  ],
  "logging_steps": 5,
  "max_steps": 1505,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 20,
  "total_flos": 4.73786206333698e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}