{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.19529385214953432,
  "eval_steps": 500,
  "global_step": 7500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0002603918028660458,
      "grad_norm": 0.4500846266746521,
      "learning_rate": 5.194805194805195e-06,
      "loss": 1.0381,
      "step": 10
    },
    {
      "epoch": 0.0005207836057320916,
      "grad_norm": 0.35188010334968567,
      "learning_rate": 1.038961038961039e-05,
      "loss": 1.0108,
      "step": 20
    },
    {
      "epoch": 0.0007811754085981374,
      "grad_norm": 0.2300374060869217,
      "learning_rate": 1.5584415584415583e-05,
      "loss": 0.9668,
      "step": 30
    },
    {
      "epoch": 0.0010415672114641832,
      "grad_norm": 0.16189467906951904,
      "learning_rate": 2.077922077922078e-05,
      "loss": 0.918,
      "step": 40
    },
    {
      "epoch": 0.001301959014330229,
      "grad_norm": 0.18843211233615875,
      "learning_rate": 2.5974025974025972e-05,
      "loss": 0.9265,
      "step": 50
    },
    {
      "epoch": 0.0015623508171962747,
      "grad_norm": 0.20334510505199432,
      "learning_rate": 3.1168831168831166e-05,
      "loss": 0.9234,
      "step": 60
    },
    {
      "epoch": 0.0018227426200623205,
      "grad_norm": 0.1745327115058899,
      "learning_rate": 3.6363636363636364e-05,
      "loss": 0.881,
      "step": 70
    },
    {
      "epoch": 0.0020831344229283663,
      "grad_norm": 0.18667331337928772,
      "learning_rate": 4.155844155844156e-05,
      "loss": 0.8592,
      "step": 80
    },
    {
      "epoch": 0.002343526225794412,
      "grad_norm": 0.1848158985376358,
      "learning_rate": 4.675324675324675e-05,
      "loss": 0.8537,
      "step": 90
    },
    {
      "epoch": 0.002603918028660458,
      "grad_norm": 0.17589879035949707,
      "learning_rate": 5.1948051948051944e-05,
      "loss": 0.8518,
      "step": 100
    },
    {
      "epoch": 0.0028643098315265037,
      "grad_norm": 0.2132624089717865,
      "learning_rate": 5.714285714285714e-05,
      "loss": 0.8511,
      "step": 110
    },
    {
      "epoch": 0.0031247016343925495,
      "grad_norm": 0.23070092499256134,
      "learning_rate": 6.233766233766233e-05,
      "loss": 0.7975,
      "step": 120
    },
    {
      "epoch": 0.0033850934372585953,
      "grad_norm": 0.25368157029151917,
      "learning_rate": 6.753246753246754e-05,
      "loss": 0.8134,
      "step": 130
    },
    {
      "epoch": 0.003645485240124641,
      "grad_norm": 0.22897231578826904,
      "learning_rate": 7.272727272727273e-05,
      "loss": 0.8322,
      "step": 140
    },
    {
      "epoch": 0.003905877042990687,
      "grad_norm": 0.19932536780834198,
      "learning_rate": 7.792207792207793e-05,
      "loss": 0.7959,
      "step": 150
    },
    {
      "epoch": 0.004166268845856733,
      "grad_norm": 0.21011792123317719,
      "learning_rate": 8.311688311688312e-05,
      "loss": 0.8102,
      "step": 160
    },
    {
      "epoch": 0.004426660648722778,
      "grad_norm": 0.20594824850559235,
      "learning_rate": 8.831168831168831e-05,
      "loss": 0.8128,
      "step": 170
    },
    {
      "epoch": 0.004687052451588824,
      "grad_norm": 0.20465536415576935,
      "learning_rate": 9.35064935064935e-05,
      "loss": 0.7989,
      "step": 180
    },
    {
      "epoch": 0.00494744425445487,
      "grad_norm": 0.4109392762184143,
      "learning_rate": 9.870129870129871e-05,
      "loss": 0.8108,
      "step": 190
    },
    {
      "epoch": 0.005207836057320916,
      "grad_norm": 0.4293076694011688,
      "learning_rate": 0.00010389610389610389,
      "loss": 0.8101,
      "step": 200
    },
    {
      "epoch": 0.005468227860186962,
      "grad_norm": 0.31628963351249695,
      "learning_rate": 0.00010909090909090909,
      "loss": 0.7989,
      "step": 210
    },
    {
      "epoch": 0.005728619663053007,
      "grad_norm": 0.24642810225486755,
      "learning_rate": 0.00011428571428571428,
      "loss": 0.7751,
      "step": 220
    },
    {
      "epoch": 0.005989011465919053,
      "grad_norm": 0.3599106967449188,
      "learning_rate": 0.00011948051948051949,
      "loss": 0.8063,
      "step": 230
    },
    {
      "epoch": 0.006249403268785099,
      "grad_norm": 0.17053447663784027,
      "learning_rate": 0.00012467532467532467,
      "loss": 0.7751,
      "step": 240
    },
    {
      "epoch": 0.006509795071651145,
      "grad_norm": 0.17303769290447235,
      "learning_rate": 0.00012987012987012987,
      "loss": 0.7883,
      "step": 250
    },
    {
      "epoch": 0.0067701868745171905,
      "grad_norm": 0.1815861016511917,
      "learning_rate": 0.00013506493506493507,
      "loss": 0.788,
      "step": 260
    },
    {
      "epoch": 0.007030578677383236,
      "grad_norm": 0.24125365912914276,
      "learning_rate": 0.00014025974025974028,
      "loss": 0.8018,
      "step": 270
    },
    {
      "epoch": 0.007290970480249282,
      "grad_norm": 0.19443446397781372,
      "learning_rate": 0.00014545454545454546,
      "loss": 0.7908,
      "step": 280
    },
    {
      "epoch": 0.007551362283115328,
      "grad_norm": 0.17829768359661102,
      "learning_rate": 0.00015064935064935066,
      "loss": 0.8033,
      "step": 290
    },
    {
      "epoch": 0.007811754085981374,
      "grad_norm": 0.19535653293132782,
      "learning_rate": 0.00015584415584415587,
      "loss": 0.7997,
      "step": 300
    },
    {
      "epoch": 0.008072145888847419,
      "grad_norm": 0.19930541515350342,
      "learning_rate": 0.00016103896103896104,
      "loss": 0.7945,
      "step": 310
    },
    {
      "epoch": 0.008332537691713465,
      "grad_norm": 0.2156297266483307,
      "learning_rate": 0.00016623376623376625,
      "loss": 0.8018,
      "step": 320
    },
    {
      "epoch": 0.00859292949457951,
      "grad_norm": 0.1924206018447876,
      "learning_rate": 0.00017142857142857143,
      "loss": 0.7746,
      "step": 330
    },
    {
      "epoch": 0.008853321297445557,
      "grad_norm": 0.2294880747795105,
      "learning_rate": 0.00017662337662337663,
      "loss": 0.8152,
      "step": 340
    },
    {
      "epoch": 0.009113713100311602,
      "grad_norm": 0.16817067563533783,
      "learning_rate": 0.00018181818181818183,
      "loss": 0.7972,
      "step": 350
    },
    {
      "epoch": 0.009374104903177648,
      "grad_norm": 0.18544812500476837,
      "learning_rate": 0.000187012987012987,
      "loss": 0.7801,
      "step": 360
    },
    {
      "epoch": 0.009634496706043693,
      "grad_norm": 0.19597066938877106,
      "learning_rate": 0.00019220779220779222,
      "loss": 0.7706,
      "step": 370
    },
    {
      "epoch": 0.00989488850890974,
      "grad_norm": 0.40291881561279297,
      "learning_rate": 0.00019740259740259742,
      "loss": 0.7911,
      "step": 380
    },
    {
      "epoch": 0.010155280311775785,
      "grad_norm": 0.23841074109077454,
      "learning_rate": 0.00019999996515752773,
      "loss": 0.7861,
      "step": 390
    },
    {
      "epoch": 0.010415672114641832,
      "grad_norm": 0.1675388514995575,
      "learning_rate": 0.00019999968641789507,
      "loss": 0.788,
      "step": 400
    },
    {
      "epoch": 0.010676063917507876,
      "grad_norm": 1.8860758543014526,
      "learning_rate": 0.0001999991289394067,
      "loss": 0.7632,
      "step": 410
    },
    {
      "epoch": 0.010936455720373923,
      "grad_norm": 0.17022117972373962,
      "learning_rate": 0.00019999829272361654,
      "loss": 0.784,
      "step": 420
    },
    {
      "epoch": 0.011196847523239968,
      "grad_norm": 0.21460269391536713,
      "learning_rate": 0.00019999717777285545,
      "loss": 0.761,
      "step": 430
    },
    {
      "epoch": 0.011457239326106015,
      "grad_norm": 0.19413785636425018,
      "learning_rate": 0.00019999578409023126,
      "loss": 0.7772,
      "step": 440
    },
    {
      "epoch": 0.01171763112897206,
      "grad_norm": 0.20223405957221985,
      "learning_rate": 0.00019999411167962868,
      "loss": 0.7811,
      "step": 450
    },
    {
      "epoch": 0.011978022931838106,
      "grad_norm": 0.15166303515434265,
      "learning_rate": 0.00019999216054570942,
      "loss": 0.7709,
      "step": 460
    },
    {
      "epoch": 0.012238414734704151,
      "grad_norm": 0.16307081282138824,
      "learning_rate": 0.00019998993069391205,
      "loss": 0.7811,
      "step": 470
    },
    {
      "epoch": 0.012498806537570198,
      "grad_norm": 0.15996049344539642,
      "learning_rate": 0.00019998742213045206,
      "loss": 0.7599,
      "step": 480
    },
    {
      "epoch": 0.012759198340436243,
      "grad_norm": 0.17560279369354248,
      "learning_rate": 0.00019998463486232179,
      "loss": 0.7572,
      "step": 490
    },
    {
      "epoch": 0.01301959014330229,
      "grad_norm": 0.17571642994880676,
      "learning_rate": 0.0001999815688972905,
      "loss": 0.7643,
      "step": 500
    },
    {
      "epoch": 0.013279981946168334,
      "grad_norm": 0.17719799280166626,
      "learning_rate": 0.00019997822424390422,
      "loss": 0.7923,
      "step": 510
    },
    {
      "epoch": 0.013540373749034381,
      "grad_norm": 0.19846616685390472,
      "learning_rate": 0.00019997460091148586,
      "loss": 0.7674,
      "step": 520
    },
    {
      "epoch": 0.013800765551900426,
      "grad_norm": 0.2715558111667633,
      "learning_rate": 0.00019997069891013503,
      "loss": 0.7421,
      "step": 530
    },
    {
      "epoch": 0.014061157354766473,
      "grad_norm": 0.1725197583436966,
      "learning_rate": 0.00019996651825072826,
      "loss": 0.7663,
      "step": 540
    },
    {
      "epoch": 0.014321549157632518,
      "grad_norm": 0.15060502290725708,
      "learning_rate": 0.00019996205894491856,
      "loss": 0.7794,
      "step": 550
    },
    {
      "epoch": 0.014581940960498564,
      "grad_norm": 0.16645808517932892,
      "learning_rate": 0.00019995732100513592,
      "loss": 0.752,
      "step": 560
    },
    {
      "epoch": 0.014842332763364609,
      "grad_norm": 0.1736789345741272,
      "learning_rate": 0.00019995230444458682,
      "loss": 0.7788,
      "step": 570
    },
    {
      "epoch": 0.015102724566230656,
      "grad_norm": 0.15416319668293,
      "learning_rate": 0.0001999470092772544,
      "loss": 0.7656,
      "step": 580
    },
    {
      "epoch": 0.0153631163690967,
      "grad_norm": 0.16610187292099,
      "learning_rate": 0.00019994143551789839,
      "loss": 0.7676,
      "step": 590
    },
    {
      "epoch": 0.015623508171962747,
      "grad_norm": 0.15843011438846588,
      "learning_rate": 0.00019993558318205507,
      "loss": 0.7746,
      "step": 600
    },
    {
      "epoch": 0.015883899974828794,
      "grad_norm": 0.26837801933288574,
      "learning_rate": 0.00019992945228603724,
      "loss": 0.7617,
      "step": 610
    },
    {
      "epoch": 0.016144291777694837,
      "grad_norm": 0.15099173784255981,
      "learning_rate": 0.0001999230428469341,
      "loss": 0.7601,
      "step": 620
    },
    {
      "epoch": 0.016404683580560884,
      "grad_norm": 0.15511856973171234,
      "learning_rate": 0.00019991635488261138,
      "loss": 0.7647,
      "step": 630
    },
    {
      "epoch": 0.01666507538342693,
      "grad_norm": 0.14919579029083252,
      "learning_rate": 0.00019990938841171104,
      "loss": 0.7692,
      "step": 640
    },
    {
      "epoch": 0.016925467186292977,
      "grad_norm": 0.15838642418384552,
      "learning_rate": 0.0001999021434536514,
      "loss": 0.7763,
      "step": 650
    },
    {
      "epoch": 0.01718585898915902,
      "grad_norm": 0.15956635773181915,
      "learning_rate": 0.00019989462002862704,
      "loss": 0.7598,
      "step": 660
    },
    {
      "epoch": 0.017446250792025067,
      "grad_norm": 0.1499069333076477,
      "learning_rate": 0.0001998868181576088,
      "loss": 0.7626,
      "step": 670
    },
    {
      "epoch": 0.017706642594891114,
      "grad_norm": 0.2170073390007019,
      "learning_rate": 0.00019987873786234348,
      "loss": 0.7569,
      "step": 680
    },
    {
      "epoch": 0.01796703439775716,
      "grad_norm": 0.17841948568820953,
      "learning_rate": 0.00019987037916535417,
      "loss": 0.7494,
      "step": 690
    },
    {
      "epoch": 0.018227426200623204,
      "grad_norm": 0.2066909372806549,
      "learning_rate": 0.0001998617420899398,
      "loss": 0.7609,
      "step": 700
    },
    {
      "epoch": 0.01848781800348925,
      "grad_norm": 0.17015361785888672,
      "learning_rate": 0.0001998528266601754,
      "loss": 0.7761,
      "step": 710
    },
    {
      "epoch": 0.018748209806355297,
      "grad_norm": 0.22166290879249573,
      "learning_rate": 0.0001998436329009118,
      "loss": 0.7573,
      "step": 720
    },
    {
      "epoch": 0.01900860160922134,
      "grad_norm": 0.15084640681743622,
      "learning_rate": 0.00019983416083777563,
      "loss": 0.7775,
      "step": 730
    },
    {
      "epoch": 0.019268993412087387,
      "grad_norm": 0.17800921201705933,
      "learning_rate": 0.0001998244104971693,
      "loss": 0.7359,
      "step": 740
    },
    {
      "epoch": 0.019529385214953433,
      "grad_norm": 0.17354707419872284,
      "learning_rate": 0.0001998143819062709,
      "loss": 0.7415,
      "step": 750
    },
    {
      "epoch": 0.01978977701781948,
      "grad_norm": 0.16408118605613708,
      "learning_rate": 0.00019980407509303413,
      "loss": 0.7708,
      "step": 760
    },
    {
      "epoch": 0.020050168820685523,
      "grad_norm": 0.16820089519023895,
      "learning_rate": 0.00019979349008618808,
      "loss": 0.791,
      "step": 770
    },
    {
      "epoch": 0.02031056062355157,
      "grad_norm": 0.15958388149738312,
      "learning_rate": 0.00019978262691523743,
      "loss": 0.7412,
      "step": 780
    },
    {
      "epoch": 0.020570952426417616,
      "grad_norm": 0.1646542251110077,
      "learning_rate": 0.00019977148561046217,
      "loss": 0.7529,
      "step": 790
    },
    {
      "epoch": 0.020831344229283663,
      "grad_norm": 0.17032025754451752,
      "learning_rate": 0.0001997600662029175,
      "loss": 0.7656,
      "step": 800
    },
    {
      "epoch": 0.021091736032149706,
      "grad_norm": 0.17189227044582367,
      "learning_rate": 0.00019974836872443388,
      "loss": 0.7433,
      "step": 810
    },
    {
      "epoch": 0.021352127835015753,
      "grad_norm": 0.16334249079227448,
      "learning_rate": 0.0001997363932076168,
      "loss": 0.7703,
      "step": 820
    },
    {
      "epoch": 0.0216125196378818,
      "grad_norm": 0.1676424890756607,
      "learning_rate": 0.00019972413968584682,
      "loss": 0.7603,
      "step": 830
    },
    {
      "epoch": 0.021872911440747846,
      "grad_norm": 0.16826209425926208,
      "learning_rate": 0.0001997116081932793,
      "loss": 0.7569,
      "step": 840
    },
    {
      "epoch": 0.02213330324361389,
      "grad_norm": 0.1876436173915863,
      "learning_rate": 0.0001996987987648446,
      "loss": 0.7553,
      "step": 850
    },
    {
      "epoch": 0.022393695046479936,
      "grad_norm": 0.17252250015735626,
      "learning_rate": 0.0001996857114362476,
      "loss": 0.7644,
      "step": 860
    },
    {
      "epoch": 0.022654086849345983,
      "grad_norm": 0.1632252335548401,
      "learning_rate": 0.00019967234624396793,
      "loss": 0.7568,
      "step": 870
    },
    {
      "epoch": 0.02291447865221203,
      "grad_norm": 0.1818259060382843,
      "learning_rate": 0.00019965870322525965,
      "loss": 0.7672,
      "step": 880
    },
    {
      "epoch": 0.023174870455078073,
      "grad_norm": 0.15418195724487305,
      "learning_rate": 0.0001996447824181513,
      "loss": 0.7642,
      "step": 890
    },
    {
      "epoch": 0.02343526225794412,
      "grad_norm": 0.17383505403995514,
      "learning_rate": 0.0001996305838614457,
      "loss": 0.7607,
      "step": 900
    },
    {
      "epoch": 0.023695654060810166,
      "grad_norm": 0.17794272303581238,
      "learning_rate": 0.00019961610759471984,
      "loss": 0.7588,
      "step": 910
    },
    {
      "epoch": 0.023956045863676213,
      "grad_norm": 0.1909121572971344,
      "learning_rate": 0.00019960135365832486,
      "loss": 0.7438,
      "step": 920
    },
    {
      "epoch": 0.024216437666542256,
      "grad_norm": 0.17758873105049133,
      "learning_rate": 0.00019958632209338587,
      "loss": 0.7323,
      "step": 930
    },
    {
      "epoch": 0.024476829469408302,
      "grad_norm": 0.15553662180900574,
      "learning_rate": 0.00019957101294180174,
      "loss": 0.7508,
      "step": 940
    },
    {
      "epoch": 0.02473722127227435,
      "grad_norm": 0.15310749411582947,
      "learning_rate": 0.00019955542624624522,
      "loss": 0.7563,
      "step": 950
    },
    {
      "epoch": 0.024997613075140396,
      "grad_norm": 0.1628728210926056,
      "learning_rate": 0.00019953956205016256,
      "loss": 0.7524,
      "step": 960
    },
    {
      "epoch": 0.02525800487800644,
      "grad_norm": 0.16211454570293427,
      "learning_rate": 0.00019952342039777362,
      "loss": 0.7564,
      "step": 970
    },
    {
      "epoch": 0.025518396680872486,
      "grad_norm": 0.15663012862205505,
      "learning_rate": 0.00019950700133407163,
      "loss": 0.7395,
      "step": 980
    },
    {
      "epoch": 0.025778788483738532,
      "grad_norm": 0.1684863567352295,
      "learning_rate": 0.00019949030490482296,
      "loss": 0.753,
      "step": 990
    },
    {
      "epoch": 0.02603918028660458,
      "grad_norm": 0.1561436653137207,
      "learning_rate": 0.0001994733311565673,
      "loss": 0.7409,
      "step": 1000
    },
    {
      "epoch": 0.026299572089470622,
      "grad_norm": 0.1781485229730606,
      "learning_rate": 0.0001994560801366171,
      "loss": 0.762,
      "step": 1010
    },
    {
      "epoch": 0.02655996389233667,
      "grad_norm": 0.15422071516513824,
      "learning_rate": 0.00019943855189305792,
      "loss": 0.7291,
      "step": 1020
    },
    {
      "epoch": 0.026820355695202715,
      "grad_norm": 0.17980527877807617,
      "learning_rate": 0.00019942074647474786,
      "loss": 0.7732,
      "step": 1030
    },
    {
      "epoch": 0.027080747498068762,
      "grad_norm": 0.15810626745224,
      "learning_rate": 0.00019940266393131775,
      "loss": 0.7764,
      "step": 1040
    },
    {
      "epoch": 0.027341139300934805,
      "grad_norm": 0.16385480761528015,
      "learning_rate": 0.00019938430431317081,
      "loss": 0.7404,
      "step": 1050
    },
    {
      "epoch": 0.027601531103800852,
      "grad_norm": 0.15134255588054657,
      "learning_rate": 0.00019936566767148257,
      "loss": 0.7506,
      "step": 1060
    },
    {
      "epoch": 0.0278619229066669,
      "grad_norm": 0.1592187136411667,
      "learning_rate": 0.00019934675405820077,
      "loss": 0.73,
      "step": 1070
    },
    {
      "epoch": 0.028122314709532945,
      "grad_norm": 0.16852422058582306,
      "learning_rate": 0.00019932756352604515,
      "loss": 0.7443,
      "step": 1080
    },
    {
      "epoch": 0.02838270651239899,
      "grad_norm": 0.15741507709026337,
      "learning_rate": 0.00019930809612850735,
      "loss": 0.7377,
      "step": 1090
    },
    {
      "epoch": 0.028643098315265035,
      "grad_norm": 0.22424879670143127,
      "learning_rate": 0.00019928835191985076,
      "loss": 0.7544,
      "step": 1100
    },
    {
      "epoch": 0.028903490118131082,
      "grad_norm": 0.2047310769557953,
      "learning_rate": 0.0001992683309551103,
      "loss": 0.7441,
      "step": 1110
    },
    {
      "epoch": 0.02916388192099713,
      "grad_norm": 0.16392463445663452,
      "learning_rate": 0.00019924803329009243,
      "loss": 0.7606,
      "step": 1120
    },
    {
      "epoch": 0.02942427372386317,
      "grad_norm": 0.16227149963378906,
      "learning_rate": 0.00019922745898137473,
      "loss": 0.736,
      "step": 1130
    },
    {
      "epoch": 0.029684665526729218,
      "grad_norm": 0.15652808547019958,
      "learning_rate": 0.00019920660808630598,
      "loss": 0.7513,
      "step": 1140
    },
    {
      "epoch": 0.029945057329595265,
      "grad_norm": 0.15162768959999084,
      "learning_rate": 0.00019918548066300592,
      "loss": 0.7303,
      "step": 1150
    },
    {
      "epoch": 0.03020544913246131,
      "grad_norm": 0.17650415003299713,
      "learning_rate": 0.0001991640767703651,
      "loss": 0.7254,
      "step": 1160
    },
    {
      "epoch": 0.030465840935327355,
      "grad_norm": 0.1594468355178833,
      "learning_rate": 0.00019914239646804462,
      "loss": 0.741,
      "step": 1170
    },
    {
      "epoch": 0.0307262327381934,
      "grad_norm": 0.17928367853164673,
      "learning_rate": 0.00019912043981647616,
      "loss": 0.7515,
      "step": 1180
    },
    {
      "epoch": 0.030986624541059448,
      "grad_norm": 0.17009998857975006,
      "learning_rate": 0.00019909820687686157,
      "loss": 0.7539,
      "step": 1190
    },
    {
      "epoch": 0.031247016343925495,
      "grad_norm": 0.16556763648986816,
      "learning_rate": 0.0001990756977111729,
      "loss": 0.7418,
      "step": 1200
    },
    {
      "epoch": 0.03150740814679154,
      "grad_norm": 0.1561640352010727,
      "learning_rate": 0.0001990529123821522,
      "loss": 0.7465,
      "step": 1210
    },
    {
      "epoch": 0.03176779994965759,
      "grad_norm": 0.15182287991046906,
      "learning_rate": 0.00019902985095331113,
      "loss": 0.7694,
      "step": 1220
    },
    {
      "epoch": 0.03202819175252363,
      "grad_norm": 0.15173685550689697,
      "learning_rate": 0.00019900651348893114,
      "loss": 0.7519,
      "step": 1230
    },
    {
      "epoch": 0.032288583555389674,
      "grad_norm": 0.16535787284374237,
      "learning_rate": 0.00019898290005406296,
      "loss": 0.7646,
      "step": 1240
    },
    {
      "epoch": 0.032548975358255725,
      "grad_norm": 0.19272534549236298,
      "learning_rate": 0.00019895901071452667,
      "loss": 0.7655,
      "step": 1250
    },
    {
      "epoch": 0.03280936716112177,
      "grad_norm": 0.1672705113887787,
      "learning_rate": 0.0001989348455369113,
      "loss": 0.7486,
      "step": 1260
    },
    {
      "epoch": 0.03306975896398781,
      "grad_norm": 0.1525493860244751,
      "learning_rate": 0.0001989104045885748,
      "loss": 0.7546,
      "step": 1270
    },
    {
      "epoch": 0.03333015076685386,
      "grad_norm": 0.16333037614822388,
      "learning_rate": 0.00019888568793764385,
      "loss": 0.7299,
      "step": 1280
    },
    {
      "epoch": 0.033590542569719904,
      "grad_norm": 0.1590205729007721,
      "learning_rate": 0.00019886069565301355,
      "loss": 0.762,
      "step": 1290
    },
    {
      "epoch": 0.033850934372585954,
      "grad_norm": 0.15006420016288757,
      "learning_rate": 0.00019883542780434733,
      "loss": 0.7531,
      "step": 1300
    },
    {
      "epoch": 0.034111326175452,
      "grad_norm": 0.18390792608261108,
      "learning_rate": 0.0001988098844620767,
      "loss": 0.7621,
      "step": 1310
    },
    {
      "epoch": 0.03437171797831804,
      "grad_norm": 0.17046166956424713,
      "learning_rate": 0.0001987840656974011,
      "loss": 0.7422,
      "step": 1320
    },
    {
      "epoch": 0.03463210978118409,
      "grad_norm": 0.15121813118457794,
      "learning_rate": 0.00019875797158228775,
      "loss": 0.7555,
      "step": 1330
    },
    {
      "epoch": 0.034892501584050134,
      "grad_norm": 0.16219307482242584,
      "learning_rate": 0.00019873160218947125,
      "loss": 0.7301,
      "step": 1340
    },
    {
      "epoch": 0.03515289338691618,
      "grad_norm": 0.1779986321926117,
      "learning_rate": 0.00019870495759245362,
      "loss": 0.7356,
      "step": 1350
    },
    {
      "epoch": 0.03541328518978223,
      "grad_norm": 0.16951359808444977,
      "learning_rate": 0.0001986780378655039,
      "loss": 0.7645,
      "step": 1360
    },
    {
      "epoch": 0.03567367699264827,
      "grad_norm": 0.16620802879333496,
      "learning_rate": 0.0001986508430836581,
      "loss": 0.7331,
      "step": 1370
    },
    {
      "epoch": 0.03593406879551432,
      "grad_norm": 0.1577858328819275,
      "learning_rate": 0.0001986233733227188,
      "loss": 0.7667,
      "step": 1380
    },
    {
      "epoch": 0.036194460598380364,
      "grad_norm": 0.1637091338634491,
      "learning_rate": 0.00019859562865925525,
      "loss": 0.7521,
      "step": 1390
    },
    {
      "epoch": 0.03645485240124641,
      "grad_norm": 0.15061691403388977,
      "learning_rate": 0.00019856760917060277,
      "loss": 0.744,
      "step": 1400
    },
    {
      "epoch": 0.03671524420411246,
      "grad_norm": 0.15373477339744568,
      "learning_rate": 0.00019853931493486287,
      "loss": 0.7677,
      "step": 1410
    },
    {
      "epoch": 0.0369756360069785,
      "grad_norm": 0.16468606889247894,
      "learning_rate": 0.00019851074603090277,
      "loss": 0.7179,
      "step": 1420
    },
    {
      "epoch": 0.037236027809844544,
      "grad_norm": 0.16084876656532288,
      "learning_rate": 0.00019848190253835536,
      "loss": 0.749,
      "step": 1430
    },
    {
      "epoch": 0.037496419612710594,
      "grad_norm": 0.16743004322052002,
      "learning_rate": 0.00019845278453761896,
      "loss": 0.7483,
      "step": 1440
    },
    {
      "epoch": 0.03775681141557664,
      "grad_norm": 0.17335088551044464,
      "learning_rate": 0.00019842339210985696,
      "loss": 0.735,
      "step": 1450
    },
    {
      "epoch": 0.03801720321844268,
      "grad_norm": 0.1546197235584259,
      "learning_rate": 0.00019839372533699774,
      "loss": 0.7549,
      "step": 1460
    },
    {
      "epoch": 0.03827759502130873,
      "grad_norm": 0.16218656301498413,
      "learning_rate": 0.00019836378430173438,
      "loss": 0.7425,
      "step": 1470
    },
    {
      "epoch": 0.03853798682417477,
      "grad_norm": 0.1712743639945984,
      "learning_rate": 0.0001983335690875245,
      "loss": 0.733,
      "step": 1480
    },
    {
      "epoch": 0.03879837862704082,
      "grad_norm": 0.15490613877773285,
      "learning_rate": 0.00019830307977858984,
      "loss": 0.7265,
      "step": 1490
    },
    {
      "epoch": 0.03905877042990687,
      "grad_norm": 0.1646670252084732,
      "learning_rate": 0.00019827231645991623,
      "loss": 0.7315,
      "step": 1500
    },
    {
      "epoch": 0.03931916223277291,
      "grad_norm": 0.1599082201719284,
      "learning_rate": 0.00019824127921725326,
      "loss": 0.7293,
      "step": 1510
    },
    {
      "epoch": 0.03957955403563896,
      "grad_norm": 0.1565747708082199,
      "learning_rate": 0.00019820996813711407,
      "loss": 0.7396,
      "step": 1520
    },
    {
      "epoch": 0.039839945838505,
      "grad_norm": 0.154826357960701,
      "learning_rate": 0.0001981783833067751,
      "loss": 0.7217,
      "step": 1530
    },
    {
      "epoch": 0.040100337641371046,
      "grad_norm": 0.16705222427845,
      "learning_rate": 0.0001981465248142758,
      "loss": 0.761,
      "step": 1540
    },
    {
      "epoch": 0.040360729444237096,
      "grad_norm": 0.15651623904705048,
      "learning_rate": 0.00019811439274841842,
      "loss": 0.7565,
      "step": 1550
    },
    {
      "epoch": 0.04062112124710314,
      "grad_norm": 0.16211090981960297,
      "learning_rate": 0.00019808198719876782,
      "loss": 0.7555,
      "step": 1560
    },
    {
      "epoch": 0.04088151304996919,
      "grad_norm": 0.16856881976127625,
      "learning_rate": 0.00019804930825565112,
      "loss": 0.7567,
      "step": 1570
    },
    {
      "epoch": 0.04114190485283523,
      "grad_norm": 0.1588718593120575,
      "learning_rate": 0.00019801635601015752,
      "loss": 0.729,
      "step": 1580
    },
    {
      "epoch": 0.041402296655701276,
      "grad_norm": 0.17078711092472076,
      "learning_rate": 0.00019798313055413808,
      "loss": 0.7418,
      "step": 1590
    },
    {
      "epoch": 0.041662688458567326,
      "grad_norm": 0.16652734577655792,
      "learning_rate": 0.00019794963198020525,
      "loss": 0.7341,
      "step": 1600
    },
    {
      "epoch": 0.04192308026143337,
      "grad_norm": 0.15535488724708557,
      "learning_rate": 0.00019791586038173296,
      "loss": 0.7396,
      "step": 1610
    },
    {
      "epoch": 0.04218347206429941,
      "grad_norm": 0.3506317734718323,
      "learning_rate": 0.00019788181585285602,
      "loss": 0.7345,
      "step": 1620
    },
    {
      "epoch": 0.04244386386716546,
      "grad_norm": 0.16875872015953064,
      "learning_rate": 0.00019784749848847003,
      "loss": 0.7214,
      "step": 1630
    },
    {
      "epoch": 0.042704255670031506,
      "grad_norm": 0.17675861716270447,
      "learning_rate": 0.0001978129083842312,
      "loss": 0.7431,
      "step": 1640
    },
    {
      "epoch": 0.042964647472897556,
      "grad_norm": 0.15601837635040283,
      "learning_rate": 0.00019777804563655583,
      "loss": 0.7215,
      "step": 1650
    },
    {
      "epoch": 0.0432250392757636,
      "grad_norm": 0.1874823123216629,
      "learning_rate": 0.00019774291034262026,
      "loss": 0.727,
      "step": 1660
    },
    {
      "epoch": 0.04348543107862964,
      "grad_norm": 0.17005637288093567,
      "learning_rate": 0.00019770750260036054,
      "loss": 0.7446,
      "step": 1670
    },
    {
      "epoch": 0.04374582288149569,
      "grad_norm": 0.17069579660892487,
      "learning_rate": 0.00019767182250847207,
      "loss": 0.7266,
      "step": 1680
    },
    {
      "epoch": 0.044006214684361736,
      "grad_norm": 0.16133156418800354,
      "learning_rate": 0.00019763587016640948,
      "loss": 0.7568,
      "step": 1690
    },
    {
      "epoch": 0.04426660648722778,
      "grad_norm": 0.16229428350925446,
      "learning_rate": 0.00019759964567438623,
      "loss": 0.7402,
      "step": 1700
    },
    {
      "epoch": 0.04452699829009383,
      "grad_norm": 0.1622512936592102,
      "learning_rate": 0.00019756314913337432,
      "loss": 0.7536,
      "step": 1710
    },
    {
      "epoch": 0.04478739009295987,
      "grad_norm": 0.2161218672990799,
      "learning_rate": 0.00019752638064510415,
      "loss": 0.723,
      "step": 1720
    },
    {
      "epoch": 0.04504778189582592,
      "grad_norm": 0.154169961810112,
      "learning_rate": 0.00019748934031206414,
      "loss": 0.7441,
      "step": 1730
    },
    {
      "epoch": 0.045308173698691966,
      "grad_norm": 0.15468057990074158,
      "learning_rate": 0.00019745202823750034,
      "loss": 0.7349,
      "step": 1740
    },
    {
      "epoch": 0.04556856550155801,
      "grad_norm": 0.2015281468629837,
      "learning_rate": 0.0001974144445254164,
      "loss": 0.726,
      "step": 1750
    },
    {
      "epoch": 0.04582895730442406,
      "grad_norm": 0.1931644082069397,
      "learning_rate": 0.00019737658928057302,
      "loss": 0.7604,
      "step": 1760
    },
    {
      "epoch": 0.0460893491072901,
      "grad_norm": 0.1528482288122177,
      "learning_rate": 0.00019733846260848776,
      "loss": 0.7408,
      "step": 1770
    },
    {
      "epoch": 0.046349740910156145,
      "grad_norm": 0.16370061039924622,
      "learning_rate": 0.0001973000646154349,
      "loss": 0.7647,
      "step": 1780
    },
    {
      "epoch": 0.046610132713022195,
      "grad_norm": 0.16271348297595978,
      "learning_rate": 0.00019726139540844484,
      "loss": 0.7212,
      "step": 1790
    },
    {
      "epoch": 0.04687052451588824,
      "grad_norm": 0.16218173503875732,
      "learning_rate": 0.00019722245509530401,
      "loss": 0.735,
      "step": 1800
    },
    {
      "epoch": 0.04713091631875429,
      "grad_norm": 0.17063820362091064,
      "learning_rate": 0.00019718324378455458,
      "loss": 0.7311,
      "step": 1810
    },
    {
      "epoch": 0.04739130812162033,
      "grad_norm": 0.1678459346294403,
      "learning_rate": 0.00019714376158549404,
      "loss": 0.7486,
      "step": 1820
    },
    {
      "epoch": 0.047651699924486375,
      "grad_norm": 0.15926459431648254,
      "learning_rate": 0.00019710400860817494,
      "loss": 0.743,
      "step": 1830
    },
    {
      "epoch": 0.047912091727352425,
      "grad_norm": 0.1775251179933548,
      "learning_rate": 0.00019706398496340463,
      "loss": 0.7512,
      "step": 1840
    },
    {
      "epoch": 0.04817248353021847,
      "grad_norm": 0.1572408229112625,
      "learning_rate": 0.00019702369076274494,
      "loss": 0.733,
      "step": 1850
    },
    {
      "epoch": 0.04843287533308451,
      "grad_norm": 0.29658186435699463,
      "learning_rate": 0.0001969831261185118,
      "loss": 0.7297,
      "step": 1860
    },
    {
      "epoch": 0.04869326713595056,
      "grad_norm": 0.16520118713378906,
      "learning_rate": 0.00019694229114377494,
      "loss": 0.721,
      "step": 1870
    },
    {
      "epoch": 0.048953658938816605,
      "grad_norm": 0.17762574553489685,
      "learning_rate": 0.00019690118595235774,
      "loss": 0.7304,
      "step": 1880
    },
    {
      "epoch": 0.049214050741682655,
      "grad_norm": 0.16636615991592407,
      "learning_rate": 0.00019685981065883663,
      "loss": 0.7257,
      "step": 1890
    },
    {
      "epoch": 0.0494744425445487,
      "grad_norm": 0.1622323989868164,
      "learning_rate": 0.00019681816537854102,
      "loss": 0.7353,
      "step": 1900
    },
    {
      "epoch": 0.04973483434741474,
      "grad_norm": 0.17419832944869995,
      "learning_rate": 0.00019677625022755289,
      "loss": 0.7452,
      "step": 1910
    },
    {
      "epoch": 0.04999522615028079,
      "grad_norm": 0.17460434138774872,
      "learning_rate": 0.00019673406532270634,
      "loss": 0.7391,
      "step": 1920
    },
    {
      "epoch": 0.050255617953146835,
      "grad_norm": 0.15844550728797913,
      "learning_rate": 0.00019669161078158753,
      "loss": 0.7327,
      "step": 1930
    },
    {
      "epoch": 0.05051600975601288,
      "grad_norm": 0.1638839989900589,
      "learning_rate": 0.0001966488867225341,
      "loss": 0.745,
      "step": 1940
    },
    {
      "epoch": 0.05077640155887893,
      "grad_norm": 0.1587786227464676,
      "learning_rate": 0.00019660589326463498,
      "loss": 0.7476,
      "step": 1950
    },
    {
      "epoch": 0.05103679336174497,
      "grad_norm": 0.15708380937576294,
      "learning_rate": 0.00019656263052773002,
      "loss": 0.7208,
      "step": 1960
    },
    {
      "epoch": 0.05129718516461102,
      "grad_norm": 0.15816234052181244,
      "learning_rate": 0.00019651909863240965,
      "loss": 0.7262,
      "step": 1970
    },
    {
      "epoch": 0.051557576967477065,
      "grad_norm": 0.16749270260334015,
      "learning_rate": 0.00019647529770001456,
      "loss": 0.7284,
      "step": 1980
    },
    {
      "epoch": 0.05181796877034311,
      "grad_norm": 0.16943767666816711,
      "learning_rate": 0.00019643122785263536,
      "loss": 0.7225,
      "step": 1990
    },
    {
      "epoch": 0.05207836057320916,
      "grad_norm": 0.42929205298423767,
      "learning_rate": 0.00019638688921311224,
      "loss": 0.7305,
      "step": 2000
    },
    {
      "epoch": 0.0523387523760752,
      "grad_norm": 0.15851692855358124,
      "learning_rate": 0.00019634228190503454,
      "loss": 0.7344,
      "step": 2010
    },
    {
      "epoch": 0.052599144178941244,
      "grad_norm": 0.16053883731365204,
      "learning_rate": 0.00019629740605274062,
      "loss": 0.7468,
      "step": 2020
    },
    {
      "epoch": 0.052859535981807294,
      "grad_norm": 0.16504009068012238,
      "learning_rate": 0.00019625226178131728,
      "loss": 0.7375,
      "step": 2030
    },
    {
      "epoch": 0.05311992778467334,
      "grad_norm": 0.1618044674396515,
      "learning_rate": 0.00019620684921659953,
      "loss": 0.7201,
      "step": 2040
    },
    {
      "epoch": 0.05338031958753939,
      "grad_norm": 0.15512776374816895,
      "learning_rate": 0.00019616116848517027,
      "loss": 0.7355,
      "step": 2050
    },
    {
      "epoch": 0.05364071139040543,
      "grad_norm": 0.17377036809921265,
      "learning_rate": 0.00019611521971435979,
      "loss": 0.7226,
      "step": 2060
    },
    {
      "epoch": 0.053901103193271474,
      "grad_norm": 0.1685250997543335,
      "learning_rate": 0.0001960690030322456,
      "loss": 0.7483,
      "step": 2070
    },
    {
      "epoch": 0.054161494996137524,
      "grad_norm": 0.18394522368907928,
      "learning_rate": 0.00019602251856765194,
      "loss": 0.7385,
      "step": 2080
    },
    {
      "epoch": 0.05442188679900357,
      "grad_norm": 0.1753673106431961,
      "learning_rate": 0.0001959757664501495,
      "loss": 0.7378,
      "step": 2090
    },
    {
      "epoch": 0.05468227860186961,
      "grad_norm": 0.1795465052127838,
      "learning_rate": 0.000195928746810055,
      "loss": 0.748,
      "step": 2100
    },
    {
      "epoch": 0.05494267040473566,
      "grad_norm": 0.16327305138111115,
      "learning_rate": 0.0001958814597784309,
      "loss": 0.7306,
      "step": 2110
    },
    {
      "epoch": 0.055203062207601704,
      "grad_norm": 0.15880291163921356,
      "learning_rate": 0.00019583390548708486,
      "loss": 0.7281,
      "step": 2120
    },
    {
      "epoch": 0.05546345401046775,
      "grad_norm": 0.1702323853969574,
      "learning_rate": 0.0001957860840685696,
      "loss": 0.7407,
      "step": 2130
    },
    {
      "epoch": 0.0557238458133338,
      "grad_norm": 0.16931670904159546,
      "learning_rate": 0.0001957379956561825,
      "loss": 0.7272,
      "step": 2140
    },
    {
      "epoch": 0.05598423761619984,
      "grad_norm": 0.15455976128578186,
      "learning_rate": 0.000195689640383965,
      "loss": 0.7398,
      "step": 2150
    },
    {
      "epoch": 0.05624462941906589,
      "grad_norm": 0.16061417758464813,
      "learning_rate": 0.0001956410183867024,
      "loss": 0.749,
      "step": 2160
    },
    {
      "epoch": 0.056505021221931934,
      "grad_norm": 0.14933143556118011,
      "learning_rate": 0.00019559212979992365,
      "loss": 0.7418,
      "step": 2170
    },
    {
      "epoch": 0.05676541302479798,
      "grad_norm": 0.1592816412448883,
      "learning_rate": 0.00019554297475990058,
      "loss": 0.7423,
      "step": 2180
    },
    {
      "epoch": 0.05702580482766403,
      "grad_norm": 0.1677238792181015,
      "learning_rate": 0.00019549355340364787,
      "loss": 0.7101,
      "step": 2190
    },
    {
      "epoch": 0.05728619663053007,
      "grad_norm": 0.3558599054813385,
      "learning_rate": 0.00019544386586892238,
      "loss": 0.725,
      "step": 2200
    },
    {
      "epoch": 0.05754658843339611,
      "grad_norm": 0.1746376007795334,
      "learning_rate": 0.00019539391229422313,
      "loss": 0.7479,
      "step": 2210
    },
    {
      "epoch": 0.057806980236262163,
      "grad_norm": 0.15979182720184326,
      "learning_rate": 0.00019534369281879049,
      "loss": 0.7352,
      "step": 2220
    },
    {
      "epoch": 0.05806737203912821,
      "grad_norm": 0.16173166036605835,
      "learning_rate": 0.0001952932075826061,
      "loss": 0.7364,
      "step": 2230
    },
    {
      "epoch": 0.05832776384199426,
      "grad_norm": 0.1514744907617569,
      "learning_rate": 0.00019524245672639245,
      "loss": 0.734,
      "step": 2240
    },
    {
      "epoch": 0.0585881556448603,
      "grad_norm": 0.16860373318195343,
      "learning_rate": 0.00019519144039161222,
      "loss": 0.7098,
      "step": 2250
    },
    {
      "epoch": 0.05884854744772634,
      "grad_norm": 0.16847743093967438,
      "learning_rate": 0.00019514015872046833,
      "loss": 0.7103,
      "step": 2260
    },
    {
      "epoch": 0.05910893925059239,
      "grad_norm": 0.16181516647338867,
      "learning_rate": 0.00019508861185590307,
      "loss": 0.7561,
      "step": 2270
    },
    {
      "epoch": 0.059369331053458436,
      "grad_norm": 0.16594484448432922,
      "learning_rate": 0.0001950367999415981,
      "loss": 0.7308,
      "step": 2280
    },
    {
      "epoch": 0.05962972285632448,
      "grad_norm": 0.166441410779953,
      "learning_rate": 0.00019498472312197375,
      "loss": 0.735,
      "step": 2290
    },
    {
      "epoch": 0.05989011465919053,
      "grad_norm": 0.16273920238018036,
      "learning_rate": 0.00019493238154218886,
      "loss": 0.7458,
      "step": 2300
    },
    {
      "epoch": 0.06015050646205657,
      "grad_norm": 0.16227276623249054,
      "learning_rate": 0.00019487977534814012,
      "loss": 0.7143,
      "step": 2310
    },
    {
      "epoch": 0.06041089826492262,
      "grad_norm": 0.1619606912136078,
      "learning_rate": 0.000194826904686462,
      "loss": 0.7285,
      "step": 2320
    },
    {
      "epoch": 0.060671290067788666,
      "grad_norm": 0.1596045345067978,
      "learning_rate": 0.00019477376970452603,
      "loss": 0.7513,
      "step": 2330
    },
    {
      "epoch": 0.06093168187065471,
      "grad_norm": 0.17504757642745972,
      "learning_rate": 0.00019472037055044044,
      "loss": 0.7376,
      "step": 2340
    },
    {
      "epoch": 0.06119207367352076,
      "grad_norm": 0.1559167355298996,
      "learning_rate": 0.00019466670737304992,
      "loss": 0.7339,
      "step": 2350
    },
    {
      "epoch": 0.0614524654763868,
      "grad_norm": 0.1624836027622223,
      "learning_rate": 0.0001946127803219351,
      "loss": 0.7258,
      "step": 2360
    },
    {
      "epoch": 0.061712857279252846,
      "grad_norm": 0.17907138168811798,
      "learning_rate": 0.00019455858954741206,
      "loss": 0.72,
      "step": 2370
    },
    {
      "epoch": 0.061973249082118896,
      "grad_norm": 0.15922705829143524,
      "learning_rate": 0.00019450413520053202,
      "loss": 0.7187,
      "step": 2380
    },
    {
      "epoch": 0.06223364088498494,
      "grad_norm": 0.1552513986825943,
      "learning_rate": 0.0001944494174330809,
      "loss": 0.7183,
      "step": 2390
    },
    {
      "epoch": 0.06249403268785099,
      "grad_norm": 0.16838514804840088,
      "learning_rate": 0.00019439443639757885,
      "loss": 0.7286,
      "step": 2400
    },
    {
      "epoch": 0.06275442449071703,
      "grad_norm": 0.17352423071861267,
      "learning_rate": 0.00019433919224727986,
      "loss": 0.7436,
      "step": 2410
    },
    {
      "epoch": 0.06301481629358308,
      "grad_norm": 0.17366603016853333,
      "learning_rate": 0.0001942836851361713,
      "loss": 0.7265,
      "step": 2420
    },
    {
      "epoch": 0.06327520809644913,
      "grad_norm": 0.14833413064479828,
      "learning_rate": 0.00019422791521897357,
      "loss": 0.7234,
      "step": 2430
    },
    {
      "epoch": 0.06353559989931518,
      "grad_norm": 0.16602723300457,
      "learning_rate": 0.00019417188265113958,
      "loss": 0.725,
      "step": 2440
    },
    {
      "epoch": 0.06379599170218121,
      "grad_norm": 0.17290353775024414,
      "learning_rate": 0.00019411558758885438,
      "loss": 0.7174,
      "step": 2450
    },
    {
      "epoch": 0.06405638350504726,
      "grad_norm": 0.16486665606498718,
      "learning_rate": 0.0001940590301890346,
      "loss": 0.7301,
      "step": 2460
    },
    {
      "epoch": 0.06431677530791331,
      "grad_norm": 0.16255232691764832,
      "learning_rate": 0.00019400221060932827,
      "loss": 0.7462,
      "step": 2470
    },
    {
      "epoch": 0.06457716711077935,
      "grad_norm": 0.16139757633209229,
      "learning_rate": 0.0001939451290081141,
      "loss": 0.7424,
      "step": 2480
    },
    {
      "epoch": 0.0648375589136454,
      "grad_norm": 0.165597602725029,
      "learning_rate": 0.00019388778554450117,
      "loss": 0.7426,
      "step": 2490
    },
    {
      "epoch": 0.06509795071651145,
      "grad_norm": 0.19819000363349915,
      "learning_rate": 0.00019383018037832854,
      "loss": 0.7356,
      "step": 2500
    },
    {
      "epoch": 0.06535834251937749,
      "grad_norm": 0.16469696164131165,
      "learning_rate": 0.00019377231367016467,
      "loss": 0.718,
      "step": 2510
    },
    {
      "epoch": 0.06561873432224354,
      "grad_norm": 0.1644965261220932,
      "learning_rate": 0.00019371418558130702,
      "loss": 0.7253,
      "step": 2520
    },
    {
      "epoch": 0.06587912612510959,
      "grad_norm": 0.15347526967525482,
      "learning_rate": 0.00019365579627378174,
      "loss": 0.7214,
      "step": 2530
    },
    {
      "epoch": 0.06613951792797562,
      "grad_norm": 0.1618672013282776,
      "learning_rate": 0.00019359714591034302,
      "loss": 0.7204,
      "step": 2540
    },
    {
      "epoch": 0.06639990973084167,
      "grad_norm": 0.17043665051460266,
      "learning_rate": 0.00019353823465447268,
      "loss": 0.7278,
      "step": 2550
    },
    {
      "epoch": 0.06666030153370772,
      "grad_norm": 0.15762579441070557,
      "learning_rate": 0.00019347906267037983,
      "loss": 0.7283,
      "step": 2560
    },
    {
      "epoch": 0.06692069333657376,
      "grad_norm": 0.1622801572084427,
      "learning_rate": 0.00019341963012300029,
      "loss": 0.7193,
      "step": 2570
    },
    {
      "epoch": 0.06718108513943981,
      "grad_norm": 0.16705769300460815,
      "learning_rate": 0.00019335993717799617,
      "loss": 0.7414,
      "step": 2580
    },
    {
      "epoch": 0.06744147694230586,
      "grad_norm": 0.15886452794075012,
      "learning_rate": 0.00019329998400175545,
      "loss": 0.7242,
      "step": 2590
    },
    {
      "epoch": 0.06770186874517191,
      "grad_norm": 0.17994090914726257,
      "learning_rate": 0.00019323977076139142,
      "loss": 0.7017,
      "step": 2600
    },
    {
      "epoch": 0.06796226054803794,
      "grad_norm": 0.1609068214893341,
      "learning_rate": 0.00019317929762474232,
      "loss": 0.7352,
      "step": 2610
    },
    {
      "epoch": 0.068222652350904,
      "grad_norm": 0.15605950355529785,
      "learning_rate": 0.0001931185647603708,
      "loss": 0.7249,
      "step": 2620
    },
    {
      "epoch": 0.06848304415377005,
      "grad_norm": 0.16057750582695007,
      "learning_rate": 0.00019305757233756352,
      "loss": 0.7521,
      "step": 2630
    },
    {
      "epoch": 0.06874343595663608,
      "grad_norm": 0.1703862100839615,
      "learning_rate": 0.00019299632052633054,
      "loss": 0.7245,
      "step": 2640
    },
    {
      "epoch": 0.06900382775950213,
      "grad_norm": 0.16324444115161896,
      "learning_rate": 0.00019293480949740505,
      "loss": 0.7395,
      "step": 2650
    },
    {
      "epoch": 0.06926421956236818,
      "grad_norm": 0.15283791720867157,
      "learning_rate": 0.00019287303942224266,
      "loss": 0.7158,
      "step": 2660
    },
    {
      "epoch": 0.06952461136523422,
      "grad_norm": 0.1882282942533493,
      "learning_rate": 0.00019281101047302114,
      "loss": 0.724,
      "step": 2670
    },
    {
      "epoch": 0.06978500316810027,
      "grad_norm": 0.16147953271865845,
      "learning_rate": 0.00019274872282263984,
      "loss": 0.7365,
      "step": 2680
    },
    {
      "epoch": 0.07004539497096632,
      "grad_norm": 0.1614103466272354,
      "learning_rate": 0.00019268617664471916,
      "loss": 0.7206,
      "step": 2690
    },
    {
      "epoch": 0.07030578677383235,
      "grad_norm": 0.16784432530403137,
      "learning_rate": 0.00019262337211360016,
      "loss": 0.7279,
      "step": 2700
    },
    {
      "epoch": 0.0705661785766984,
      "grad_norm": 0.15966112911701202,
      "learning_rate": 0.000192560309404344,
      "loss": 0.7274,
      "step": 2710
    },
    {
      "epoch": 0.07082657037956445,
      "grad_norm": 0.16970521211624146,
      "learning_rate": 0.0001924969886927315,
      "loss": 0.7038,
      "step": 2720
    },
    {
      "epoch": 0.07108696218243049,
      "grad_norm": 0.16143856942653656,
      "learning_rate": 0.00019243341015526272,
      "loss": 0.7097,
      "step": 2730
    },
    {
      "epoch": 0.07134735398529654,
      "grad_norm": 0.16041269898414612,
      "learning_rate": 0.00019236957396915623,
      "loss": 0.722,
      "step": 2740
    },
    {
      "epoch": 0.07160774578816259,
      "grad_norm": 0.15845969319343567,
      "learning_rate": 0.00019230548031234882,
      "loss": 0.7238,
      "step": 2750
    },
    {
      "epoch": 0.07186813759102864,
      "grad_norm": 0.14966030418872833,
      "learning_rate": 0.00019224112936349502,
      "loss": 0.7182,
      "step": 2760
    },
    {
      "epoch": 0.07212852939389468,
      "grad_norm": 0.16525116562843323,
      "learning_rate": 0.00019217652130196653,
      "loss": 0.7397,
      "step": 2770
    },
    {
      "epoch": 0.07238892119676073,
      "grad_norm": 0.18119119107723236,
      "learning_rate": 0.0001921116563078516,
      "loss": 0.7222,
      "step": 2780
    },
    {
      "epoch": 0.07264931299962678,
      "grad_norm": 0.1709197610616684,
      "learning_rate": 0.00019204653456195478,
      "loss": 0.7068,
      "step": 2790
    },
    {
      "epoch": 0.07290970480249281,
      "grad_norm": 0.16309161484241486,
      "learning_rate": 0.00019198115624579625,
      "loss": 0.7349,
      "step": 2800
    },
    {
      "epoch": 0.07317009660535886,
      "grad_norm": 0.1736750453710556,
      "learning_rate": 0.00019191552154161135,
      "loss": 0.7445,
      "step": 2810
    },
    {
      "epoch": 0.07343048840822491,
      "grad_norm": 0.15009112656116486,
      "learning_rate": 0.00019184963063235006,
      "loss": 0.7034,
      "step": 2820
    },
    {
      "epoch": 0.07369088021109095,
      "grad_norm": 0.17244628071784973,
      "learning_rate": 0.0001917834837016766,
      "loss": 0.7285,
      "step": 2830
    },
    {
      "epoch": 0.073951272013957,
      "grad_norm": 0.15991820394992828,
      "learning_rate": 0.00019171708093396861,
      "loss": 0.7096,
      "step": 2840
    },
    {
      "epoch": 0.07421166381682305,
      "grad_norm": 0.17037667334079742,
      "learning_rate": 0.0001916504225143171,
      "loss": 0.7177,
      "step": 2850
    },
    {
      "epoch": 0.07447205561968909,
      "grad_norm": 0.16700348258018494,
      "learning_rate": 0.00019158350862852553,
      "loss": 0.7453,
      "step": 2860
    },
    {
      "epoch": 0.07473244742255514,
      "grad_norm": 0.17683659493923187,
      "learning_rate": 0.00019151633946310948,
      "loss": 0.7331,
      "step": 2870
    },
    {
      "epoch": 0.07499283922542119,
      "grad_norm": 0.16364306211471558,
      "learning_rate": 0.00019144891520529608,
      "loss": 0.7347,
      "step": 2880
    },
    {
      "epoch": 0.07525323102828722,
      "grad_norm": 0.1781424731016159,
      "learning_rate": 0.00019138123604302355,
      "loss": 0.7169,
      "step": 2890
    },
    {
      "epoch": 0.07551362283115327,
      "grad_norm": 0.16007259488105774,
      "learning_rate": 0.00019131330216494064,
      "loss": 0.7269,
      "step": 2900
    },
    {
      "epoch": 0.07577401463401932,
      "grad_norm": 0.1604921519756317,
      "learning_rate": 0.00019124511376040598,
      "loss": 0.7094,
      "step": 2910
    },
    {
      "epoch": 0.07603440643688536,
      "grad_norm": 0.16649965941905975,
      "learning_rate": 0.00019117667101948782,
      "loss": 0.7271,
      "step": 2920
    },
    {
      "epoch": 0.07629479823975141,
      "grad_norm": 0.16084066033363342,
      "learning_rate": 0.0001911079741329632,
      "loss": 0.7239,
      "step": 2930
    },
    {
      "epoch": 0.07655519004261746,
      "grad_norm": 0.1651066243648529,
      "learning_rate": 0.0001910390232923177,
      "loss": 0.7304,
      "step": 2940
    },
    {
      "epoch": 0.07681558184548351,
      "grad_norm": 0.1528957635164261,
      "learning_rate": 0.00019096981868974467,
      "loss": 0.7068,
      "step": 2950
    },
    {
      "epoch": 0.07707597364834955,
      "grad_norm": 0.172830730676651,
      "learning_rate": 0.00019090036051814483,
      "loss": 0.7277,
      "step": 2960
    },
    {
      "epoch": 0.0773363654512156,
      "grad_norm": 0.15909147262573242,
      "learning_rate": 0.00019083064897112571,
      "loss": 0.7135,
      "step": 2970
    },
    {
      "epoch": 0.07759675725408165,
      "grad_norm": 0.16273066401481628,
      "learning_rate": 0.0001907606842430011,
      "loss": 0.7346,
      "step": 2980
    },
    {
      "epoch": 0.07785714905694768,
      "grad_norm": 0.1595291793346405,
      "learning_rate": 0.00019069046652879049,
      "loss": 0.7377,
      "step": 2990
    },
    {
      "epoch": 0.07811754085981373,
      "grad_norm": 0.15573470294475555,
      "learning_rate": 0.0001906199960242185,
      "loss": 0.7026,
      "step": 3000
    },
    {
      "epoch": 0.07837793266267978,
      "grad_norm": 0.1670667678117752,
      "learning_rate": 0.0001905492729257145,
      "loss": 0.7231,
      "step": 3010
    },
    {
      "epoch": 0.07863832446554582,
      "grad_norm": 0.17074571549892426,
      "learning_rate": 0.00019047829743041184,
      "loss": 0.7003,
      "step": 3020
    },
    {
      "epoch": 0.07889871626841187,
      "grad_norm": 0.16979442536830902,
      "learning_rate": 0.00019040706973614738,
      "loss": 0.7217,
      "step": 3030
    },
    {
      "epoch": 0.07915910807127792,
      "grad_norm": 0.15843816101551056,
      "learning_rate": 0.00019033559004146103,
      "loss": 0.7334,
      "step": 3040
    },
    {
      "epoch": 0.07941949987414396,
      "grad_norm": 0.1607016921043396,
      "learning_rate": 0.0001902638585455951,
      "loss": 0.7271,
      "step": 3050
    },
    {
      "epoch": 0.07967989167701,
      "grad_norm": 0.1619115173816681,
      "learning_rate": 0.0001901918754484938,
      "loss": 0.7144,
      "step": 3060
    },
    {
      "epoch": 0.07994028347987606,
      "grad_norm": 0.1638360172510147,
      "learning_rate": 0.00019011964095080254,
      "loss": 0.7149,
      "step": 3070
    },
    {
      "epoch": 0.08020067528274209,
      "grad_norm": 0.16503652930259705,
      "learning_rate": 0.00019004715525386764,
      "loss": 0.7011,
      "step": 3080
    },
    {
      "epoch": 0.08046106708560814,
      "grad_norm": 0.16763822734355927,
      "learning_rate": 0.00018997441855973552,
      "loss": 0.7145,
      "step": 3090
    },
    {
      "epoch": 0.08072145888847419,
      "grad_norm": 0.1621125340461731,
      "learning_rate": 0.0001899014310711522,
      "loss": 0.7318,
      "step": 3100
    },
    {
      "epoch": 0.08098185069134024,
      "grad_norm": 0.16480112075805664,
      "learning_rate": 0.0001898281929915629,
      "loss": 0.7145,
      "step": 3110
    },
    {
      "epoch": 0.08124224249420628,
      "grad_norm": 0.1805388629436493,
      "learning_rate": 0.00018975470452511112,
      "loss": 0.7102,
      "step": 3120
    },
    {
      "epoch": 0.08150263429707233,
      "grad_norm": 0.1902652531862259,
      "learning_rate": 0.00018968096587663853,
      "loss": 0.7281,
      "step": 3130
    },
    {
      "epoch": 0.08176302609993838,
      "grad_norm": 0.1732487976551056,
      "learning_rate": 0.00018960697725168397,
      "loss": 0.7434,
      "step": 3140
    },
    {
      "epoch": 0.08202341790280442,
      "grad_norm": 0.1662171334028244,
      "learning_rate": 0.00018953273885648314,
      "loss": 0.716,
      "step": 3150
    },
    {
      "epoch": 0.08228380970567047,
      "grad_norm": 0.16129222512245178,
      "learning_rate": 0.00018945825089796797,
      "loss": 0.7318,
      "step": 3160
    },
    {
      "epoch": 0.08254420150853652,
      "grad_norm": 0.16837772727012634,
      "learning_rate": 0.00018938351358376596,
      "loss": 0.7137,
      "step": 3170
    },
    {
      "epoch": 0.08280459331140255,
      "grad_norm": 0.1618524193763733,
      "learning_rate": 0.00018930852712219974,
      "loss": 0.7079,
      "step": 3180
    },
    {
      "epoch": 0.0830649851142686,
      "grad_norm": 0.16333432495594025,
      "learning_rate": 0.00018923329172228632,
      "loss": 0.7062,
      "step": 3190
    },
    {
      "epoch": 0.08332537691713465,
      "grad_norm": 0.15985700488090515,
      "learning_rate": 0.00018915780759373672,
      "loss": 0.7277,
      "step": 3200
    },
    {
      "epoch": 0.08358576872000069,
      "grad_norm": 0.16181236505508423,
      "learning_rate": 0.0001890820749469551,
      "loss": 0.7048,
      "step": 3210
    },
    {
      "epoch": 0.08384616052286674,
      "grad_norm": 0.1679672747850418,
      "learning_rate": 0.00018900609399303853,
      "loss": 0.73,
      "step": 3220
    },
    {
      "epoch": 0.08410655232573279,
      "grad_norm": 0.16680286824703217,
      "learning_rate": 0.00018892986494377606,
      "loss": 0.7169,
      "step": 3230
    },
    {
      "epoch": 0.08436694412859883,
      "grad_norm": 0.15980315208435059,
      "learning_rate": 0.00018885338801164834,
      "loss": 0.7346,
      "step": 3240
    },
    {
      "epoch": 0.08462733593146488,
      "grad_norm": 0.16863352060317993,
      "learning_rate": 0.00018877666340982695,
      "loss": 0.7256,
      "step": 3250
    },
    {
      "epoch": 0.08488772773433093,
      "grad_norm": 0.1657836139202118,
      "learning_rate": 0.0001886996913521739,
      "loss": 0.7219,
      "step": 3260
    },
    {
      "epoch": 0.08514811953719698,
      "grad_norm": 0.1683470606803894,
      "learning_rate": 0.00018862247205324087,
      "loss": 0.7279,
      "step": 3270
    },
    {
      "epoch": 0.08540851134006301,
      "grad_norm": 0.1686122715473175,
      "learning_rate": 0.00018854500572826867,
      "loss": 0.7178,
      "step": 3280
    },
    {
      "epoch": 0.08566890314292906,
      "grad_norm": 0.16122782230377197,
      "learning_rate": 0.00018846729259318682,
      "loss": 0.7289,
      "step": 3290
    },
    {
      "epoch": 0.08592929494579511,
      "grad_norm": 0.1920589804649353,
      "learning_rate": 0.0001883893328646126,
      "loss": 0.7264,
      "step": 3300
    },
    {
      "epoch": 0.08618968674866115,
      "grad_norm": 0.17415335774421692,
      "learning_rate": 0.00018831112675985083,
      "loss": 0.7378,
      "step": 3310
    },
    {
      "epoch": 0.0864500785515272,
      "grad_norm": 0.16903561353683472,
      "learning_rate": 0.00018823267449689292,
      "loss": 0.702,
      "step": 3320
    },
    {
      "epoch": 0.08671047035439325,
      "grad_norm": 0.17158570885658264,
      "learning_rate": 0.00018815397629441658,
      "loss": 0.7264,
      "step": 3330
    },
    {
      "epoch": 0.08697086215725928,
      "grad_norm": 0.17094087600708008,
      "learning_rate": 0.0001880750323717849,
      "loss": 0.741,
      "step": 3340
    },
    {
      "epoch": 0.08723125396012534,
      "grad_norm": 0.1618223935365677,
      "learning_rate": 0.000187995842949046,
      "loss": 0.7084,
      "step": 3350
    },
    {
      "epoch": 0.08749164576299139,
      "grad_norm": 0.1671626716852188,
      "learning_rate": 0.0001879164082469322,
      "loss": 0.7236,
      "step": 3360
    },
    {
      "epoch": 0.08775203756585742,
      "grad_norm": 0.1681569665670395,
      "learning_rate": 0.00018783672848685966,
      "loss": 0.7382,
      "step": 3370
    },
    {
      "epoch": 0.08801242936872347,
      "grad_norm": 0.16421955823898315,
      "learning_rate": 0.00018775680389092748,
      "loss": 0.717,
      "step": 3380
    },
    {
      "epoch": 0.08827282117158952,
      "grad_norm": 0.174809530377388,
      "learning_rate": 0.00018767663468191725,
      "loss": 0.7225,
      "step": 3390
    },
    {
      "epoch": 0.08853321297445556,
      "grad_norm": 0.16181902587413788,
      "learning_rate": 0.00018759622108329243,
      "loss": 0.7014,
      "step": 3400
    },
    {
      "epoch": 0.08879360477732161,
      "grad_norm": 0.15579254925251007,
      "learning_rate": 0.0001875155633191977,
      "loss": 0.7125,
      "step": 3410
    },
    {
      "epoch": 0.08905399658018766,
      "grad_norm": 0.16342496871948242,
      "learning_rate": 0.00018743466161445823,
      "loss": 0.7075,
      "step": 3420
    },
    {
      "epoch": 0.08931438838305371,
      "grad_norm": 0.17215611040592194,
      "learning_rate": 0.00018735351619457923,
      "loss": 0.7331,
      "step": 3430
    },
    {
      "epoch": 0.08957478018591974,
      "grad_norm": 0.1682904213666916,
      "learning_rate": 0.00018727212728574522,
      "loss": 0.734,
      "step": 3440
    },
    {
      "epoch": 0.0898351719887858,
      "grad_norm": 0.16969889402389526,
      "learning_rate": 0.00018719049511481948,
      "loss": 0.7224,
      "step": 3450
    },
    {
      "epoch": 0.09009556379165184,
      "grad_norm": 0.16607950627803802,
      "learning_rate": 0.00018710861990934324,
      "loss": 0.7218,
      "step": 3460
    },
    {
      "epoch": 0.09035595559451788,
      "grad_norm": 0.16665585339069366,
      "learning_rate": 0.00018702650189753525,
      "loss": 0.7152,
      "step": 3470
    },
    {
      "epoch": 0.09061634739738393,
      "grad_norm": 0.16812992095947266,
      "learning_rate": 0.00018694414130829103,
      "loss": 0.7097,
      "step": 3480
    },
    {
      "epoch": 0.09087673920024998,
      "grad_norm": 0.16855508089065552,
      "learning_rate": 0.00018686153837118224,
      "loss": 0.7268,
      "step": 3490
    },
    {
      "epoch": 0.09113713100311602,
      "grad_norm": 0.1634734570980072,
      "learning_rate": 0.00018677869331645613,
      "loss": 0.7485,
      "step": 3500
    },
    {
      "epoch": 0.09139752280598207,
      "grad_norm": 0.17040792107582092,
      "learning_rate": 0.00018669560637503478,
      "loss": 0.7529,
      "step": 3510
    },
    {
      "epoch": 0.09165791460884812,
      "grad_norm": 0.16575287282466888,
      "learning_rate": 0.00018661227777851447,
      "loss": 0.7053,
      "step": 3520
    },
    {
      "epoch": 0.09191830641171415,
      "grad_norm": 0.16195647418498993,
      "learning_rate": 0.00018652870775916515,
      "loss": 0.7498,
      "step": 3530
    },
    {
      "epoch": 0.0921786982145802,
      "grad_norm": 0.16672903299331665,
      "learning_rate": 0.00018644489654992956,
      "loss": 0.7168,
      "step": 3540
    },
    {
      "epoch": 0.09243909001744625,
      "grad_norm": 0.16135342419147491,
      "learning_rate": 0.00018636084438442298,
      "loss": 0.6868,
      "step": 3550
    },
    {
      "epoch": 0.09269948182031229,
      "grad_norm": 0.16677817702293396,
      "learning_rate": 0.0001862765514969321,
      "loss": 0.7128,
      "step": 3560
    },
    {
      "epoch": 0.09295987362317834,
      "grad_norm": 0.16983947157859802,
      "learning_rate": 0.00018619201812241474,
      "loss": 0.7189,
      "step": 3570
    },
    {
      "epoch": 0.09322026542604439,
      "grad_norm": 0.17054444551467896,
      "learning_rate": 0.00018610724449649895,
      "loss": 0.6916,
      "step": 3580
    },
    {
      "epoch": 0.09348065722891043,
      "grad_norm": 0.1672518253326416,
      "learning_rate": 0.0001860222308554825,
      "loss": 0.7002,
      "step": 3590
    },
    {
      "epoch": 0.09374104903177648,
      "grad_norm": 0.16726090013980865,
      "learning_rate": 0.00018593697743633225,
      "loss": 0.7153,
      "step": 3600
    },
    {
      "epoch": 0.09400144083464253,
      "grad_norm": 0.16627268493175507,
      "learning_rate": 0.0001858514844766833,
      "loss": 0.7121,
      "step": 3610
    },
    {
      "epoch": 0.09426183263750858,
      "grad_norm": 0.16322413086891174,
      "learning_rate": 0.00018576575221483852,
      "loss": 0.7018,
      "step": 3620
    },
    {
      "epoch": 0.09452222444037461,
|
"grad_norm": 0.1642254739999771, |
|
"learning_rate": 0.00018567978088976776, |
|
"loss": 0.706, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.09478261624324066, |
|
"grad_norm": 0.17001990973949432, |
|
"learning_rate": 0.00018559357074110727, |
|
"loss": 0.7236, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.09504300804610671, |
|
"grad_norm": 0.166624054312706, |
|
"learning_rate": 0.00018550712200915897, |
|
"loss": 0.7129, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.09530339984897275, |
|
"grad_norm": 0.17228394746780396, |
|
"learning_rate": 0.00018542043493488986, |
|
"loss": 0.7096, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.0955637916518388, |
|
"grad_norm": 0.17133943736553192, |
|
"learning_rate": 0.0001853335097599312, |
|
"loss": 0.7164, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.09582418345470485, |
|
"grad_norm": 0.16100934147834778, |
|
"learning_rate": 0.00018524634672657805, |
|
"loss": 0.711, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.09608457525757089, |
|
"grad_norm": 0.17711074650287628, |
|
"learning_rate": 0.00018515894607778837, |
|
"loss": 0.7153, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.09634496706043694, |
|
"grad_norm": 0.20850953459739685, |
|
"learning_rate": 0.00018507130805718252, |
|
"loss": 0.7309, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.09660535886330299, |
|
"grad_norm": 0.15805912017822266, |
|
"learning_rate": 0.00018498343290904246, |
|
"loss": 0.72, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.09686575066616902, |
|
"grad_norm": 0.17850877344608307, |
|
"learning_rate": 0.00018489532087831114, |
|
"loss": 0.7235, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.09712614246903507, |
|
"grad_norm": 0.16705553233623505, |
|
"learning_rate": 0.0001848069722105918, |
|
"loss": 0.7233, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.09738653427190112, |
|
"grad_norm": 0.17024511098861694, |
|
"learning_rate": 0.0001847183871521473, |
|
"loss": 0.7127, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.09764692607476716, |
|
"grad_norm": 0.16217222809791565, |
|
"learning_rate": 0.0001846295659498994, |
|
"loss": 0.6844, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.09790731787763321, |
|
"grad_norm": 0.20443694293498993, |
|
"learning_rate": 0.00018454050885142803, |
|
"loss": 0.7327, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.09816770968049926, |
|
"grad_norm": 0.16370315849781036, |
|
"learning_rate": 0.00018445121610497072, |
|
"loss": 0.7191, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.09842810148336531, |
|
"grad_norm": 0.16387638449668884, |
|
"learning_rate": 0.00018436168795942187, |
|
"loss": 0.724, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.09868849328623135, |
|
"grad_norm": 0.1664215326309204, |
|
"learning_rate": 0.00018427192466433193, |
|
"loss": 0.6955, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.0989488850890974, |
|
"grad_norm": 0.17285983264446259, |
|
"learning_rate": 0.0001841819264699069, |
|
"loss": 0.7063, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.09920927689196345, |
|
"grad_norm": 0.1637914627790451, |
|
"learning_rate": 0.00018409169362700753, |
|
"loss": 0.7211, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.09946966869482948, |
|
"grad_norm": 0.1695072054862976, |
|
"learning_rate": 0.00018400122638714855, |
|
"loss": 0.7172, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.09973006049769553, |
|
"grad_norm": 0.16097985208034515, |
|
"learning_rate": 0.00018391052500249812, |
|
"loss": 0.7073, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.09999045230056158, |
|
"grad_norm": 0.16112865507602692, |
|
"learning_rate": 0.00018381958972587707, |
|
"loss": 0.7126, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.10025084410342762, |
|
"grad_norm": 0.16681082546710968, |
|
"learning_rate": 0.0001837284208107581, |
|
"loss": 0.7331, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.10051123590629367, |
|
"grad_norm": 0.16337643563747406, |
|
"learning_rate": 0.0001836370185112652, |
|
"loss": 0.7185, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.10077162770915972, |
|
"grad_norm": 0.16530117392539978, |
|
"learning_rate": 0.0001835453830821729, |
|
"loss": 0.7058, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.10103201951202576, |
|
"grad_norm": 0.1671489030122757, |
|
"learning_rate": 0.00018345351477890554, |
|
"loss": 0.7225, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.1012924113148918, |
|
"grad_norm": 0.1631312221288681, |
|
"learning_rate": 0.00018336141385753661, |
|
"loss": 0.7189, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.10155280311775786, |
|
"grad_norm": 0.16685089468955994, |
|
"learning_rate": 0.00018326908057478795, |
|
"loss": 0.7076, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.10181319492062389, |
|
"grad_norm": 0.16232247650623322, |
|
"learning_rate": 0.00018317651518802913, |
|
"loss": 0.7168, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.10207358672348994, |
|
"grad_norm": 0.16136577725410461, |
|
"learning_rate": 0.0001830837179552766, |
|
"loss": 0.7335, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.10233397852635599, |
|
"grad_norm": 0.17820622026920319, |
|
"learning_rate": 0.00018299068913519312, |
|
"loss": 0.7071, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.10259437032922204, |
|
"grad_norm": 0.15879027545452118, |
|
"learning_rate": 0.00018289742898708707, |
|
"loss": 0.7301, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.10285476213208808, |
|
"grad_norm": 0.1569281965494156, |
|
"learning_rate": 0.00018280393777091143, |
|
"loss": 0.7018, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.10311515393495413, |
|
"grad_norm": 0.15729983150959015, |
|
"learning_rate": 0.00018271021574726336, |
|
"loss": 0.717, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.10337554573782018, |
|
"grad_norm": 0.1715734452009201, |
|
"learning_rate": 0.00018261626317738348, |
|
"loss": 0.7008, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.10363593754068622, |
|
"grad_norm": 0.16048088669776917, |
|
"learning_rate": 0.00018252208032315482, |
|
"loss": 0.7108, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.10389632934355227, |
|
"grad_norm": 0.1652391403913498, |
|
"learning_rate": 0.00018242766744710248, |
|
"loss": 0.7164, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.10415672114641832, |
|
"grad_norm": 0.17617185413837433, |
|
"learning_rate": 0.0001823330248123926, |
|
"loss": 0.7152, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.10441711294928435, |
|
"grad_norm": 0.16789917647838593, |
|
"learning_rate": 0.00018223815268283186, |
|
"loss": 0.742, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.1046775047521504, |
|
"grad_norm": 0.15830789506435394, |
|
"learning_rate": 0.00018214305132286656, |
|
"loss": 0.7127, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.10493789655501645, |
|
"grad_norm": 0.16791877150535583, |
|
"learning_rate": 0.00018204772099758194, |
|
"loss": 0.7173, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.10519828835788249, |
|
"grad_norm": 0.1708458811044693, |
|
"learning_rate": 0.00018195216197270156, |
|
"loss": 0.7152, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.10545868016074854, |
|
"grad_norm": 0.16406644880771637, |
|
"learning_rate": 0.0001818563745145863, |
|
"loss": 0.709, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.10571907196361459, |
|
"grad_norm": 0.16491225361824036, |
|
"learning_rate": 0.00018176035889023396, |
|
"loss": 0.6957, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.10597946376648062, |
|
"grad_norm": 0.18035203218460083, |
|
"learning_rate": 0.00018166411536727812, |
|
"loss": 0.7166, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.10623985556934668, |
|
"grad_norm": 0.1827452927827835, |
|
"learning_rate": 0.0001815676442139878, |
|
"loss": 0.7359, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.10650024737221273, |
|
"grad_norm": 0.17651152610778809, |
|
"learning_rate": 0.00018147094569926644, |
|
"loss": 0.7055, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.10676063917507878, |
|
"grad_norm": 0.16017574071884155, |
|
"learning_rate": 0.00018137402009265114, |
|
"loss": 0.7175, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.10702103097794481, |
|
"grad_norm": 0.16183608770370483, |
|
"learning_rate": 0.00018127686766431213, |
|
"loss": 0.7099, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.10728142278081086, |
|
"grad_norm": 0.173137828707695, |
|
"learning_rate": 0.00018117948868505182, |
|
"loss": 0.7139, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.10754181458367691, |
|
"grad_norm": 0.1736387312412262, |
|
"learning_rate": 0.00018108188342630413, |
|
"loss": 0.7219, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.10780220638654295, |
|
"grad_norm": 0.1679358333349228, |
|
"learning_rate": 0.0001809840521601337, |
|
"loss": 0.7057, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.108062598189409, |
|
"grad_norm": 0.16037985682487488, |
|
"learning_rate": 0.00018088599515923503, |
|
"loss": 0.7111, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.10832298999227505, |
|
"grad_norm": 0.15554189682006836, |
|
"learning_rate": 0.00018078771269693209, |
|
"loss": 0.6978, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.10858338179514108, |
|
"grad_norm": 0.1662927269935608, |
|
"learning_rate": 0.00018068920504717704, |
|
"loss": 0.7016, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.10884377359800713, |
|
"grad_norm": 0.168484628200531, |
|
"learning_rate": 0.0001805904724845499, |
|
"loss": 0.695, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.10910416540087318, |
|
"grad_norm": 0.18269601464271545, |
|
"learning_rate": 0.0001804915152842575, |
|
"loss": 0.7194, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.10936455720373922, |
|
"grad_norm": 0.17641094326972961, |
|
"learning_rate": 0.00018039233372213294, |
|
"loss": 0.7209, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.10962494900660527, |
|
"grad_norm": 0.18733248114585876, |
|
"learning_rate": 0.00018029292807463456, |
|
"loss": 0.7089, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.10988534080947132, |
|
"grad_norm": 0.16925212740898132, |
|
"learning_rate": 0.00018019329861884543, |
|
"loss": 0.7097, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.11014573261233736, |
|
"grad_norm": 0.15558676421642303, |
|
"learning_rate": 0.00018009344563247245, |
|
"loss": 0.6766, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.11040612441520341, |
|
"grad_norm": 0.16530027985572815, |
|
"learning_rate": 0.00017999336939384555, |
|
"loss": 0.7141, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.11066651621806946, |
|
"grad_norm": 0.16560573875904083, |
|
"learning_rate": 0.00017989307018191693, |
|
"loss": 0.6989, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.1109269080209355, |
|
"grad_norm": 0.17466457188129425, |
|
"learning_rate": 0.00017979254827626037, |
|
"loss": 0.7206, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.11118729982380154, |
|
"grad_norm": 0.18086989223957062, |
|
"learning_rate": 0.00017969180395707038, |
|
"loss": 0.714, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.1114476916266676, |
|
"grad_norm": 1.1102476119995117, |
|
"learning_rate": 0.00017959083750516137, |
|
"loss": 0.723, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.11170808342953364, |
|
"grad_norm": 0.19353629648685455, |
|
"learning_rate": 0.00017948964920196697, |
|
"loss": 0.718, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.11196847523239968, |
|
"grad_norm": 0.2126789540052414, |
|
"learning_rate": 0.00017938823932953915, |
|
"loss": 0.7171, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.11222886703526573, |
|
"grad_norm": 0.16679035127162933, |
|
"learning_rate": 0.00017928660817054758, |
|
"loss": 0.7113, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.11248925883813178, |
|
"grad_norm": 0.1647113859653473, |
|
"learning_rate": 0.00017918475600827861, |
|
"loss": 0.7184, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.11274965064099782, |
|
"grad_norm": 0.16823258996009827, |
|
"learning_rate": 0.00017908268312663475, |
|
"loss": 0.6876, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.11301004244386387, |
|
"grad_norm": 0.17199194431304932, |
|
"learning_rate": 0.0001789803898101336, |
|
"loss": 0.6954, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.11327043424672992, |
|
"grad_norm": 0.16541388630867004, |
|
"learning_rate": 0.00017887787634390733, |
|
"loss": 0.709, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.11353082604959595, |
|
"grad_norm": 0.17238549888134003, |
|
"learning_rate": 0.00017877514301370168, |
|
"loss": 0.7056, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.113791217852462, |
|
"grad_norm": 0.18374156951904297, |
|
"learning_rate": 0.00017867219010587527, |
|
"loss": 0.6951, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.11405160965532805, |
|
"grad_norm": 0.15940402448177338, |
|
"learning_rate": 0.00017856901790739874, |
|
"loss": 0.6873, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.11431200145819409, |
|
"grad_norm": 0.16984274983406067, |
|
"learning_rate": 0.00017846562670585398, |
|
"loss": 0.7065, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.11457239326106014, |
|
"grad_norm": 0.9227787852287292, |
|
"learning_rate": 0.0001783620167894334, |
|
"loss": 0.7591, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.11483278506392619, |
|
"grad_norm": 0.1881164163351059, |
|
"learning_rate": 0.00017825818844693892, |
|
"loss": 0.7133, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.11509317686679223, |
|
"grad_norm": 0.20252032577991486, |
|
"learning_rate": 0.00017815414196778147, |
|
"loss": 0.7145, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.11535356866965828, |
|
"grad_norm": 0.23380456864833832, |
|
"learning_rate": 0.0001780498776419799, |
|
"loss": 0.7232, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.11561396047252433, |
|
"grad_norm": 0.1666252464056015, |
|
"learning_rate": 0.00017794539576016028, |
|
"loss": 0.7224, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.11587435227539038, |
|
"grad_norm": 0.2421305775642395, |
|
"learning_rate": 0.0001778406966135551, |
|
"loss": 0.7031, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.11613474407825641, |
|
"grad_norm": 0.17607644200325012, |
|
"learning_rate": 0.00017773578049400256, |
|
"loss": 0.7162, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.11639513588112246, |
|
"grad_norm": 0.1905539333820343, |
|
"learning_rate": 0.00017763064769394553, |
|
"loss": 0.7087, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.11665552768398851, |
|
"grad_norm": 0.17976076900959015, |
|
"learning_rate": 0.00017752529850643088, |
|
"loss": 0.7274, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.11691591948685455, |
|
"grad_norm": 0.16786770522594452, |
|
"learning_rate": 0.00017741973322510864, |
|
"loss": 0.7003, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.1171763112897206, |
|
"grad_norm": 0.1559310108423233, |
|
"learning_rate": 0.00017731395214423126, |
|
"loss": 0.7279, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.11743670309258665, |
|
"grad_norm": 0.16374662518501282, |
|
"learning_rate": 0.00017720795555865255, |
|
"loss": 0.6924, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.11769709489545269, |
|
"grad_norm": 0.1647823303937912, |
|
"learning_rate": 0.00017710174376382712, |
|
"loss": 0.7224, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.11795748669831874, |
|
"grad_norm": 0.18070833384990692, |
|
"learning_rate": 0.0001769953170558095, |
|
"loss": 0.7223, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.11821787850118479, |
|
"grad_norm": 0.16518941521644592, |
|
"learning_rate": 0.0001768886757312532, |
|
"loss": 0.7153, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.11847827030405082, |
|
"grad_norm": 0.17081689834594727, |
|
"learning_rate": 0.0001767818200874099, |
|
"loss": 0.7197, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.11873866210691687, |
|
"grad_norm": 0.16503626108169556, |
|
"learning_rate": 0.00017667475042212874, |
|
"loss": 0.7222, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.11899905390978292, |
|
"grad_norm": 0.17310748994350433, |
|
"learning_rate": 0.00017656746703385547, |
|
"loss": 0.7057, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.11925944571264896, |
|
"grad_norm": 0.1613416224718094, |
|
"learning_rate": 0.0001764599702216315, |
|
"loss": 0.7133, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.11951983751551501, |
|
"grad_norm": 0.184098020195961, |
|
"learning_rate": 0.00017635226028509313, |
|
"loss": 0.72, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.11978022931838106, |
|
"grad_norm": 0.1819763481616974, |
|
"learning_rate": 0.00017624433752447076, |
|
"loss": 0.7154, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.12004062112124711, |
|
"grad_norm": 0.17078550159931183, |
|
"learning_rate": 0.000176136202240588, |
|
"loss": 0.7205, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.12030101292411315, |
|
"grad_norm": 0.16961923241615295, |
|
"learning_rate": 0.00017602785473486084, |
|
"loss": 0.6997, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.1205614047269792, |
|
"grad_norm": 0.18945324420928955, |
|
"learning_rate": 0.00017591929530929684, |
|
"loss": 0.7295, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.12082179652984525, |
|
"grad_norm": 0.1690925508737564, |
|
"learning_rate": 0.00017581052426649424, |
|
"loss": 0.714, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.12108218833271128, |
|
"grad_norm": 0.16739274561405182, |
|
"learning_rate": 0.00017570154190964113, |
|
"loss": 0.7018, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.12134258013557733, |
|
"grad_norm": 0.17123165726661682, |
|
"learning_rate": 0.0001755923485425147, |
|
"loss": 0.7288, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.12160297193844338, |
|
"grad_norm": 0.17006556689739227, |
|
"learning_rate": 0.0001754829444694802, |
|
"loss": 0.7185, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.12186336374130942, |
|
"grad_norm": 0.17169633507728577, |
|
"learning_rate": 0.00017537332999549023, |
|
"loss": 0.7169, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.12212375554417547, |
|
"grad_norm": 0.17851942777633667, |
|
"learning_rate": 0.00017526350542608393, |
|
"loss": 0.7138, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.12238414734704152, |
|
"grad_norm": 0.16988542675971985, |
|
"learning_rate": 0.00017515347106738597, |
|
"loss": 0.7188, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.12264453914990756, |
|
"grad_norm": 0.16778436303138733, |
|
"learning_rate": 0.00017504322722610582, |
|
"loss": 0.7034, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.1229049309527736, |
|
"grad_norm": 0.1835828721523285, |
|
"learning_rate": 0.00017493277420953688, |
|
"loss": 0.7084, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.12316532275563966, |
|
"grad_norm": 0.16590015590190887, |
|
"learning_rate": 0.0001748221123255556, |
|
"loss": 0.7023, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.12342571455850569, |
|
"grad_norm": 0.16521744430065155, |
|
"learning_rate": 0.00017471124188262062, |
|
"loss": 0.7224, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.12368610636137174, |
|
"grad_norm": 0.17280088365077972, |
|
"learning_rate": 0.00017460016318977188, |
|
"loss": 0.7087, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.12394649816423779, |
|
"grad_norm": 0.17343877255916595, |
|
"learning_rate": 0.00017448887655662984, |
|
"loss": 0.7212, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.12420688996710384, |
|
"grad_norm": 0.17116770148277283, |
|
"learning_rate": 0.0001743773822933946, |
|
"loss": 0.7289, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.12446728176996988, |
|
"grad_norm": 0.17440655827522278, |
|
"learning_rate": 0.0001742656807108449, |
|
"loss": 0.7353, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.12472767357283593, |
|
"grad_norm": 0.17304064333438873, |
|
"learning_rate": 0.0001741537721203375, |
|
"loss": 0.7322, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.12498806537570198, |
|
"grad_norm": 0.17076118290424347, |
|
"learning_rate": 0.00017404165683380604, |
|
"loss": 0.7199, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.12524845717856803, |
|
"grad_norm": 0.17371006309986115, |
|
"learning_rate": 0.0001739293351637604, |
|
"loss": 0.7035, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.12550884898143405, |
|
"grad_norm": 0.16811051964759827, |
|
"learning_rate": 0.0001738168074232857, |
|
"loss": 0.7113, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.1257692407843001, |
|
"grad_norm": 0.17408473789691925, |
|
"learning_rate": 0.00017370407392604145, |
|
"loss": 0.7151, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.12602963258716615, |
|
"grad_norm": 0.16915513575077057, |
|
"learning_rate": 0.0001735911349862607, |
|
"loss": 0.7089, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.1262900243900322, |
|
"grad_norm": 0.17281986773014069, |
|
"learning_rate": 0.00017347799091874913, |
|
"loss": 0.6935, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.12655041619289825, |
|
"grad_norm": 0.16271603107452393, |
|
"learning_rate": 0.00017336464203888426, |
|
"loss": 0.7131, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.1268108079957643, |
|
"grad_norm": 0.1661551147699356, |
|
"learning_rate": 0.00017325108866261439, |
|
"loss": 0.6899, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.12707119979863035, |
|
"grad_norm": 0.16490161418914795, |
|
"learning_rate": 0.00017313733110645792, |
|
"loss": 0.7042, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.12733159160149637, |
|
"grad_norm": 0.17232102155685425, |
|
"learning_rate": 0.00017302336968750236, |
|
"loss": 0.6965, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.12759198340436242, |
|
"grad_norm": 0.17850111424922943, |
|
"learning_rate": 0.00017290920472340347, |
|
"loss": 0.7185, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.12785237520722847, |
|
"grad_norm": 0.16296492516994476, |
|
"learning_rate": 0.00017279483653238434, |
|
"loss": 0.7179, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.12811276701009452, |
|
"grad_norm": 0.1704237312078476, |
|
"learning_rate": 0.00017268026543323454, |
|
"loss": 0.7103, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.12837315881296057, |
|
"grad_norm": 0.18265944719314575, |
|
"learning_rate": 0.00017256549174530928, |
|
"loss": 0.6988, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.12863355061582663, |
|
"grad_norm": 0.17257574200630188, |
|
"learning_rate": 0.0001724505157885284, |
|
"loss": 0.7231, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.12889394241869265, |
|
"grad_norm": 0.1703302413225174, |
|
"learning_rate": 0.0001723353378833756, |
|
"loss": 0.7192, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.1291543342215587, |
|
"grad_norm": 0.16430538892745972, |
|
"learning_rate": 0.00017221995835089743, |
|
"loss": 0.7007, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.12941472602442475, |
|
"grad_norm": 0.162199467420578, |
|
"learning_rate": 0.0001721043775127025, |
|
"loss": 0.7123, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.1296751178272908, |
|
"grad_norm": 0.1680443435907364, |
|
"learning_rate": 0.00017198859569096057, |
|
"loss": 0.6976, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.12993550963015685, |
|
"grad_norm": 0.16768567264080048, |
|
"learning_rate": 0.0001718726132084015, |
|
"loss": 0.6942, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.1301959014330229, |
|
"grad_norm": 0.16759110987186432, |
|
"learning_rate": 0.00017175643038831463, |
|
"loss": 0.6995, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.13045629323588892, |
|
"grad_norm": 0.17288459837436676, |
|
"learning_rate": 0.00017164004755454754, |
|
"loss": 0.7193, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.13071668503875497, |
|
"grad_norm": 0.18251213431358337, |
|
"learning_rate": 0.00017152346503150552, |
|
"loss": 0.7156, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.13097707684162102, |
|
"grad_norm": 0.17177079617977142, |
|
"learning_rate": 0.0001714066831441503, |
|
"loss": 0.7354, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.13123746864448707, |
|
"grad_norm": 0.18522126972675323, |
|
"learning_rate": 0.00017128970221799947, |
|
"loss": 0.717, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.13149786044735312, |
|
"grad_norm": 0.17539465427398682, |
|
"learning_rate": 0.00017117252257912532, |
|
"loss": 0.7204, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.13175825225021917, |
|
"grad_norm": 0.16526655852794647, |
|
"learning_rate": 0.00017105514455415403, |
|
"loss": 0.7099, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.13201864405308522, |
|
"grad_norm": 0.1709279865026474, |
|
"learning_rate": 0.00017093756847026483, |
|
"loss": 0.7205, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.13227903585595124, |
|
"grad_norm": 0.16695177555084229, |
|
"learning_rate": 0.00017081979465518896, |
|
"loss": 0.7092, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.1325394276588173, |
|
"grad_norm": 0.1795840859413147, |
|
"learning_rate": 0.00017070182343720882, |
|
"loss": 0.6983, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.13279981946168334, |
|
"grad_norm": 0.17300280928611755, |
|
"learning_rate": 0.0001705836551451571, |
|
"loss": 0.7018, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.1330602112645494, |
|
"grad_norm": 0.16966886818408966, |
|
"learning_rate": 0.00017046529010841574, |
|
"loss": 0.708, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.13332060306741544, |
|
"grad_norm": 0.18426869809627533, |
|
"learning_rate": 0.00017034672865691515, |
|
"loss": 0.7118, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.1335809948702815, |
|
"grad_norm": 0.1808805912733078, |
|
"learning_rate": 0.00017022797112113314, |
|
"loss": 0.7041, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.13384138667314752, |
|
"grad_norm": 0.17788094282150269, |
|
"learning_rate": 0.00017010901783209427, |
|
"loss": 0.7032, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.13410177847601357, |
|
"grad_norm": 0.17427201569080353, |
|
"learning_rate": 0.00016998986912136852, |
|
"loss": 0.7194, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.13436217027887962, |
|
"grad_norm": 0.17783771455287933, |
|
"learning_rate": 0.0001698705253210707, |
|
"loss": 0.705, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.13462256208174567, |
|
"grad_norm": 0.16358746588230133, |
|
"learning_rate": 0.0001697509867638594, |
|
"loss": 0.7118, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.13488295388461172, |
|
"grad_norm": 0.17140185832977295, |
|
"learning_rate": 0.0001696312537829361, |
|
"loss": 0.6929, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.13514334568747777, |
|
"grad_norm": 0.1690370887517929, |
|
"learning_rate": 0.0001695113267120441, |
|
"loss": 0.7044, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.13540373749034382, |
|
"grad_norm": 0.16261689364910126, |
|
"learning_rate": 0.0001693912058854679, |
|
"loss": 0.6933, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.13566412929320984, |
|
"grad_norm": 0.16243134438991547, |
|
"learning_rate": 0.00016927089163803192, |
|
"loss": 0.7009, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.1359245210960759, |
|
"grad_norm": 0.17476530373096466, |
|
"learning_rate": 0.00016915038430509978, |
|
"loss": 0.7215, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.13618491289894194, |
|
"grad_norm": 0.167569100856781, |
|
"learning_rate": 0.00016902968422257325, |
|
"loss": 0.6975, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.136445304701808, |
|
"grad_norm": 0.1719943732023239, |
|
"learning_rate": 0.00016890879172689146, |
|
"loss": 0.7324, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.13670569650467404, |
|
"grad_norm": 0.167982816696167, |
|
"learning_rate": 0.00016878770715502984, |
|
"loss": 0.6884, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.1369660883075401, |
|
"grad_norm": 0.1711047738790512, |
|
"learning_rate": 0.00016866643084449914, |
|
"loss": 0.7164, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.1372264801104061, |
|
"grad_norm": 0.17784222960472107, |
|
"learning_rate": 0.00016854496313334466, |
|
"loss": 0.6987, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.13748687191327216, |
|
"grad_norm": 0.16513650119304657, |
|
"learning_rate": 0.00016842330436014517, |
|
"loss": 0.7078, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.1377472637161382, |
|
"grad_norm": 0.17001712322235107, |
|
"learning_rate": 0.000168301454864012, |
|
"loss": 0.7053, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.13800765551900426, |
|
"grad_norm": 0.1930648535490036, |
|
"learning_rate": 0.0001681794149845881, |
|
"loss": 0.6954, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.1382680473218703, |
|
"grad_norm": 0.1823079138994217, |
|
"learning_rate": 0.00016805718506204711, |
|
"loss": 0.7325, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.13852843912473636, |
|
"grad_norm": 0.16231094300746918, |
|
"learning_rate": 0.00016793476543709238, |
|
"loss": 0.7074, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.13878883092760239, |
|
"grad_norm": 0.16010914742946625, |
|
"learning_rate": 0.00016781215645095604, |
|
"loss": 0.6946, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.13904922273046844, |
|
"grad_norm": 0.17128457129001617, |
|
"learning_rate": 0.00016768935844539804, |
|
"loss": 0.7116, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.13930961453333449, |
|
"grad_norm": 0.17700603604316711, |
|
"learning_rate": 0.00016756637176270522, |
|
"loss": 0.7276, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.13957000633620054, |
|
"grad_norm": 0.16596612334251404, |
|
"learning_rate": 0.00016744319674569033, |
|
"loss": 0.6683, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.1398303981390666, |
|
"grad_norm": 0.17127594351768494, |
|
"learning_rate": 0.00016731983373769106, |
|
"loss": 0.7053, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.14009078994193264, |
|
"grad_norm": 0.17723415791988373, |
|
"learning_rate": 0.00016719628308256915, |
|
"loss": 0.7294, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.1403511817447987, |
|
"grad_norm": 0.16194766759872437, |
|
"learning_rate": 0.00016707254512470936, |
|
"loss": 0.7164, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.1406115735476647, |
|
"grad_norm": 0.17294222116470337, |
|
"learning_rate": 0.00016694862020901852, |
|
"loss": 0.7084, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.14087196535053076, |
|
"grad_norm": 0.17684406042099, |
|
"learning_rate": 0.00016682450868092463, |
|
"loss": 0.7141, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.1411323571533968, |
|
"grad_norm": 0.16746504604816437, |
|
"learning_rate": 0.00016670021088637579, |
|
"loss": 0.7336, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.14139274895626286, |
|
"grad_norm": 0.2371237874031067, |
|
"learning_rate": 0.00016657572717183937, |
|
"loss": 0.7001, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.1416531407591289, |
|
"grad_norm": 0.16818630695343018, |
|
"learning_rate": 0.00016645105788430095, |
|
"loss": 0.7124, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.14191353256199496, |
|
"grad_norm": 0.1719949096441269, |
|
"learning_rate": 0.00016632620337126333, |
|
"loss": 0.7169, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.14217392436486098, |
|
"grad_norm": 0.1754136085510254, |
|
"learning_rate": 0.00016620116398074567, |
|
"loss": 0.6966, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.14243431616772703, |
|
"grad_norm": 0.1630551517009735, |
|
"learning_rate": 0.0001660759400612824, |
|
"loss": 0.695, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.14269470797059308, |
|
"grad_norm": 0.16431647539138794, |
|
"learning_rate": 0.00016595053196192234, |
|
"loss": 0.6983, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.14295509977345913, |
|
"grad_norm": 0.1706673949956894, |
|
"learning_rate": 0.00016582494003222772, |
|
"loss": 0.7025, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.14321549157632518, |
|
"grad_norm": 0.1757555902004242, |
|
"learning_rate": 0.00016569916462227312, |
|
"loss": 0.7159, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.14347588337919123, |
|
"grad_norm": 0.1697273999452591, |
|
"learning_rate": 0.0001655732060826446, |
|
"loss": 0.7112, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.14373627518205728, |
|
"grad_norm": 0.17706693708896637, |
|
"learning_rate": 0.00016544706476443862, |
|
"loss": 0.746, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.1439966669849233, |
|
"grad_norm": 0.17265696823596954, |
|
"learning_rate": 0.00016532074101926117, |
|
"loss": 0.7276, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.14425705878778936, |
|
"grad_norm": 0.16773734986782074, |
|
"learning_rate": 0.00016519423519922668, |
|
"loss": 0.6944, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.1445174505906554, |
|
"grad_norm": 0.17664338648319244, |
|
"learning_rate": 0.0001650675476569572, |
|
"loss": 0.6953, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.14477784239352146, |
|
"grad_norm": 0.17331312596797943, |
|
"learning_rate": 0.00016494067874558117, |
|
"loss": 0.7113, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.1450382341963875, |
|
"grad_norm": 0.1759510487318039, |
|
"learning_rate": 0.00016481362881873272, |
|
"loss": 0.71, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.14529862599925356, |
|
"grad_norm": 0.18095999956130981, |
|
"learning_rate": 0.00016468639823055044, |
|
"loss": 0.7255, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.14555901780211958, |
|
"grad_norm": 0.17434170842170715, |
|
"learning_rate": 0.0001645589873356765, |
|
"loss": 0.6989, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.14581940960498563, |
|
"grad_norm": 0.16754081845283508, |
|
"learning_rate": 0.00016443139648925572, |
|
"loss": 0.7196, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.14607980140785168, |
|
"grad_norm": 0.17215152084827423, |
|
"learning_rate": 0.00016430362604693448, |
|
"loss": 0.7104, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.14634019321071773, |
|
"grad_norm": 0.17826251685619354, |
|
"learning_rate": 0.00016417567636485973, |
|
"loss": 0.7142, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.14660058501358378, |
|
"grad_norm": 0.17078974843025208, |
|
"learning_rate": 0.00016404754779967813, |
|
"loss": 0.717, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.14686097681644983, |
|
"grad_norm": 0.18085996806621552, |
|
"learning_rate": 0.0001639192407085348, |
|
"loss": 0.711, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.14712136861931585, |
|
"grad_norm": 0.19176393747329712, |
|
"learning_rate": 0.00016379075544907268, |
|
"loss": 0.699, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.1473817604221819, |
|
"grad_norm": 0.17717042565345764, |
|
"learning_rate": 0.00016366209237943122, |
|
"loss": 0.7069, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.14764215222504795, |
|
"grad_norm": 0.17190402746200562, |
|
"learning_rate": 0.0001635332518582455, |
|
"loss": 0.6988, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.147902544027914, |
|
"grad_norm": 0.18004834651947021, |
|
"learning_rate": 0.00016340423424464516, |
|
"loss": 0.7196, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.14816293583078005, |
|
"grad_norm": 0.17457543313503265, |
|
"learning_rate": 0.00016327503989825364, |
|
"loss": 0.7109, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.1484233276336461, |
|
"grad_norm": 0.1904648393392563, |
|
"learning_rate": 0.00016314566917918693, |
|
"loss": 0.6874, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.14868371943651215, |
|
"grad_norm": 0.18096795678138733, |
|
"learning_rate": 0.00016301612244805252, |
|
"loss": 0.7154, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.14894411123937817, |
|
"grad_norm": 0.17521658539772034, |
|
"learning_rate": 0.00016288640006594874, |
|
"loss": 0.703, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.14920450304224422, |
|
"grad_norm": 0.180609330534935, |
|
"learning_rate": 0.00016275650239446328, |
|
"loss": 0.7033, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.14946489484511027, |
|
"grad_norm": 0.19524458050727844, |
|
"learning_rate": 0.00016262642979567268, |
|
"loss": 0.6925, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.14972528664797632, |
|
"grad_norm": 0.16790035367012024, |
|
"learning_rate": 0.00016249618263214087, |
|
"loss": 0.7104, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.14998567845084237, |
|
"grad_norm": 0.16895075142383575, |
|
"learning_rate": 0.00016236576126691843, |
|
"loss": 0.7126, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.15024607025370842, |
|
"grad_norm": 0.18354611098766327, |
|
"learning_rate": 0.00016223516606354163, |
|
"loss": 0.7143, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.15050646205657445, |
|
"grad_norm": 0.1766786128282547, |
|
"learning_rate": 0.00016210439738603108, |
|
"loss": 0.7164, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.1507668538594405, |
|
"grad_norm": 0.17794294655323029, |
|
"learning_rate": 0.00016197345559889107, |
|
"loss": 0.6919, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.15102724566230655, |
|
"grad_norm": 0.1671450436115265, |
|
"learning_rate": 0.00016184234106710838, |
|
"loss": 0.7048, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.1512876374651726, |
|
"grad_norm": 0.16732144355773926, |
|
"learning_rate": 0.00016171105415615134, |
|
"loss": 0.7168, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.15154802926803865, |
|
"grad_norm": 0.16594409942626953, |
|
"learning_rate": 0.00016157959523196866, |
|
"loss": 0.6942, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.1518084210709047, |
|
"grad_norm": 0.173036128282547, |
|
"learning_rate": 0.00016144796466098865, |
|
"loss": 0.7093, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.15206881287377072, |
|
"grad_norm": 0.17697438597679138, |
|
"learning_rate": 0.00016131616281011798, |
|
"loss": 0.7122, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.15232920467663677, |
|
"grad_norm": 0.1677410751581192, |
|
"learning_rate": 0.0001611841900467408, |
|
"loss": 0.7027, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.15258959647950282, |
|
"grad_norm": 0.16899007558822632, |
|
"learning_rate": 0.0001610520467387176, |
|
"loss": 0.6949, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.15284998828236887, |
|
"grad_norm": 0.17043690383434296, |
|
"learning_rate": 0.00016091973325438428, |
|
"loss": 0.7052, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.15311038008523492, |
|
"grad_norm": 0.17432864010334015, |
|
"learning_rate": 0.00016078724996255114, |
|
"loss": 0.7026, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.15337077188810097, |
|
"grad_norm": 0.17059588432312012, |
|
"learning_rate": 0.00016065459723250164, |
|
"loss": 0.7234, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.15363116369096702, |
|
"grad_norm": 0.16677695512771606, |
|
"learning_rate": 0.0001605217754339918, |
|
"loss": 0.7281, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.15389155549383304, |
|
"grad_norm": 0.16831070184707642, |
|
"learning_rate": 0.00016038878493724858, |
|
"loss": 0.6934, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.1541519472966991, |
|
"grad_norm": 0.17344878613948822, |
|
"learning_rate": 0.00016025562611296946, |
|
"loss": 0.7096, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.15441233909956514, |
|
"grad_norm": 0.16978445649147034, |
|
"learning_rate": 0.0001601222993323209, |
|
"loss": 0.6851, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.1546727309024312, |
|
"grad_norm": 0.17398668825626373, |
|
"learning_rate": 0.00015998880496693766, |
|
"loss": 0.7022, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.15493312270529724, |
|
"grad_norm": 0.16338910162448883, |
|
"learning_rate": 0.00015985514338892154, |
|
"loss": 0.7257, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.1551935145081633, |
|
"grad_norm": 0.1754075437784195, |
|
"learning_rate": 0.0001597213149708405, |
|
"loss": 0.7012, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.15545390631102932, |
|
"grad_norm": 0.16998423635959625, |
|
"learning_rate": 0.00015958732008572744, |
|
"loss": 0.7313, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.15571429811389537, |
|
"grad_norm": 0.18673604726791382, |
|
"learning_rate": 0.00015945315910707945, |
|
"loss": 0.6923, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.15597468991676142, |
|
"grad_norm": 0.17040878534317017, |
|
"learning_rate": 0.0001593188324088564, |
|
"loss": 0.7009, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.15623508171962747, |
|
"grad_norm": 0.16569504141807556, |
|
"learning_rate": 0.00015918434036548017, |
|
"loss": 0.6919, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.15649547352249352, |
|
"grad_norm": 0.16722472012043, |
|
"learning_rate": 0.00015904968335183354, |
|
"loss": 0.7028, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.15675586532535957, |
|
"grad_norm": 0.16823537647724152, |
|
"learning_rate": 0.00015891486174325912, |
|
"loss": 0.6822, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.15701625712822562, |
|
"grad_norm": 0.1750400960445404, |
|
"learning_rate": 0.00015877987591555822, |
|
"loss": 0.7146, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.15727664893109164, |
|
"grad_norm": 0.1703154593706131, |
|
"learning_rate": 0.00015864472624499008, |
|
"loss": 0.7057, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.1575370407339577, |
|
"grad_norm": 0.17317266762256622, |
|
"learning_rate": 0.00015850941310827045, |
|
"loss": 0.692, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.15779743253682374, |
|
"grad_norm": 0.18270671367645264, |
|
"learning_rate": 0.00015837393688257083, |
|
"loss": 0.7279, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.1580578243396898, |
|
"grad_norm": 0.1647195667028427, |
|
"learning_rate": 0.00015823829794551724, |
|
"loss": 0.6999, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.15831821614255584, |
|
"grad_norm": 0.17559175193309784, |
|
"learning_rate": 0.00015810249667518936, |
|
"loss": 0.6952, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.1585786079454219, |
|
"grad_norm": 0.168562114238739, |
|
"learning_rate": 0.00015796653345011928, |
|
"loss": 0.7013, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.1588389997482879, |
|
"grad_norm": 0.18079742789268494, |
|
"learning_rate": 0.00015783040864929043, |
|
"loss": 0.7115, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.15909939155115396, |
|
"grad_norm": 0.17427009344100952, |
|
"learning_rate": 0.00015769412265213684, |
|
"loss": 0.6955, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.15935978335402, |
|
"grad_norm": 0.18136094510555267, |
|
"learning_rate": 0.00015755767583854163, |
|
"loss": 0.7129, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.15962017515688606, |
|
"grad_norm": 0.17283126711845398, |
|
"learning_rate": 0.00015742106858883633, |
|
"loss": 0.6993, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.1598805669597521, |
|
"grad_norm": 0.16747362911701202, |
|
"learning_rate": 0.00015728430128379963, |
|
"loss": 0.718, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.16014095876261816, |
|
"grad_norm": 0.17267820239067078, |
|
"learning_rate": 0.0001571473743046563, |
|
"loss": 0.6946, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.16040135056548419, |
|
"grad_norm": 0.17288938164710999, |
|
"learning_rate": 0.00015701028803307625, |
|
"loss": 0.7031, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.16066174236835024, |
|
"grad_norm": 0.16363868117332458, |
|
"learning_rate": 0.0001568730428511734, |
|
"loss": 0.7038, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.16092213417121629, |
|
"grad_norm": 0.17383398115634918, |
|
"learning_rate": 0.0001567356391415046, |
|
"loss": 0.695, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.16118252597408234, |
|
"grad_norm": 0.15880119800567627, |
|
"learning_rate": 0.00015659807728706854, |
|
"loss": 0.6977, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.16144291777694839, |
|
"grad_norm": 0.1778557300567627, |
|
"learning_rate": 0.0001564603576713048, |
|
"loss": 0.7319, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.16170330957981444, |
|
"grad_norm": 0.1838517040014267, |
|
"learning_rate": 0.00015632248067809265, |
|
"loss": 0.6999, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.16196370138268049, |
|
"grad_norm": 0.1765550673007965, |
|
"learning_rate": 0.00015618444669175004, |
|
"loss": 0.689, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.1622240931855465, |
|
"grad_norm": 0.17367658019065857, |
|
"learning_rate": 0.00015604625609703257, |
|
"loss": 0.7028, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.16248448498841256, |
|
"grad_norm": 0.1650581955909729, |
|
"learning_rate": 0.0001559079092791323, |
|
"loss": 0.6926, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.1627448767912786, |
|
"grad_norm": 0.16974525153636932, |
|
"learning_rate": 0.00015576940662367675, |
|
"loss": 0.711, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.16300526859414466, |
|
"grad_norm": 0.17168691754341125, |
|
"learning_rate": 0.00015563074851672787, |
|
"loss": 0.6949, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.1632656603970107, |
|
"grad_norm": 0.17578692734241486, |
|
"learning_rate": 0.00015549193534478094, |
|
"loss": 0.6858, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.16352605219987676, |
|
"grad_norm": 0.17432482540607452, |
|
"learning_rate": 0.00015535296749476335, |
|
"loss": 0.7025, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.16378644400274278, |
|
"grad_norm": 0.17605715990066528, |
|
"learning_rate": 0.00015521384535403376, |
|
"loss": 0.7021, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.16404683580560883, |
|
"grad_norm": 0.17800843715667725, |
|
"learning_rate": 0.0001550745693103808, |
|
"loss": 0.6998, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.16430722760847488, |
|
"grad_norm": 0.17110416293144226, |
|
"learning_rate": 0.0001549351397520222, |
|
"loss": 0.7116, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.16456761941134093, |
|
"grad_norm": 0.1723811775445938, |
|
"learning_rate": 0.00015479555706760344, |
|
"loss": 0.72, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.16482801121420698, |
|
"grad_norm": 0.1752457171678543, |
|
"learning_rate": 0.00015465582164619703, |
|
"loss": 0.6866, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.16508840301707303, |
|
"grad_norm": 0.1849747896194458, |
|
"learning_rate": 0.00015451593387730105, |
|
"loss": 0.7288, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.16534879481993905, |
|
"grad_norm": 0.16762009263038635, |
|
"learning_rate": 0.00015437589415083828, |
|
"loss": 0.6919, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.1656091866228051, |
|
"grad_norm": 0.17073680460453033, |
|
"learning_rate": 0.00015423570285715506, |
|
"loss": 0.709, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.16586957842567115, |
|
"grad_norm": 0.16971205174922943, |
|
"learning_rate": 0.00015409536038702028, |
|
"loss": 0.7156, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.1661299702285372, |
|
"grad_norm": 0.17233216762542725, |
|
"learning_rate": 0.00015395486713162414, |
|
"loss": 0.7187, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.16639036203140325, |
|
"grad_norm": 0.17737407982349396, |
|
"learning_rate": 0.00015381422348257715, |
|
"loss": 0.71, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.1666507538342693, |
|
"grad_norm": 0.17228469252586365, |
|
"learning_rate": 0.00015367342983190906, |
|
"loss": 0.7224, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.16691114563713536, |
|
"grad_norm": 0.17020554840564728, |
|
"learning_rate": 0.00015353248657206764, |
|
"loss": 0.7052, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.16717153744000138, |
|
"grad_norm": 0.1692919284105301, |
|
"learning_rate": 0.00015339139409591783, |
|
"loss": 0.7035, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.16743192924286743, |
|
"grad_norm": 0.17089873552322388, |
|
"learning_rate": 0.00015325015279674036, |
|
"loss": 0.7043, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.16769232104573348, |
|
"grad_norm": 0.16704905033111572, |
|
"learning_rate": 0.00015310876306823092, |
|
"loss": 0.6924, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.16795271284859953, |
|
"grad_norm": 0.1821179836988449, |
|
"learning_rate": 0.00015296722530449877, |
|
"loss": 0.6996, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.16821310465146558, |
|
"grad_norm": 0.17202268540859222, |
|
"learning_rate": 0.000152825539900066, |
|
"loss": 0.7164, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.16847349645433163, |
|
"grad_norm": 0.16377121210098267, |
|
"learning_rate": 0.00015268370724986601, |
|
"loss": 0.699, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.16873388825719765, |
|
"grad_norm": 0.17642685770988464, |
|
"learning_rate": 0.00015254172774924277, |
|
"loss": 0.7127, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.1689942800600637, |
|
"grad_norm": 0.1717827171087265, |
|
"learning_rate": 0.0001523996017939496, |
|
"loss": 0.6927, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.16925467186292975, |
|
"grad_norm": 0.1642349660396576, |
|
"learning_rate": 0.000152257329780148, |
|
"loss": 0.704, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.1695150636657958, |
|
"grad_norm": 0.17559029161930084, |
|
"learning_rate": 0.00015211491210440653, |
|
"loss": 0.7236, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.16977545546866185, |
|
"grad_norm": 0.1805913746356964, |
|
"learning_rate": 0.00015197234916369996, |
|
"loss": 0.6829, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.1700358472715279, |
|
"grad_norm": 0.17583122849464417, |
|
"learning_rate": 0.00015182964135540772, |
|
"loss": 0.7045, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.17029623907439395, |
|
"grad_norm": 0.1662655770778656, |
|
"learning_rate": 0.00015168678907731323, |
|
"loss": 0.7173, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.17055663087725997, |
|
"grad_norm": 0.1719600409269333, |
|
"learning_rate": 0.00015154379272760252, |
|
"loss": 0.7082, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.17081702268012602, |
|
"grad_norm": 0.18137727677822113, |
|
"learning_rate": 0.00015140065270486324, |
|
"loss": 0.7314, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.17107741448299207, |
|
"grad_norm": 0.17727130651474, |
|
"learning_rate": 0.00015125736940808351, |
|
"loss": 0.7093, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.17133780628585812, |
|
"grad_norm": 0.19269149005413055, |
|
"learning_rate": 0.00015111394323665081, |
|
"loss": 0.6859, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.17159819808872417, |
|
"grad_norm": 0.18232578039169312, |
|
"learning_rate": 0.00015097037459035084, |
|
"loss": 0.726, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.17185858989159022, |
|
"grad_norm": 0.16427166759967804, |
|
"learning_rate": 0.0001508266638693665, |
|
"loss": 0.6896, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.17211898169445625, |
|
"grad_norm": 0.17567217350006104, |
|
"learning_rate": 0.00015068281147427657, |
|
"loss": 0.6968, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.1723793734973223, |
|
"grad_norm": 0.17443399131298065, |
|
"learning_rate": 0.00015053881780605495, |
|
"loss": 0.7034, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.17263976530018835, |
|
"grad_norm": 0.16696178913116455, |
|
"learning_rate": 0.00015039468326606911, |
|
"loss": 0.6897, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.1729001571030544, |
|
"grad_norm": 0.1779368370771408, |
|
"learning_rate": 0.00015025040825607935, |
|
"loss": 0.7036, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.17316054890592045, |
|
"grad_norm": 0.17863190174102783, |
|
"learning_rate": 0.00015010599317823734, |
|
"loss": 0.7032, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.1734209407087865, |
|
"grad_norm": 0.1763063222169876, |
|
"learning_rate": 0.00014996143843508538, |
|
"loss": 0.6825, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.17368133251165252, |
|
"grad_norm": 0.17159639298915863, |
|
"learning_rate": 0.00014981674442955488, |
|
"loss": 0.6988, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.17394172431451857, |
|
"grad_norm": 0.17756308615207672, |
|
"learning_rate": 0.00014967191156496564, |
|
"loss": 0.7018, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.17420211611738462, |
|
"grad_norm": 0.17641721665859222, |
|
"learning_rate": 0.00014952694024502429, |
|
"loss": 0.7321, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.17446250792025067, |
|
"grad_norm": 0.17100387811660767, |
|
"learning_rate": 0.00014938183087382354, |
|
"loss": 0.7099, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.17472289972311672, |
|
"grad_norm": 0.16952122747898102, |
|
"learning_rate": 0.00014923658385584084, |
|
"loss": 0.6802, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.17498329152598277, |
|
"grad_norm": 0.1642334908246994, |
|
"learning_rate": 0.00014909119959593734, |
|
"loss": 0.7083, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.17524368332884882, |
|
"grad_norm": 0.16929323971271515, |
|
"learning_rate": 0.00014894567849935673, |
|
"loss": 0.7144, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.17550407513171484, |
|
"grad_norm": 0.180099755525589, |
|
"learning_rate": 0.00014880002097172418, |
|
"loss": 0.7043, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.1757644669345809, |
|
"grad_norm": 0.17287197709083557, |
|
"learning_rate": 0.000148654227419045, |
|
"loss": 0.6891, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.17602485873744694, |
|
"grad_norm": 0.17458143830299377, |
|
"learning_rate": 0.00014850829824770382, |
|
"loss": 0.6889, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.176285250540313, |
|
"grad_norm": 0.1722840964794159, |
|
"learning_rate": 0.00014836223386446317, |
|
"loss": 0.7058, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.17654564234317904, |
|
"grad_norm": 0.17535988986492157, |
|
"learning_rate": 0.0001482160346764625, |
|
"loss": 0.7148, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.1768060341460451, |
|
"grad_norm": 0.16451425850391388, |
|
"learning_rate": 0.00014806970109121709, |
|
"loss": 0.6975, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.17706642594891112, |
|
"grad_norm": 0.16712118685245514, |
|
"learning_rate": 0.00014792323351661671, |
|
"loss": 0.7148, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.17732681775177717, |
|
"grad_norm": 0.17245186865329742, |
|
"learning_rate": 0.0001477766323609248, |
|
"loss": 0.7216, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.17758720955464322, |
|
"grad_norm": 0.17335280776023865, |
|
"learning_rate": 0.00014762989803277686, |
|
"loss": 0.6949, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.17784760135750927, |
|
"grad_norm": 0.16834279894828796, |
|
"learning_rate": 0.0001474830309411799, |
|
"loss": 0.6884, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.17810799316037532, |
|
"grad_norm": 0.17499734461307526, |
|
"learning_rate": 0.0001473360314955108, |
|
"loss": 0.7065, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.17836838496324137, |
|
"grad_norm": 0.17072665691375732, |
|
"learning_rate": 0.00014718890010551543, |
|
"loss": 0.7067, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.17862877676610742, |
|
"grad_norm": 0.192660853266716, |
|
"learning_rate": 0.00014704163718130735, |
|
"loss": 0.7263, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.17888916856897344, |
|
"grad_norm": 0.18734219670295715, |
|
"learning_rate": 0.00014689424313336696, |
|
"loss": 0.7044, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.1791495603718395, |
|
"grad_norm": 0.17117907106876373, |
|
"learning_rate": 0.00014674671837253998, |
|
"loss": 0.7112, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.17940995217470554, |
|
"grad_norm": 0.18425500392913818, |
|
"learning_rate": 0.00014659906331003652, |
|
"loss": 0.6925, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.1796703439775716, |
|
"grad_norm": 0.1706390231847763, |
|
"learning_rate": 0.00014645127835742988, |
|
"loss": 0.6916, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.17993073578043764, |
|
"grad_norm": 0.17749357223510742, |
|
"learning_rate": 0.00014630336392665552, |
|
"loss": 0.7011, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.1801911275833037, |
|
"grad_norm": 0.16545963287353516, |
|
"learning_rate": 0.00014615532043000965, |
|
"loss": 0.7163, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.1804515193861697, |
|
"grad_norm": 0.1756933480501175, |
|
"learning_rate": 0.00014600714828014833, |
|
"loss": 0.6954, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.18071191118903576, |
|
"grad_norm": 0.16752491891384125, |
|
"learning_rate": 0.0001458588478900862, |
|
"loss": 0.6902, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.1809723029919018, |
|
"grad_norm": 0.16998566687107086, |
|
"learning_rate": 0.00014571041967319535, |
|
"loss": 0.6906, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.18123269479476786, |
|
"grad_norm": 0.1744604855775833, |
|
"learning_rate": 0.00014556186404320428, |
|
"loss": 0.6814, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.1814930865976339, |
|
"grad_norm": 0.17596711218357086, |
|
"learning_rate": 0.00014541318141419646, |
|
"loss": 0.7141, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.18175347840049996, |
|
"grad_norm": 0.16803975403308868, |
|
"learning_rate": 0.00014526437220060948, |
|
"loss": 0.6875, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.18201387020336598, |
|
"grad_norm": 0.1845843344926834, |
|
"learning_rate": 0.00014511543681723377, |
|
"loss": 0.6921, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.18227426200623204, |
|
"grad_norm": 0.1783183515071869, |
|
"learning_rate": 0.00014496637567921135, |
|
"loss": 0.6982, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.18253465380909809, |
|
"grad_norm": 0.17463266849517822, |
|
"learning_rate": 0.00014481718920203492, |
|
"loss": 0.7094, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.18279504561196414, |
|
"grad_norm": 0.17779332399368286, |
|
"learning_rate": 0.00014466787780154636, |
|
"loss": 0.6989, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.18305543741483019, |
|
"grad_norm": 0.17536398768424988, |
|
"learning_rate": 0.0001445184418939359, |
|
"loss": 0.7009, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.18331582921769624, |
|
"grad_norm": 0.17731131613254547, |
|
"learning_rate": 0.00014436888189574084, |
|
"loss": 0.6964, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.18357622102056229, |
|
"grad_norm": 0.18129530549049377, |
|
"learning_rate": 0.00014421919822384428, |
|
"loss": 0.6822, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.1838366128234283, |
|
"grad_norm": 0.16552576422691345, |
|
"learning_rate": 0.00014406939129547406, |
|
"loss": 0.7172, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.18409700462629436, |
|
"grad_norm": 0.17690911889076233, |
|
"learning_rate": 0.00014391946152820167, |
|
"loss": 0.6883, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.1843573964291604, |
|
"grad_norm": 0.1851797252893448, |
|
"learning_rate": 0.00014376940933994082, |
|
"loss": 0.6988, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.18461778823202646, |
|
"grad_norm": 0.18322911858558655, |
|
"learning_rate": 0.0001436192351489467, |
|
"loss": 0.689, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.1848781800348925, |
|
"grad_norm": 0.17606942355632782, |
|
"learning_rate": 0.0001434689393738144, |
|
"loss": 0.7034, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.18513857183775856, |
|
"grad_norm": 0.1715836524963379, |
|
"learning_rate": 0.0001433185224334779, |
|
"loss": 0.6939, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.18539896364062458, |
|
"grad_norm": 0.18052712082862854, |
|
"learning_rate": 0.00014316798474720908, |
|
"loss": 0.6858, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.18565935544349063, |
|
"grad_norm": 0.17608265578746796, |
|
"learning_rate": 0.00014301732673461617, |
|
"loss": 0.7166, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.18591974724635668, |
|
"grad_norm": 0.17561575770378113, |
|
"learning_rate": 0.000142866548815643, |
|
"loss": 0.7208, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.18618013904922273, |
|
"grad_norm": 0.17250195145606995, |
|
"learning_rate": 0.00014271565141056746, |
|
"loss": 0.687, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.18644053085208878, |
|
"grad_norm": 0.1769152581691742, |
|
"learning_rate": 0.00014256463494000064, |
|
"loss": 0.7007, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.18670092265495483, |
|
"grad_norm": 0.16784866154193878, |
|
"learning_rate": 0.0001424134998248854, |
|
"loss": 0.6913, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.18696131445782085, |
|
"grad_norm": 0.18374022841453552, |
|
"learning_rate": 0.00014226224648649532, |
|
"loss": 0.7048, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.1872217062606869, |
|
"grad_norm": 0.173793226480484, |
|
"learning_rate": 0.0001421108753464336, |
|
"loss": 0.6877, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.18748209806355295, |
|
"grad_norm": 0.17282545566558838, |
|
"learning_rate": 0.00014195938682663175, |
|
"loss": 0.7132, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.187742489866419, |
|
"grad_norm": 0.16977694630622864, |
|
"learning_rate": 0.00014180778134934843, |
|
"loss": 0.6813, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.18800288166928505, |
|
"grad_norm": 0.18771855533123016, |
|
"learning_rate": 0.00014165605933716836, |
|
"loss": 0.7037, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.1882632734721511, |
|
"grad_norm": 0.17175500094890594, |
|
"learning_rate": 0.00014150422121300105, |
|
"loss": 0.6926, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.18852366527501715, |
|
"grad_norm": 0.17340902984142303, |
|
"learning_rate": 0.00014135226740007967, |
|
"loss": 0.6987, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.18878405707788318, |
|
"grad_norm": 0.16895218193531036, |
|
"learning_rate": 0.00014120019832195986, |
|
"loss": 0.6771, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.18904444888074923, |
|
"grad_norm": 0.17422990500926971, |
|
"learning_rate": 0.00014104801440251863, |
|
"loss": 0.7027, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.18930484068361528, |
|
"grad_norm": 0.1705746352672577, |
|
"learning_rate": 0.00014089571606595294, |
|
"loss": 0.6907, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.18956523248648133, |
|
"grad_norm": 0.16771703958511353, |
|
"learning_rate": 0.0001407433037367788, |
|
"loss": 0.7105, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.18982562428934738, |
|
"grad_norm": 0.1731031835079193, |
|
"learning_rate": 0.00014059077783982993, |
|
"loss": 0.6683, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.19008601609221343, |
|
"grad_norm": 0.18010441958904266, |
|
"learning_rate": 0.00014043813880025658, |
|
"loss": 0.692, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.19034640789507945, |
|
"grad_norm": 0.17735110223293304, |
|
"learning_rate": 0.0001402853870435244, |
|
"loss": 0.6639, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.1906067996979455, |
|
"grad_norm": 0.16834424436092377, |
|
"learning_rate": 0.00014013252299541323, |
|
"loss": 0.6915, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.19086719150081155, |
|
"grad_norm": 0.17520864307880402, |
|
"learning_rate": 0.00013997954708201593, |
|
"loss": 0.6907, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.1911275833036776, |
|
"grad_norm": 0.18474625051021576, |
|
"learning_rate": 0.00013982645972973715, |
|
"loss": 0.7003, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.19138797510654365, |
|
"grad_norm": 0.1774008870124817, |
|
"learning_rate": 0.00013967326136529208, |
|
"loss": 0.7133, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.1916483669094097, |
|
"grad_norm": 0.17432044446468353, |
|
"learning_rate": 0.00013951995241570552, |
|
"loss": 0.7072, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.19190875871227575, |
|
"grad_norm": 0.1843249648809433, |
|
"learning_rate": 0.0001393665333083104, |
|
"loss": 0.6928, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.19216915051514177, |
|
"grad_norm": 0.17402757704257965, |
|
"learning_rate": 0.00013921300447074667, |
|
"loss": 0.7106, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.19242954231800782, |
|
"grad_norm": 0.17242158949375153, |
|
"learning_rate": 0.00013905936633096025, |
|
"loss": 0.6903, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.19268993412087387, |
|
"grad_norm": 0.18488599359989166, |
|
"learning_rate": 0.00013890561931720167, |
|
"loss": 0.688, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.19295032592373992, |
|
"grad_norm": 0.17597128450870514, |
|
"learning_rate": 0.0001387517638580249, |
|
"loss": 0.7062, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.19321071772660597, |
|
"grad_norm": 0.1805078685283661, |
|
"learning_rate": 0.00013859780038228634, |
|
"loss": 0.6999, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.19347110952947202, |
|
"grad_norm": 0.17452898621559143, |
|
"learning_rate": 0.00013844372931914325, |
|
"loss": 0.6961, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.19373150133233805, |
|
"grad_norm": 0.17718560993671417, |
|
"learning_rate": 0.00013828955109805295, |
|
"loss": 0.6909, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.1939918931352041, |
|
"grad_norm": 0.1740763783454895, |
|
"learning_rate": 0.00013813526614877138, |
|
"loss": 0.7126, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.19425228493807015, |
|
"grad_norm": 0.17878217995166779, |
|
"learning_rate": 0.000137980874901352, |
|
"loss": 0.7, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.1945126767409362, |
|
"grad_norm": 0.17267367243766785, |
|
"learning_rate": 0.0001378263777861445, |
|
"loss": 0.6756, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.19477306854380225, |
|
"grad_norm": 0.174414724111557, |
|
"learning_rate": 0.00013767177523379382, |
|
"loss": 0.6948, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.1950334603466683, |
|
"grad_norm": 0.17386887967586517, |
|
"learning_rate": 0.00013751706767523865, |
|
"loss": 0.6848, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.19529385214953432, |
|
"grad_norm": 0.1732412576675415, |
|
"learning_rate": 0.0001373622555417104, |
|
"loss": 0.6914, |
|
"step": 7500 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 19202, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.5187550322688e+18, |
|
"train_batch_size": 5, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|