|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 3694, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0027070925825663237, |
|
"grad_norm": 2.7909258086527373, |
|
"learning_rate": 4.999977397559719e-05, |
|
"loss": 1.4602, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.005414185165132647, |
|
"grad_norm": 2.9246419242894404, |
|
"learning_rate": 4.9999095906475714e-05, |
|
"loss": 1.4843, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.008121277747698972, |
|
"grad_norm": 2.60296538082041, |
|
"learning_rate": 4.999796580489639e-05, |
|
"loss": 1.3953, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.010828370330265295, |
|
"grad_norm": 2.427882849139683, |
|
"learning_rate": 4.9996383691293656e-05, |
|
"loss": 1.3886, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01353546291283162, |
|
"grad_norm": 2.362478801666027, |
|
"learning_rate": 4.999434959427522e-05, |
|
"loss": 1.4087, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.016242555495397944, |
|
"grad_norm": 2.0562878551243253, |
|
"learning_rate": 4.999186355062154e-05, |
|
"loss": 1.3925, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.018949648077964266, |
|
"grad_norm": 1.4997595683614129, |
|
"learning_rate": 4.998892560528511e-05, |
|
"loss": 1.4251, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.02165674066053059, |
|
"grad_norm": 1.8396372795029057, |
|
"learning_rate": 4.9985535811389726e-05, |
|
"loss": 1.4299, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.024363833243096916, |
|
"grad_norm": 3.016445354360887, |
|
"learning_rate": 4.99816942302295e-05, |
|
"loss": 1.4179, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.02707092582566324, |
|
"grad_norm": 2.285685249444506, |
|
"learning_rate": 4.997740093126769e-05, |
|
"loss": 1.4453, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.02977801840822956, |
|
"grad_norm": 2.3733951587674054, |
|
"learning_rate": 4.997265599213554e-05, |
|
"loss": 1.3905, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.03248511099079589, |
|
"grad_norm": 3.8476837651749336, |
|
"learning_rate": 4.996745949863082e-05, |
|
"loss": 1.4418, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.03519220357336221, |
|
"grad_norm": 1.6916145193283187, |
|
"learning_rate": 4.996181154471625e-05, |
|
"loss": 1.4637, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.03789929615592853, |
|
"grad_norm": 1.6551273153572528, |
|
"learning_rate": 4.9955712232517896e-05, |
|
"loss": 1.422, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.040606388738494856, |
|
"grad_norm": 1.842180664471275, |
|
"learning_rate": 4.99491616723232e-05, |
|
"loss": 1.4768, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.04331348132106118, |
|
"grad_norm": 2.8401045455632503, |
|
"learning_rate": 4.994215998257909e-05, |
|
"loss": 1.4626, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.0460205739036275, |
|
"grad_norm": 2.137479556495621, |
|
"learning_rate": 4.993470728988979e-05, |
|
"loss": 1.4147, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.04872766648619383, |
|
"grad_norm": 1.9073264544958397, |
|
"learning_rate": 4.992680372901454e-05, |
|
"loss": 1.4119, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.051434759068760154, |
|
"grad_norm": 1.5685293387125376, |
|
"learning_rate": 4.9918449442865126e-05, |
|
"loss": 1.428, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.05414185165132648, |
|
"grad_norm": 2.128149762022408, |
|
"learning_rate": 4.9909644582503366e-05, |
|
"loss": 1.4497, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.0568489442338928, |
|
"grad_norm": 1.8550357912320867, |
|
"learning_rate": 4.990038930713833e-05, |
|
"loss": 1.4661, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.05955603681645912, |
|
"grad_norm": 1.811618523801653, |
|
"learning_rate": 4.989068378412346e-05, |
|
"loss": 1.418, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.062263129399025445, |
|
"grad_norm": 1.691670012971929, |
|
"learning_rate": 4.9880528188953556e-05, |
|
"loss": 1.4446, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.06497022198159177, |
|
"grad_norm": 1.7432757441956248, |
|
"learning_rate": 4.986992270526161e-05, |
|
"loss": 1.3911, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.0676773145641581, |
|
"grad_norm": 1.709102114001383, |
|
"learning_rate": 4.9858867524815456e-05, |
|
"loss": 1.4912, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.07038440714672442, |
|
"grad_norm": 1.7869823709380956, |
|
"learning_rate": 4.984736284751436e-05, |
|
"loss": 1.454, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.07309149972929074, |
|
"grad_norm": 1.6458598808862888, |
|
"learning_rate": 4.983540888138534e-05, |
|
"loss": 1.4278, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.07579859231185707, |
|
"grad_norm": 2.4415366747183054, |
|
"learning_rate": 4.982300584257943e-05, |
|
"loss": 1.4511, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.07850568489442339, |
|
"grad_norm": 1.7712176478354567, |
|
"learning_rate": 4.98101539553678e-05, |
|
"loss": 1.4221, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.08121277747698971, |
|
"grad_norm": 1.408329894177912, |
|
"learning_rate": 4.979685345213766e-05, |
|
"loss": 1.4153, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.08391987005955603, |
|
"grad_norm": 2.8322848403176106, |
|
"learning_rate": 4.978310457338806e-05, |
|
"loss": 1.4544, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.08662696264212236, |
|
"grad_norm": 1.349216831725713, |
|
"learning_rate": 4.976890756772558e-05, |
|
"loss": 1.4232, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.08933405522468868, |
|
"grad_norm": 3.302218333832659, |
|
"learning_rate": 4.9754262691859786e-05, |
|
"loss": 1.4716, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.092041147807255, |
|
"grad_norm": 2.049533306517282, |
|
"learning_rate": 4.9739170210598646e-05, |
|
"loss": 1.469, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.09474824038982133, |
|
"grad_norm": 2.329244123317743, |
|
"learning_rate": 4.9723630396843665e-05, |
|
"loss": 1.4268, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.09745533297238766, |
|
"grad_norm": 1.6230256143926636, |
|
"learning_rate": 4.9707643531585026e-05, |
|
"loss": 1.44, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.10016242555495398, |
|
"grad_norm": 2.048460766648452, |
|
"learning_rate": 4.969120990389645e-05, |
|
"loss": 1.4171, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.10286951813752031, |
|
"grad_norm": 1.8949361232151039, |
|
"learning_rate": 4.967432981093002e-05, |
|
"loss": 1.4555, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.10557661072008663, |
|
"grad_norm": 1.454874136363247, |
|
"learning_rate": 4.965700355791075e-05, |
|
"loss": 1.4637, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.10828370330265295, |
|
"grad_norm": 1.6866618414734464, |
|
"learning_rate": 4.9639231458131155e-05, |
|
"loss": 1.4882, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.11099079588521928, |
|
"grad_norm": 1.767137745890727, |
|
"learning_rate": 4.962101383294546e-05, |
|
"loss": 1.4643, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.1136978884677856, |
|
"grad_norm": 1.4955900585918869, |
|
"learning_rate": 4.9602351011763916e-05, |
|
"loss": 1.4916, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.11640498105035192, |
|
"grad_norm": 1.4089383124912003, |
|
"learning_rate": 4.958324333204675e-05, |
|
"loss": 1.4558, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.11911207363291824, |
|
"grad_norm": 1.794803201497381, |
|
"learning_rate": 4.9563691139298115e-05, |
|
"loss": 1.4685, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.12181916621548457, |
|
"grad_norm": 4.657348417565155, |
|
"learning_rate": 4.954369478705984e-05, |
|
"loss": 1.4517, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.12452625879805089, |
|
"grad_norm": 1.727020331207215, |
|
"learning_rate": 4.952325463690499e-05, |
|
"loss": 1.4905, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.12723335138061723, |
|
"grad_norm": 1.677461362340949, |
|
"learning_rate": 4.95023710584314e-05, |
|
"loss": 1.4786, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.12994044396318355, |
|
"grad_norm": 1.3718142212853295, |
|
"learning_rate": 4.948104442925493e-05, |
|
"loss": 1.5028, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.13264753654574987, |
|
"grad_norm": 1.762804612117205, |
|
"learning_rate": 4.945927513500267e-05, |
|
"loss": 1.53, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.1353546291283162, |
|
"grad_norm": 2.616132608044088, |
|
"learning_rate": 4.943706356930596e-05, |
|
"loss": 1.4513, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.13806172171088252, |
|
"grad_norm": 1.5396425075226725, |
|
"learning_rate": 4.941441013379328e-05, |
|
"loss": 1.4783, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.14076881429344884, |
|
"grad_norm": 1.5753718924201408, |
|
"learning_rate": 4.939131523808295e-05, |
|
"loss": 1.469, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.14347590687601516, |
|
"grad_norm": 1.990561438026612, |
|
"learning_rate": 4.936777929977578e-05, |
|
"loss": 1.5288, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.1461829994585815, |
|
"grad_norm": 1.535027221106918, |
|
"learning_rate": 4.9343802744447485e-05, |
|
"loss": 1.4864, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.1488900920411478, |
|
"grad_norm": 1.5120704229625987, |
|
"learning_rate": 4.9319386005641e-05, |
|
"loss": 1.5288, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.15159718462371413, |
|
"grad_norm": 1.3272029647679429, |
|
"learning_rate": 4.9294529524858605e-05, |
|
"loss": 1.4571, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.15430427720628045, |
|
"grad_norm": 1.3970253346744186, |
|
"learning_rate": 4.926923375155402e-05, |
|
"loss": 1.4252, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.15701136978884678, |
|
"grad_norm": 1.8691453221841268, |
|
"learning_rate": 4.924349914312421e-05, |
|
"loss": 1.4326, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.1597184623714131, |
|
"grad_norm": 1.949334637283444, |
|
"learning_rate": 4.921732616490111e-05, |
|
"loss": 1.4056, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.16242555495397942, |
|
"grad_norm": 1.5924101773217003, |
|
"learning_rate": 4.919071529014329e-05, |
|
"loss": 1.4681, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.16513264753654575, |
|
"grad_norm": 1.6221588411171692, |
|
"learning_rate": 4.91636670000273e-05, |
|
"loss": 1.509, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.16783974011911207, |
|
"grad_norm": 1.5149301503130714, |
|
"learning_rate": 4.913618178363904e-05, |
|
"loss": 1.5005, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.1705468327016784, |
|
"grad_norm": 1.6644176831544628, |
|
"learning_rate": 4.9108260137964865e-05, |
|
"loss": 1.4638, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.17325392528424471, |
|
"grad_norm": 1.569403594423818, |
|
"learning_rate": 4.9079902567882665e-05, |
|
"loss": 1.4826, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.17596101786681104, |
|
"grad_norm": 1.3674634000662147, |
|
"learning_rate": 4.9051109586152634e-05, |
|
"loss": 1.4344, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.17866811044937736, |
|
"grad_norm": 1.1930764967763101, |
|
"learning_rate": 4.902188171340811e-05, |
|
"loss": 1.4396, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.18137520303194368, |
|
"grad_norm": 1.89545649756294, |
|
"learning_rate": 4.8992219478146094e-05, |
|
"loss": 1.4858, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.18408229561451, |
|
"grad_norm": 4.121840228113138, |
|
"learning_rate": 4.8962123416717706e-05, |
|
"loss": 1.4912, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.18678938819707633, |
|
"grad_norm": 1.292830871259127, |
|
"learning_rate": 4.893159407331849e-05, |
|
"loss": 1.5105, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.18949648077964265, |
|
"grad_norm": 2.0378465705988797, |
|
"learning_rate": 4.8900631999978565e-05, |
|
"loss": 1.4478, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.19220357336220897, |
|
"grad_norm": 1.4086096037731015, |
|
"learning_rate": 4.886923775655267e-05, |
|
"loss": 1.4748, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.19491066594477532, |
|
"grad_norm": 1.6471666031866425, |
|
"learning_rate": 4.883741191071001e-05, |
|
"loss": 1.4616, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.19761775852734165, |
|
"grad_norm": 1.3016146758942742, |
|
"learning_rate": 4.880515503792402e-05, |
|
"loss": 1.4502, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.20032485110990797, |
|
"grad_norm": 1.7755038959773146, |
|
"learning_rate": 4.8772467721461916e-05, |
|
"loss": 1.4929, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.2030319436924743, |
|
"grad_norm": 1.8173068291163943, |
|
"learning_rate": 4.8739350552374206e-05, |
|
"loss": 1.4514, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.20573903627504062, |
|
"grad_norm": 1.3957806556643733, |
|
"learning_rate": 4.870580412948397e-05, |
|
"loss": 1.4901, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.20844612885760694, |
|
"grad_norm": 1.7574648378127529, |
|
"learning_rate": 4.8671829059375996e-05, |
|
"loss": 1.4239, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.21115322144017326, |
|
"grad_norm": 1.988134441146335, |
|
"learning_rate": 4.863742595638589e-05, |
|
"loss": 1.4929, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.21386031402273958, |
|
"grad_norm": 1.6248880174612887, |
|
"learning_rate": 4.8602595442588926e-05, |
|
"loss": 1.5049, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.2165674066053059, |
|
"grad_norm": 1.4759465960672522, |
|
"learning_rate": 4.856733814778878e-05, |
|
"loss": 1.4617, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.21927449918787223, |
|
"grad_norm": 1.6391142091975623, |
|
"learning_rate": 4.853165470950617e-05, |
|
"loss": 1.496, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.22198159177043855, |
|
"grad_norm": 1.5875776432159403, |
|
"learning_rate": 4.8495545772967336e-05, |
|
"loss": 1.4696, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.22468868435300487, |
|
"grad_norm": 1.3488488116129105, |
|
"learning_rate": 4.8459011991092335e-05, |
|
"loss": 1.4865, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.2273957769355712, |
|
"grad_norm": 1.2491528956283275, |
|
"learning_rate": 4.842205402448326e-05, |
|
"loss": 1.4818, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.23010286951813752, |
|
"grad_norm": 1.4146633075540191, |
|
"learning_rate": 4.8384672541412314e-05, |
|
"loss": 1.4461, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.23280996210070384, |
|
"grad_norm": 2.8216763132669405, |
|
"learning_rate": 4.834686821780966e-05, |
|
"loss": 1.4905, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.23551705468327017, |
|
"grad_norm": 1.3681470128440785, |
|
"learning_rate": 4.83086417372513e-05, |
|
"loss": 1.4762, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.2382241472658365, |
|
"grad_norm": 1.6949909678615045, |
|
"learning_rate": 4.826999379094662e-05, |
|
"loss": 1.4548, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.2409312398484028, |
|
"grad_norm": 1.7298574479777804, |
|
"learning_rate": 4.823092507772593e-05, |
|
"loss": 1.4457, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.24363833243096913, |
|
"grad_norm": 1.3751587504284029, |
|
"learning_rate": 4.8191436304027846e-05, |
|
"loss": 1.4485, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.24634542501353546, |
|
"grad_norm": 1.98272939662037, |
|
"learning_rate": 4.815152818388647e-05, |
|
"loss": 1.4827, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.24905251759610178, |
|
"grad_norm": 1.48867070663095, |
|
"learning_rate": 4.811120143891855e-05, |
|
"loss": 1.4961, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.2517596101786681, |
|
"grad_norm": 1.6582119972239975, |
|
"learning_rate": 4.807045679831035e-05, |
|
"loss": 1.5125, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.25446670276123445, |
|
"grad_norm": 2.2411742740260143, |
|
"learning_rate": 4.8029294998804515e-05, |
|
"loss": 1.481, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.25717379534380075, |
|
"grad_norm": 3.1196395093384757, |
|
"learning_rate": 4.798771678468673e-05, |
|
"loss": 1.4766, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.2598808879263671, |
|
"grad_norm": 1.747388174474549, |
|
"learning_rate": 4.794572290777228e-05, |
|
"loss": 1.4664, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.2625879805089334, |
|
"grad_norm": 1.244225229661525, |
|
"learning_rate": 4.790331412739246e-05, |
|
"loss": 1.4881, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.26529507309149974, |
|
"grad_norm": 1.5361914036706255, |
|
"learning_rate": 4.786049121038078e-05, |
|
"loss": 1.4481, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.26800216567406604, |
|
"grad_norm": 2.004415613855818, |
|
"learning_rate": 4.78172549310592e-05, |
|
"loss": 1.4654, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.2707092582566324, |
|
"grad_norm": 1.6485133719660112, |
|
"learning_rate": 4.777360607122405e-05, |
|
"loss": 1.4569, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.2734163508391987, |
|
"grad_norm": 1.3237479316055099, |
|
"learning_rate": 4.772954542013193e-05, |
|
"loss": 1.4616, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.27612344342176504, |
|
"grad_norm": 1.4682831465817014, |
|
"learning_rate": 4.7685073774485424e-05, |
|
"loss": 1.4312, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.27883053600433133, |
|
"grad_norm": 2.66972568412979, |
|
"learning_rate": 4.764019193841871e-05, |
|
"loss": 1.4638, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.2815376285868977, |
|
"grad_norm": 1.8874806530406238, |
|
"learning_rate": 4.7594900723482994e-05, |
|
"loss": 1.4342, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.284244721169464, |
|
"grad_norm": 1.7012798675260252, |
|
"learning_rate": 4.7549200948631875e-05, |
|
"loss": 1.4808, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.2869518137520303, |
|
"grad_norm": 1.5041238189640276, |
|
"learning_rate": 4.750309344020649e-05, |
|
"loss": 1.477, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.2896589063345966, |
|
"grad_norm": 1.445182392004961, |
|
"learning_rate": 4.74565790319206e-05, |
|
"loss": 1.4684, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.292365998917163, |
|
"grad_norm": 1.5350175117888023, |
|
"learning_rate": 4.7409658564845526e-05, |
|
"loss": 1.4786, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.29507309149972927, |
|
"grad_norm": 1.6518274612672268, |
|
"learning_rate": 4.73623328873949e-05, |
|
"loss": 1.4819, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.2977801840822956, |
|
"grad_norm": 1.7095589003674432, |
|
"learning_rate": 4.731460285530936e-05, |
|
"loss": 1.4976, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.3004872766648619, |
|
"grad_norm": 1.723367767756465, |
|
"learning_rate": 4.726646933164108e-05, |
|
"loss": 1.4504, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.30319436924742826, |
|
"grad_norm": 1.9040390036164423, |
|
"learning_rate": 4.7217933186738114e-05, |
|
"loss": 1.4579, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.30590146182999456, |
|
"grad_norm": 1.2294419736565405, |
|
"learning_rate": 4.716899529822874e-05, |
|
"loss": 1.5031, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.3086085544125609, |
|
"grad_norm": 2.0118944901286997, |
|
"learning_rate": 4.7119656551005505e-05, |
|
"loss": 1.4532, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.31131564699512726, |
|
"grad_norm": 1.6217590845134082, |
|
"learning_rate": 4.706991783720928e-05, |
|
"loss": 1.4871, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.31402273957769355, |
|
"grad_norm": 1.509260025154433, |
|
"learning_rate": 4.701978005621311e-05, |
|
"loss": 1.4464, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.3167298321602599, |
|
"grad_norm": 1.5171464183166403, |
|
"learning_rate": 4.696924411460596e-05, |
|
"loss": 1.4486, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.3194369247428262, |
|
"grad_norm": 1.2572808716835593, |
|
"learning_rate": 4.6918310926176306e-05, |
|
"loss": 1.4458, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.32214401732539255, |
|
"grad_norm": 1.7849046370327453, |
|
"learning_rate": 4.686698141189564e-05, |
|
"loss": 1.4404, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.32485110990795885, |
|
"grad_norm": 1.914525739200728, |
|
"learning_rate": 4.6815256499901775e-05, |
|
"loss": 1.4898, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.3275582024905252, |
|
"grad_norm": 1.611833069659245, |
|
"learning_rate": 4.6763137125482104e-05, |
|
"loss": 1.4847, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.3302652950730915, |
|
"grad_norm": 1.753641811163595, |
|
"learning_rate": 4.671062423105667e-05, |
|
"loss": 1.433, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.33297238765565784, |
|
"grad_norm": 1.1996803367158615, |
|
"learning_rate": 4.66577187661611e-05, |
|
"loss": 1.4533, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.33567948023822414, |
|
"grad_norm": 1.554114829537543, |
|
"learning_rate": 4.660442168742951e-05, |
|
"loss": 1.4708, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.3383865728207905, |
|
"grad_norm": 6.405248874344029, |
|
"learning_rate": 4.6550733958577115e-05, |
|
"loss": 1.4721, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.3410936654033568, |
|
"grad_norm": 1.5464469028099914, |
|
"learning_rate": 4.6496656550382864e-05, |
|
"loss": 1.4524, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.34380075798592313, |
|
"grad_norm": 1.5438246148321, |
|
"learning_rate": 4.6442190440671886e-05, |
|
"loss": 1.5217, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.34650785056848943, |
|
"grad_norm": 1.6459917338592895, |
|
"learning_rate": 4.638733661429775e-05, |
|
"loss": 1.431, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.3492149431510558, |
|
"grad_norm": 1.7332054776628096, |
|
"learning_rate": 4.633209606312473e-05, |
|
"loss": 1.4896, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.3519220357336221, |
|
"grad_norm": 1.7861896691639296, |
|
"learning_rate": 4.6276469786009846e-05, |
|
"loss": 1.4587, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.3546291283161884, |
|
"grad_norm": 1.380983484664312, |
|
"learning_rate": 4.622045878878477e-05, |
|
"loss": 1.4928, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.3573362208987547, |
|
"grad_norm": 1.6722892346510752, |
|
"learning_rate": 4.616406408423769e-05, |
|
"loss": 1.3885, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.36004331348132107, |
|
"grad_norm": 1.302543220749753, |
|
"learning_rate": 4.610728669209494e-05, |
|
"loss": 1.4281, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.36275040606388737, |
|
"grad_norm": 1.3027170210142245, |
|
"learning_rate": 4.605012763900264e-05, |
|
"loss": 1.4695, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.3654574986464537, |
|
"grad_norm": 1.604586924613264, |
|
"learning_rate": 4.599258795850803e-05, |
|
"loss": 1.449, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.36816459122902, |
|
"grad_norm": 1.2848250120256988, |
|
"learning_rate": 4.593466869104087e-05, |
|
"loss": 1.41, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.37087168381158636, |
|
"grad_norm": 1.5746776369025801, |
|
"learning_rate": 4.5876370883894606e-05, |
|
"loss": 1.5164, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.37357877639415266, |
|
"grad_norm": 1.8869095435477306, |
|
"learning_rate": 4.581769559120738e-05, |
|
"loss": 1.4649, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.376285868976719, |
|
"grad_norm": 1.717413158097039, |
|
"learning_rate": 4.575864387394304e-05, |
|
"loss": 1.4801, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.3789929615592853, |
|
"grad_norm": 2.539587222307521, |
|
"learning_rate": 4.569921679987192e-05, |
|
"loss": 1.4385, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.38170005414185165, |
|
"grad_norm": 1.2980114991302223, |
|
"learning_rate": 4.563941544355152e-05, |
|
"loss": 1.4934, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.38440714672441795, |
|
"grad_norm": 1.6715775537496502, |
|
"learning_rate": 4.5579240886307136e-05, |
|
"loss": 1.4538, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.3871142393069843, |
|
"grad_norm": 2.709253749500051, |
|
"learning_rate": 4.551869421621221e-05, |
|
"loss": 1.4554, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.38982133188955065, |
|
"grad_norm": 1.5064962735011314, |
|
"learning_rate": 4.5457776528068743e-05, |
|
"loss": 1.5152, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.39252842447211694, |
|
"grad_norm": 1.4866135650495016, |
|
"learning_rate": 4.539648892338747e-05, |
|
"loss": 1.4701, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.3952355170546833, |
|
"grad_norm": 1.425267538841413, |
|
"learning_rate": 4.533483251036792e-05, |
|
"loss": 1.4794, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.3979426096372496, |
|
"grad_norm": 1.4920137928311075, |
|
"learning_rate": 4.527280840387842e-05, |
|
"loss": 1.4884, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.40064970221981594, |
|
"grad_norm": 1.4659663002416385, |
|
"learning_rate": 4.5210417725435896e-05, |
|
"loss": 1.4436, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.40335679480238223, |
|
"grad_norm": 1.4406570198350586, |
|
"learning_rate": 4.514766160318561e-05, |
|
"loss": 1.473, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.4060638873849486, |
|
"grad_norm": 1.4236232583730681, |
|
"learning_rate": 4.508454117188077e-05, |
|
"loss": 1.441, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.4087709799675149, |
|
"grad_norm": 1.658008187554401, |
|
"learning_rate": 4.5021057572862e-05, |
|
"loss": 1.4656, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.41147807255008123, |
|
"grad_norm": 1.5768571336837218, |
|
"learning_rate": 4.495721195403671e-05, |
|
"loss": 1.4681, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.4141851651326475, |
|
"grad_norm": 1.770673573444082, |
|
"learning_rate": 4.4893005469858316e-05, |
|
"loss": 1.42, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.4168922577152139, |
|
"grad_norm": 1.3645638511742517, |
|
"learning_rate": 4.482843928130541e-05, |
|
"loss": 1.4911, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.41959935029778017, |
|
"grad_norm": 1.6871925830755095, |
|
"learning_rate": 4.476351455586072e-05, |
|
"loss": 1.5269, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.4223064428803465, |
|
"grad_norm": 1.4062614507732203, |
|
"learning_rate": 4.469823246749003e-05, |
|
"loss": 1.445, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.4250135354629128, |
|
"grad_norm": 1.732815716768682, |
|
"learning_rate": 4.4632594196620945e-05, |
|
"loss": 1.5041, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.42772062804547917, |
|
"grad_norm": 1.2938163974151866, |
|
"learning_rate": 4.456660093012155e-05, |
|
"loss": 1.4565, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.43042772062804546, |
|
"grad_norm": 1.4591593663238542, |
|
"learning_rate": 4.4500253861278926e-05, |
|
"loss": 1.457, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.4331348132106118, |
|
"grad_norm": 1.619966018635216, |
|
"learning_rate": 4.443355418977761e-05, |
|
"loss": 1.4848, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.4358419057931781, |
|
"grad_norm": 1.5713970862575257, |
|
"learning_rate": 4.436650312167787e-05, |
|
"loss": 1.4311, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.43854899837574446, |
|
"grad_norm": 4.836628503687221, |
|
"learning_rate": 4.429910186939392e-05, |
|
"loss": 1.5039, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.44125609095831075, |
|
"grad_norm": 1.5617479359446638, |
|
"learning_rate": 4.4231351651671985e-05, |
|
"loss": 1.4387, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.4439631835408771, |
|
"grad_norm": 4.127569209273957, |
|
"learning_rate": 4.4163253693568266e-05, |
|
"loss": 1.4654, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.4466702761234434, |
|
"grad_norm": 1.9045020579112555, |
|
"learning_rate": 4.409480922642677e-05, |
|
"loss": 1.403, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.44937736870600975, |
|
"grad_norm": 1.444246465499322, |
|
"learning_rate": 4.40260194878571e-05, |
|
"loss": 1.4568, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.45208446128857604, |
|
"grad_norm": 1.3435184719673208, |
|
"learning_rate": 4.395688572171203e-05, |
|
"loss": 1.5321, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.4547915538711424, |
|
"grad_norm": 1.8192617831889093, |
|
"learning_rate": 4.3887409178064995e-05, |
|
"loss": 1.4693, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.4574986464537087, |
|
"grad_norm": 1.3323020396326049, |
|
"learning_rate": 4.3817591113187563e-05, |
|
"loss": 1.4333, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.46020573903627504, |
|
"grad_norm": 1.2996234776965605, |
|
"learning_rate": 4.374743278952662e-05, |
|
"loss": 1.4034, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.4629128316188414, |
|
"grad_norm": 1.4381391819706373, |
|
"learning_rate": 4.367693547568163e-05, |
|
"loss": 1.4924, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.4656199242014077, |
|
"grad_norm": 1.8146691731599425, |
|
"learning_rate": 4.360610044638167e-05, |
|
"loss": 1.4284, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.46832701678397404, |
|
"grad_norm": 2.968939444897473, |
|
"learning_rate": 4.353492898246234e-05, |
|
"loss": 1.4586, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.47103410936654033, |
|
"grad_norm": 2.510758529664001, |
|
"learning_rate": 4.346342237084267e-05, |
|
"loss": 1.4913, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.4737412019491067, |
|
"grad_norm": 1.6561919074934996, |
|
"learning_rate": 4.339158190450176e-05, |
|
"loss": 1.474, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.476448294531673, |
|
"grad_norm": 1.6475904490305402, |
|
"learning_rate": 4.331940888245552e-05, |
|
"loss": 1.4331, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.47915538711423933, |
|
"grad_norm": 1.5072770205173374, |
|
"learning_rate": 4.3246904609733074e-05, |
|
"loss": 1.4949, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.4818624796968056, |
|
"grad_norm": 1.3220720078913577, |
|
"learning_rate": 4.317407039735323e-05, |
|
"loss": 1.4037, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.484569572279372, |
|
"grad_norm": 2.630264685866369, |
|
"learning_rate": 4.310090756230072e-05, |
|
"loss": 1.4366, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.48727666486193827, |
|
"grad_norm": 1.8523936235855012, |
|
"learning_rate": 4.3027417427502435e-05, |
|
"loss": 1.4757, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.4899837574445046, |
|
"grad_norm": 1.6259780791806493, |
|
"learning_rate": 4.2953601321803494e-05, |
|
"loss": 1.4554, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.4926908500270709, |
|
"grad_norm": 1.3829948059505168, |
|
"learning_rate": 4.287946057994318e-05, |
|
"loss": 1.444, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.49539794260963727, |
|
"grad_norm": 1.472490908240983, |
|
"learning_rate": 4.2804996542530854e-05, |
|
"loss": 1.4799, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.49810503519220356, |
|
"grad_norm": 1.4418412770727913, |
|
"learning_rate": 4.273021055602168e-05, |
|
"loss": 1.4539, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.5008121277747699, |
|
"grad_norm": 1.3579847745967637, |
|
"learning_rate": 4.265510397269229e-05, |
|
"loss": 1.4866, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.5035192203573362, |
|
"grad_norm": 1.2861587741021074, |
|
"learning_rate": 4.2579678150616344e-05, |
|
"loss": 1.4509, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.5062263129399025, |
|
"grad_norm": 1.69680502271894, |
|
"learning_rate": 4.2503934453639945e-05, |
|
"loss": 1.4417, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.5089334055224689, |
|
"grad_norm": 1.374930502329713, |
|
"learning_rate": 4.2427874251357e-05, |
|
"loss": 1.4459, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.5116404981050352, |
|
"grad_norm": 1.6333223217571196, |
|
"learning_rate": 4.235149891908447e-05, |
|
"loss": 1.4187, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.5143475906876015, |
|
"grad_norm": 1.2550594285330077, |
|
"learning_rate": 4.2274809837837444e-05, |
|
"loss": 1.4777, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.5170546832701678, |
|
"grad_norm": 1.7713884342868287, |
|
"learning_rate": 4.219780839430424e-05, |
|
"loss": 1.4742, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.5197617758527342, |
|
"grad_norm": 1.8526614078728505, |
|
"learning_rate": 4.212049598082126e-05, |
|
"loss": 1.4639, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.5224688684353005, |
|
"grad_norm": 1.7077160912171838, |
|
"learning_rate": 4.204287399534791e-05, |
|
"loss": 1.4678, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.5251759610178668, |
|
"grad_norm": 1.7342679697841041, |
|
"learning_rate": 4.196494384144119e-05, |
|
"loss": 1.4587, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.5278830536004331, |
|
"grad_norm": 1.3760999544704575, |
|
"learning_rate": 4.188670692823042e-05, |
|
"loss": 1.4364, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.5305901461829995, |
|
"grad_norm": 1.4927106037863223, |
|
"learning_rate": 4.1808164670391747e-05, |
|
"loss": 1.4636, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.5332972387655658, |
|
"grad_norm": 1.3148570909335389, |
|
"learning_rate": 4.172931848812251e-05, |
|
"loss": 1.4781, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.5360043313481321, |
|
"grad_norm": 1.3941339635980279, |
|
"learning_rate": 4.1650169807115623e-05, |
|
"loss": 1.4214, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.5387114239306985, |
|
"grad_norm": 1.1834123922199669, |
|
"learning_rate": 4.157072005853373e-05, |
|
"loss": 1.4226, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.5414185165132648, |
|
"grad_norm": 1.564745160522187, |
|
"learning_rate": 4.149097067898342e-05, |
|
"loss": 1.4631, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.5441256090958311, |
|
"grad_norm": 1.5351238663722413, |
|
"learning_rate": 4.141092311048914e-05, |
|
"loss": 1.4231, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.5468327016783974, |
|
"grad_norm": 1.4673872230064178, |
|
"learning_rate": 4.133057880046721e-05, |
|
"loss": 1.4308, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.5495397942609638, |
|
"grad_norm": 2.0555674903653576, |
|
"learning_rate": 4.1249939201699605e-05, |
|
"loss": 1.4054, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.5522468868435301, |
|
"grad_norm": 1.4275260085968484, |
|
"learning_rate": 4.116900577230769e-05, |
|
"loss": 1.4171, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.5549539794260964, |
|
"grad_norm": 1.2596642799013085, |
|
"learning_rate": 4.108777997572588e-05, |
|
"loss": 1.4064, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.5576610720086627, |
|
"grad_norm": 1.475728319316803, |
|
"learning_rate": 4.100626328067515e-05, |
|
"loss": 1.451, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.5603681645912291, |
|
"grad_norm": 1.4445720383429121, |
|
"learning_rate": 4.0924457161136465e-05, |
|
"loss": 1.4357, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.5630752571737954, |
|
"grad_norm": 1.277001561017148, |
|
"learning_rate": 4.0842363096324186e-05, |
|
"loss": 1.4152, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.5657823497563617, |
|
"grad_norm": 4.077914450657582, |
|
"learning_rate": 4.075998257065927e-05, |
|
"loss": 1.4219, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.568489442338928, |
|
"grad_norm": 1.4881560211628038, |
|
"learning_rate": 4.067731707374245e-05, |
|
"loss": 1.4558, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.5711965349214944, |
|
"grad_norm": 1.8102222242604538, |
|
"learning_rate": 4.0594368100327276e-05, |
|
"loss": 1.4418, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.5739036275040607, |
|
"grad_norm": 1.5342688729092908, |
|
"learning_rate": 4.051113715029314e-05, |
|
"loss": 1.4186, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.576610720086627, |
|
"grad_norm": 1.4776107653456874, |
|
"learning_rate": 4.0427625728618094e-05, |
|
"loss": 1.4241, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.5793178126691932, |
|
"grad_norm": 1.8005823424782537, |
|
"learning_rate": 4.034383534535169e-05, |
|
"loss": 1.4604, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.5820249052517596, |
|
"grad_norm": 1.4602346950120104, |
|
"learning_rate": 4.0259767515587607e-05, |
|
"loss": 1.4164, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.584731997834326, |
|
"grad_norm": 2.2063404920756025, |
|
"learning_rate": 4.017542375943635e-05, |
|
"loss": 1.4546, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.5874390904168922, |
|
"grad_norm": 1.3913645582438086, |
|
"learning_rate": 4.009080560199767e-05, |
|
"loss": 1.4499, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.5901461829994585, |
|
"grad_norm": 1.4511449754518972, |
|
"learning_rate": 4.0005914573333076e-05, |
|
"loss": 1.3874, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.5928532755820249, |
|
"grad_norm": 1.374243913815498, |
|
"learning_rate": 3.992075220843806e-05, |
|
"loss": 1.3881, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.5955603681645912, |
|
"grad_norm": 1.4550895039236171, |
|
"learning_rate": 3.983532004721446e-05, |
|
"loss": 1.4356, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.5982674607471575, |
|
"grad_norm": 2.8598201176092597, |
|
"learning_rate": 3.974961963444252e-05, |
|
"loss": 1.4194, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.6009745533297238, |
|
"grad_norm": 1.2900018758134322, |
|
"learning_rate": 3.9663652519753016e-05, |
|
"loss": 1.4254, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.6036816459122902, |
|
"grad_norm": 1.3550238391234248, |
|
"learning_rate": 3.95774202575992e-05, |
|
"loss": 1.4265, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.6063887384948565, |
|
"grad_norm": 1.2357167108126608, |
|
"learning_rate": 3.9490924407228725e-05, |
|
"loss": 1.439, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.6090958310774228, |
|
"grad_norm": 1.7838096381387492, |
|
"learning_rate": 3.940416653265542e-05, |
|
"loss": 1.3754, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.6118029236599891, |
|
"grad_norm": 1.5918950572416704, |
|
"learning_rate": 3.931714820263104e-05, |
|
"loss": 1.4193, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.6145100162425555, |
|
"grad_norm": 1.9008726278943935, |
|
"learning_rate": 3.922987099061685e-05, |
|
"loss": 1.4014, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.6172171088251218, |
|
"grad_norm": 1.5355669123167321, |
|
"learning_rate": 3.914233647475525e-05, |
|
"loss": 1.4242, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.6199242014076881, |
|
"grad_norm": 8.645566079065198, |
|
"learning_rate": 3.9054546237841156e-05, |
|
"loss": 1.3978, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.6226312939902545, |
|
"grad_norm": 1.4299000700088789, |
|
"learning_rate": 3.896650186729345e-05, |
|
"loss": 1.4444, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.6253383865728208, |
|
"grad_norm": 1.5568306708216317, |
|
"learning_rate": 3.8878204955126224e-05, |
|
"loss": 1.4535, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.6280454791553871, |
|
"grad_norm": 1.4298727206313604, |
|
"learning_rate": 3.878965709792003e-05, |
|
"loss": 1.3964, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.6307525717379534, |
|
"grad_norm": 1.9081562507364116, |
|
"learning_rate": 3.8700859896793004e-05, |
|
"loss": 1.4637, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.6334596643205198, |
|
"grad_norm": 1.7135745696260376, |
|
"learning_rate": 3.861181495737187e-05, |
|
"loss": 1.4429, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.6361667569030861, |
|
"grad_norm": 1.5360272794198329, |
|
"learning_rate": 3.852252388976299e-05, |
|
"loss": 1.4268, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.6388738494856524, |
|
"grad_norm": 1.2103422788055682, |
|
"learning_rate": 3.843298830852317e-05, |
|
"loss": 1.4469, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.6415809420682187, |
|
"grad_norm": 1.2503359557443374, |
|
"learning_rate": 3.834320983263052e-05, |
|
"loss": 1.4208, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.6442880346507851, |
|
"grad_norm": 1.704839306648677, |
|
"learning_rate": 3.8253190085455136e-05, |
|
"loss": 1.4205, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.6469951272333514, |
|
"grad_norm": 1.7016731029603525, |
|
"learning_rate": 3.816293069472981e-05, |
|
"loss": 1.4311, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.6497022198159177, |
|
"grad_norm": 1.4362792263340989, |
|
"learning_rate": 3.8072433292520526e-05, |
|
"loss": 1.38, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.652409312398484, |
|
"grad_norm": 1.5654681480694452, |
|
"learning_rate": 3.7981699515196975e-05, |
|
"loss": 1.4094, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.6551164049810504, |
|
"grad_norm": 1.4659564972472934, |
|
"learning_rate": 3.789073100340298e-05, |
|
"loss": 1.409, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.6578234975636167, |
|
"grad_norm": 7.417972847111027, |
|
"learning_rate": 3.779952940202686e-05, |
|
"loss": 1.4294, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.660530590146183, |
|
"grad_norm": 1.3497894617366657, |
|
"learning_rate": 3.770809636017158e-05, |
|
"loss": 1.407, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.6632376827287493, |
|
"grad_norm": 1.3081753915783962, |
|
"learning_rate": 3.7616433531125035e-05, |
|
"loss": 1.3756, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.6659447753113157, |
|
"grad_norm": 1.527466315347287, |
|
"learning_rate": 3.752454257233013e-05, |
|
"loss": 1.4118, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.668651867893882, |
|
"grad_norm": 1.6627023176259257, |
|
"learning_rate": 3.7432425145354804e-05, |
|
"loss": 1.4329, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.6713589604764483, |
|
"grad_norm": 1.309294160787172, |
|
"learning_rate": 3.734008291586195e-05, |
|
"loss": 1.3788, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.6740660530590146, |
|
"grad_norm": 1.4372591663740029, |
|
"learning_rate": 3.724751755357937e-05, |
|
"loss": 1.3044, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.676773145641581, |
|
"grad_norm": 1.8886812778978754, |
|
"learning_rate": 3.71547307322695e-05, |
|
"loss": 1.3798, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.6794802382241473, |
|
"grad_norm": 1.9753319009276904, |
|
"learning_rate": 3.7061724129699233e-05, |
|
"loss": 1.4362, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.6821873308067136, |
|
"grad_norm": 1.5870592037312612, |
|
"learning_rate": 3.6968499427609493e-05, |
|
"loss": 1.434, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.6848944233892799, |
|
"grad_norm": 1.2460580041038836, |
|
"learning_rate": 3.6875058311684896e-05, |
|
"loss": 1.3991, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.6876015159718463, |
|
"grad_norm": 1.984255011434617, |
|
"learning_rate": 3.6781402471523244e-05, |
|
"loss": 1.426, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.6903086085544126, |
|
"grad_norm": 1.7368812988388407, |
|
"learning_rate": 3.6687533600604955e-05, |
|
"loss": 1.3894, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.6930157011369789, |
|
"grad_norm": 1.4619686462990809, |
|
"learning_rate": 3.659345339626247e-05, |
|
"loss": 1.4415, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.6957227937195453, |
|
"grad_norm": 1.2916377265272263, |
|
"learning_rate": 3.649916355964956e-05, |
|
"loss": 1.4081, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.6984298863021116, |
|
"grad_norm": 1.4083989609601546, |
|
"learning_rate": 3.640466579571052e-05, |
|
"loss": 1.3883, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.7011369788846779, |
|
"grad_norm": 1.7091470425749693, |
|
"learning_rate": 3.630996181314944e-05, |
|
"loss": 1.4253, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.7038440714672441, |
|
"grad_norm": 1.3001138215713586, |
|
"learning_rate": 3.621505332439918e-05, |
|
"loss": 1.4529, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.7065511640498106, |
|
"grad_norm": 1.7216936795211795, |
|
"learning_rate": 3.611994204559051e-05, |
|
"loss": 1.4673, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.7092582566323768, |
|
"grad_norm": 1.781463187900956, |
|
"learning_rate": 3.602462969652102e-05, |
|
"loss": 1.4071, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.7119653492149431, |
|
"grad_norm": 1.3210728327580619, |
|
"learning_rate": 3.5929118000624064e-05, |
|
"loss": 1.394, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.7146724417975094, |
|
"grad_norm": 1.7986279315081266, |
|
"learning_rate": 3.5833408684937563e-05, |
|
"loss": 1.4134, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.7173795343800758, |
|
"grad_norm": 1.8124093789053668, |
|
"learning_rate": 3.573750348007278e-05, |
|
"loss": 1.395, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.7200866269626421, |
|
"grad_norm": 1.3733544619692244, |
|
"learning_rate": 3.564140412018306e-05, |
|
"loss": 1.4952, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.7227937195452084, |
|
"grad_norm": 1.1625577227023642, |
|
"learning_rate": 3.554511234293243e-05, |
|
"loss": 1.3995, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.7255008121277747, |
|
"grad_norm": 1.1967978911566575, |
|
"learning_rate": 3.54486298894642e-05, |
|
"loss": 1.3751, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.7282079047103411, |
|
"grad_norm": 1.5901060329868977, |
|
"learning_rate": 3.5351958504369504e-05, |
|
"loss": 1.3714, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.7309149972929074, |
|
"grad_norm": 1.4331430962829053, |
|
"learning_rate": 3.525509993565569e-05, |
|
"loss": 1.4538, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.7336220898754737, |
|
"grad_norm": 1.578052835087518, |
|
"learning_rate": 3.5158055934714784e-05, |
|
"loss": 1.4321, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.73632918245804, |
|
"grad_norm": 1.4900021275863136, |
|
"learning_rate": 3.506082825629176e-05, |
|
"loss": 1.4224, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.7390362750406064, |
|
"grad_norm": 1.803432159658249, |
|
"learning_rate": 3.496341865845286e-05, |
|
"loss": 1.4066, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.7417433676231727, |
|
"grad_norm": 1.8906057321665828, |
|
"learning_rate": 3.4865828902553786e-05, |
|
"loss": 1.3412, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.744450460205739, |
|
"grad_norm": 1.5022839535897015, |
|
"learning_rate": 3.476806075320784e-05, |
|
"loss": 1.4204, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.7471575527883053, |
|
"grad_norm": 1.2487314478311355, |
|
"learning_rate": 3.467011597825402e-05, |
|
"loss": 1.3934, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.7498646453708717, |
|
"grad_norm": 1.546439548647951, |
|
"learning_rate": 3.4571996348725066e-05, |
|
"loss": 1.3971, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.752571737953438, |
|
"grad_norm": 1.2557741585940914, |
|
"learning_rate": 3.447370363881543e-05, |
|
"loss": 1.4221, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.7552788305360043, |
|
"grad_norm": 1.3960693603390584, |
|
"learning_rate": 3.437523962584923e-05, |
|
"loss": 1.4134, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.7579859231185706, |
|
"grad_norm": 1.5039396517235502, |
|
"learning_rate": 3.427660609024799e-05, |
|
"loss": 1.3927, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.760693015701137, |
|
"grad_norm": 1.0815563013041047, |
|
"learning_rate": 3.417780481549863e-05, |
|
"loss": 1.3605, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.7634001082837033, |
|
"grad_norm": 1.3586955443715294, |
|
"learning_rate": 3.407883758812106e-05, |
|
"loss": 1.4041, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.7661072008662696, |
|
"grad_norm": 1.0920930772742923, |
|
"learning_rate": 3.397970619763597e-05, |
|
"loss": 1.428, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.7688142934488359, |
|
"grad_norm": 1.3785331176013547, |
|
"learning_rate": 3.388041243653242e-05, |
|
"loss": 1.4384, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.7715213860314023, |
|
"grad_norm": 1.3052185999792532, |
|
"learning_rate": 3.3780958100235445e-05, |
|
"loss": 1.3429, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.7742284786139686, |
|
"grad_norm": 2.042603724777467, |
|
"learning_rate": 3.3681344987073615e-05, |
|
"loss": 1.3328, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.7769355711965349, |
|
"grad_norm": 1.441878338125671, |
|
"learning_rate": 3.358157489824648e-05, |
|
"loss": 1.4065, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.7796426637791013, |
|
"grad_norm": 1.35241437286716, |
|
"learning_rate": 3.348164963779202e-05, |
|
"loss": 1.4298, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.7823497563616676, |
|
"grad_norm": 1.6143507738425422, |
|
"learning_rate": 3.3381571012554024e-05, |
|
"loss": 1.3649, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.7850568489442339, |
|
"grad_norm": 1.4514578355155894, |
|
"learning_rate": 3.328134083214941e-05, |
|
"loss": 1.3838, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.7877639415268002, |
|
"grad_norm": 1.515687318078472, |
|
"learning_rate": 3.318096090893551e-05, |
|
"loss": 1.4035, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.7904710341093666, |
|
"grad_norm": 1.2884021166011788, |
|
"learning_rate": 3.308043305797729e-05, |
|
"loss": 1.3559, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.7931781266919329, |
|
"grad_norm": 1.4337183263775037, |
|
"learning_rate": 3.297975909701457e-05, |
|
"loss": 1.4059, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.7958852192744992, |
|
"grad_norm": 1.5371766509521272, |
|
"learning_rate": 3.2878940846429085e-05, |
|
"loss": 1.3774, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.7985923118570655, |
|
"grad_norm": 1.3405567316289324, |
|
"learning_rate": 3.277798012921163e-05, |
|
"loss": 1.3904, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.8012994044396319, |
|
"grad_norm": 1.4555652196907567, |
|
"learning_rate": 3.2676878770929075e-05, |
|
"loss": 1.4146, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.8040064970221982, |
|
"grad_norm": 1.4152140878656931, |
|
"learning_rate": 3.257563859969134e-05, |
|
"loss": 1.4307, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.8067135896047645, |
|
"grad_norm": 1.4286723786741538, |
|
"learning_rate": 3.2474261446118384e-05, |
|
"loss": 1.3802, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.8094206821873308, |
|
"grad_norm": 1.7253495020201928, |
|
"learning_rate": 3.237274914330704e-05, |
|
"loss": 1.3832, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.8121277747698972, |
|
"grad_norm": 1.42537117722243, |
|
"learning_rate": 3.227110352679791e-05, |
|
"loss": 1.4196, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.8148348673524635, |
|
"grad_norm": 1.6839610738971422, |
|
"learning_rate": 3.216932643454219e-05, |
|
"loss": 1.3631, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.8175419599350298, |
|
"grad_norm": 1.971059453029315, |
|
"learning_rate": 3.2067419706868404e-05, |
|
"loss": 1.3943, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.8202490525175961, |
|
"grad_norm": 1.4912077914008548, |
|
"learning_rate": 3.196538518644911e-05, |
|
"loss": 1.3607, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.8229561451001625, |
|
"grad_norm": 2.642611457217887, |
|
"learning_rate": 3.186322471826765e-05, |
|
"loss": 1.3885, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.8256632376827288, |
|
"grad_norm": 2.0970669091185052, |
|
"learning_rate": 3.176094014958473e-05, |
|
"loss": 1.4174, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.828370330265295, |
|
"grad_norm": 1.6411214953570172, |
|
"learning_rate": 3.1658533329905034e-05, |
|
"loss": 1.3908, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.8310774228478613, |
|
"grad_norm": 1.4488420914686069, |
|
"learning_rate": 3.155600611094377e-05, |
|
"loss": 1.3525, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.8337845154304278, |
|
"grad_norm": 1.3921616791086928, |
|
"learning_rate": 3.145336034659322e-05, |
|
"loss": 1.3815, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.836491608012994, |
|
"grad_norm": 1.3485960002369286, |
|
"learning_rate": 3.135059789288919e-05, |
|
"loss": 1.3718, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.8391987005955603, |
|
"grad_norm": 3.0257814626066333, |
|
"learning_rate": 3.1247720607977465e-05, |
|
"loss": 1.3773, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.8419057931781266, |
|
"grad_norm": 1.607580731614463, |
|
"learning_rate": 3.11447303520802e-05, |
|
"loss": 1.3527, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.844612885760693, |
|
"grad_norm": 1.370395503929159, |
|
"learning_rate": 3.1041628987462256e-05, |
|
"loss": 1.3978, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.8473199783432593, |
|
"grad_norm": 1.6192384678646978, |
|
"learning_rate": 3.0938418378397615e-05, |
|
"loss": 1.3757, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.8500270709258256, |
|
"grad_norm": 1.4273589649993463, |
|
"learning_rate": 3.0835100391135555e-05, |
|
"loss": 1.3831, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.852734163508392, |
|
"grad_norm": 1.35446316177469, |
|
"learning_rate": 3.0731676893866995e-05, |
|
"loss": 1.3877, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.8554412560909583, |
|
"grad_norm": 2.163498528349308, |
|
"learning_rate": 3.062814975669067e-05, |
|
"loss": 1.3685, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.8581483486735246, |
|
"grad_norm": 1.3900238288203155, |
|
"learning_rate": 3.052452085157933e-05, |
|
"loss": 1.358, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.8608554412560909, |
|
"grad_norm": 1.216239709612049, |
|
"learning_rate": 3.042079205234589e-05, |
|
"loss": 1.3548, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.8635625338386573, |
|
"grad_norm": 1.349220629569809, |
|
"learning_rate": 3.0316965234609535e-05, |
|
"loss": 1.3874, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.8662696264212236, |
|
"grad_norm": 1.1250681117090537, |
|
"learning_rate": 3.021304227576182e-05, |
|
"loss": 1.3876, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.8689767190037899, |
|
"grad_norm": 1.3913714515749762, |
|
"learning_rate": 3.0109025054932744e-05, |
|
"loss": 1.347, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.8716838115863562, |
|
"grad_norm": 1.0609773784043655, |
|
"learning_rate": 3.0004915452956695e-05, |
|
"loss": 1.3667, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.8743909041689226, |
|
"grad_norm": 1.4538812767833513, |
|
"learning_rate": 2.9900715352338538e-05, |
|
"loss": 1.3696, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.8770979967514889, |
|
"grad_norm": 1.7108604924584878, |
|
"learning_rate": 2.979642663721951e-05, |
|
"loss": 1.3795, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.8798050893340552, |
|
"grad_norm": 2.5139594534456546, |
|
"learning_rate": 2.9692051193343184e-05, |
|
"loss": 1.3734, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.8825121819166215, |
|
"grad_norm": 2.5623897245796736, |
|
"learning_rate": 2.958759090802134e-05, |
|
"loss": 1.372, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.8852192744991879, |
|
"grad_norm": 1.7887583210364995, |
|
"learning_rate": 2.948304767009986e-05, |
|
"loss": 1.3814, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.8879263670817542, |
|
"grad_norm": 1.0710727849835535, |
|
"learning_rate": 2.93784233699246e-05, |
|
"loss": 1.4349, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.8906334596643205, |
|
"grad_norm": 2.563978550658464, |
|
"learning_rate": 2.927371989930714e-05, |
|
"loss": 1.3437, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.8933405522468868, |
|
"grad_norm": 2.145609531812958, |
|
"learning_rate": 2.9168939151490638e-05, |
|
"loss": 1.4138, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.8960476448294532, |
|
"grad_norm": 1.4751882863646832, |
|
"learning_rate": 2.906408302111556e-05, |
|
"loss": 1.3525, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.8987547374120195, |
|
"grad_norm": 1.499040784897915, |
|
"learning_rate": 2.8959153404185468e-05, |
|
"loss": 1.3717, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.9014618299945858, |
|
"grad_norm": 1.3438190150311098, |
|
"learning_rate": 2.885415219803267e-05, |
|
"loss": 1.3545, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.9041689225771521, |
|
"grad_norm": 1.4506182307044833, |
|
"learning_rate": 2.8749081301283955e-05, |
|
"loss": 1.3841, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.9068760151597185, |
|
"grad_norm": 1.5878698130305084, |
|
"learning_rate": 2.8643942613826264e-05, |
|
"loss": 1.3263, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.9095831077422848, |
|
"grad_norm": 1.3742244372155115, |
|
"learning_rate": 2.8538738036772318e-05, |
|
"loss": 1.3403, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.9122902003248511, |
|
"grad_norm": 1.2893381483305373, |
|
"learning_rate": 2.8433469472426257e-05, |
|
"loss": 1.3252, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.9149972929074174, |
|
"grad_norm": 1.7256165310292995, |
|
"learning_rate": 2.832813882424923e-05, |
|
"loss": 1.3595, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.9177043854899838, |
|
"grad_norm": 1.574410258409617, |
|
"learning_rate": 2.8222747996824973e-05, |
|
"loss": 1.3498, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.9204114780725501, |
|
"grad_norm": 1.4304720741459473, |
|
"learning_rate": 2.8117298895825417e-05, |
|
"loss": 1.3613, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.9231185706551164, |
|
"grad_norm": 1.8155878547283097, |
|
"learning_rate": 2.8011793427976152e-05, |
|
"loss": 1.3675, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.9258256632376828, |
|
"grad_norm": 1.568315689782668, |
|
"learning_rate": 2.7906233501022005e-05, |
|
"loss": 1.3878, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.9285327558202491, |
|
"grad_norm": 1.3449271497175002, |
|
"learning_rate": 2.780062102369255e-05, |
|
"loss": 1.3451, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.9312398484028154, |
|
"grad_norm": 1.8796135561662028, |
|
"learning_rate": 2.7694957905667533e-05, |
|
"loss": 1.3541, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.9339469409853817, |
|
"grad_norm": 1.5881147795174977, |
|
"learning_rate": 2.7589246057542424e-05, |
|
"loss": 1.3481, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.9366540335679481, |
|
"grad_norm": 1.5669861875980806, |
|
"learning_rate": 2.74834873907938e-05, |
|
"loss": 1.3049, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.9393611261505144, |
|
"grad_norm": 1.3159700318764556, |
|
"learning_rate": 2.7377683817744825e-05, |
|
"loss": 1.3724, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.9420682187330807, |
|
"grad_norm": 1.1861068665908672, |
|
"learning_rate": 2.727183725153066e-05, |
|
"loss": 1.3996, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.944775311315647, |
|
"grad_norm": 1.588796314194285, |
|
"learning_rate": 2.7165949606063838e-05, |
|
"loss": 1.3868, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.9474824038982134, |
|
"grad_norm": 1.8405833406640584, |
|
"learning_rate": 2.7060022795999722e-05, |
|
"loss": 1.3248, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.9501894964807797, |
|
"grad_norm": 1.4845932153084007, |
|
"learning_rate": 2.6954058736701825e-05, |
|
"loss": 1.3113, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.952896589063346, |
|
"grad_norm": 2.2421879491898347, |
|
"learning_rate": 2.684805934420721e-05, |
|
"loss": 1.3569, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.9556036816459123, |
|
"grad_norm": 1.8263161858131314, |
|
"learning_rate": 2.674202653519183e-05, |
|
"loss": 1.353, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.9583107742284787, |
|
"grad_norm": 1.6830556154140768, |
|
"learning_rate": 2.6635962226935844e-05, |
|
"loss": 1.3581, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.961017866811045, |
|
"grad_norm": 1.1962057897073894, |
|
"learning_rate": 2.652986833728905e-05, |
|
"loss": 1.3377, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.9637249593936112, |
|
"grad_norm": 1.1605034457482457, |
|
"learning_rate": 2.642374678463606e-05, |
|
"loss": 1.3142, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.9664320519761775, |
|
"grad_norm": 1.6116484287446706, |
|
"learning_rate": 2.6317599487861732e-05, |
|
"loss": 1.3704, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.969139144558744, |
|
"grad_norm": 2.022789120877381, |
|
"learning_rate": 2.6211428366316414e-05, |
|
"loss": 1.3188, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.9718462371413102, |
|
"grad_norm": 2.0186900985710245, |
|
"learning_rate": 2.6105235339781254e-05, |
|
"loss": 1.3168, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.9745533297238765, |
|
"grad_norm": 2.040600986429446, |
|
"learning_rate": 2.599902232843348e-05, |
|
"loss": 1.361, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.9772604223064428, |
|
"grad_norm": 1.2311848491778494, |
|
"learning_rate": 2.5892791252811694e-05, |
|
"loss": 1.3658, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.9799675148890092, |
|
"grad_norm": 1.8540679978665582, |
|
"learning_rate": 2.578654403378112e-05, |
|
"loss": 1.373, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.9826746074715755, |
|
"grad_norm": 1.3073823909407571, |
|
"learning_rate": 2.5680282592498913e-05, |
|
"loss": 1.3845, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.9853817000541418, |
|
"grad_norm": 1.6720630501211762, |
|
"learning_rate": 2.5574008850379366e-05, |
|
"loss": 1.3438, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.9880887926367081, |
|
"grad_norm": 1.4546581832324275, |
|
"learning_rate": 2.546772472905921e-05, |
|
"loss": 1.2815, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.9907958852192745, |
|
"grad_norm": 1.4509964601293808, |
|
"learning_rate": 2.5361432150362848e-05, |
|
"loss": 1.3469, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.9935029778018408, |
|
"grad_norm": 1.4316417505770653, |
|
"learning_rate": 2.5255133036267608e-05, |
|
"loss": 1.3315, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.9962100703844071, |
|
"grad_norm": 1.7706024949129144, |
|
"learning_rate": 2.5148829308869004e-05, |
|
"loss": 1.3607, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.9989171629669734, |
|
"grad_norm": 1.5222816405267459, |
|
"learning_rate": 2.5042522890345936e-05, |
|
"loss": 1.2815, |
|
"step": 3690 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 7388, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"total_flos": 1754891515854848.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |