{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6506710044733631,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.999967339434433e-05,
      "loss": 1.8952,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999869358591101e-05,
      "loss": 1.7279,
      "step": 20
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999706060030093e-05,
      "loss": 1.6717,
      "step": 30
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9994774480181473e-05,
      "loss": 1.64,
      "step": 40
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.999183528528541e-05,
      "loss": 1.5903,
      "step": 50
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.998824309240937e-05,
      "loss": 1.5936,
      "step": 60
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.998399799541179e-05,
      "loss": 1.5633,
      "step": 70
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9979100105210475e-05,
      "loss": 1.5244,
      "step": 80
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.997354954977972e-05,
      "loss": 1.5517,
      "step": 90
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.996734647414696e-05,
      "loss": 1.5226,
      "step": 100
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.996049104038895e-05,
      "loss": 1.5107,
      "step": 110
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9952983427627566e-05,
      "loss": 1.5111,
      "step": 120
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9944823832025115e-05,
      "loss": 1.5107,
      "step": 130
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.99360124667792e-05,
      "loss": 1.5035,
      "step": 140
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9926549562117164e-05,
      "loss": 1.4973,
      "step": 150
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.991643536529006e-05,
      "loss": 1.4968,
      "step": 160
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.99056701405662e-05,
      "loss": 1.479,
      "step": 170
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.989425416922423e-05,
      "loss": 1.4879,
      "step": 180
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.988218774954583e-05,
      "loss": 1.4943,
      "step": 190
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.986947119680788e-05,
      "loss": 1.4952,
      "step": 200
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.985610484327421e-05,
      "loss": 1.4588,
      "step": 210
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9842089038186954e-05,
      "loss": 1.4869,
      "step": 220
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9827424147757405e-05,
      "loss": 1.4762,
      "step": 230
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.981211055515648e-05,
      "loss": 1.4909,
      "step": 240
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.9796148660504624e-05,
      "loss": 1.4659,
      "step": 250
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.9779538880861455e-05,
      "loss": 1.4683,
      "step": 260
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.976228165021482e-05,
      "loss": 1.4581,
      "step": 270
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9744377419469426e-05,
      "loss": 1.4481,
      "step": 280
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.972582665643514e-05,
      "loss": 1.4571,
      "step": 290
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.970662984581468e-05,
      "loss": 1.4683,
      "step": 300
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.9686787489191e-05,
      "loss": 1.4528,
      "step": 310
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.966630010501417e-05,
      "loss": 1.4548,
      "step": 320
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.964516822858784e-05,
      "loss": 1.4487,
      "step": 330
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.962339241205523e-05,
      "loss": 1.4468,
      "step": 340
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.960097322438474e-05,
      "loss": 1.468,
      "step": 350
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.957791125135503e-05,
      "loss": 1.4601,
      "step": 360
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.955420709553979e-05,
      "loss": 1.4604,
      "step": 370
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.9529861376291895e-05,
      "loss": 1.4528,
      "step": 380
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.9504874729727344e-05,
      "loss": 1.4478,
      "step": 390
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.947924780870853e-05,
      "loss": 1.4697,
      "step": 400
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.945298128282724e-05,
      "loss": 1.45,
      "step": 410
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.942607583838715e-05,
      "loss": 1.4368,
      "step": 420
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.939853217838588e-05,
      "loss": 1.4279,
      "step": 430
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.937035102249665e-05,
      "loss": 1.4384,
      "step": 440
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.934153310704945e-05,
      "loss": 1.4543,
      "step": 450
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.931207918501181e-05,
      "loss": 1.4581,
      "step": 460
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.9281990025969135e-05,
      "loss": 1.4503,
      "step": 470
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.925126641610458e-05,
      "loss": 1.4333,
      "step": 480
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.921990915817852e-05,
      "loss": 1.4506,
      "step": 490
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.91879190715076e-05,
      "loss": 1.4368,
      "step": 500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.915529699194326e-05,
      "loss": 1.4428,
      "step": 510
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.9122043771849957e-05,
      "loss": 1.4521,
      "step": 520
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.9088160280082875e-05,
      "loss": 1.4296,
      "step": 530
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.905364740196521e-05,
      "loss": 1.4456,
      "step": 540
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.901850603926507e-05,
      "loss": 1.425,
      "step": 550
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.898273711017187e-05,
      "loss": 1.4604,
      "step": 560
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.894634154927238e-05,
      "loss": 1.4328,
      "step": 570
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.890932030752627e-05,
      "loss": 1.4232,
      "step": 580
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.887167435224131e-05,
      "loss": 1.4344,
      "step": 590
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.8833404667048045e-05,
      "loss": 1.4459,
      "step": 600
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.8794512251874125e-05,
      "loss": 1.4308,
      "step": 610
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.875499812291817e-05,
      "loss": 1.442,
      "step": 620
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.871486331262322e-05,
      "loss": 1.4297,
      "step": 630
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.867410886964976e-05,
      "loss": 1.442,
      "step": 640
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.863273585884831e-05,
      "loss": 1.436,
      "step": 650
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.8590745361231626e-05,
      "loss": 1.4153,
      "step": 660
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.8548138473946424e-05,
      "loss": 1.4344,
      "step": 670
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.850491631024473e-05,
      "loss": 1.4287,
      "step": 680
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.846107999945479e-05,
      "loss": 1.4492,
      "step": 690
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.841663068695157e-05,
      "loss": 1.4636,
      "step": 700
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.837156953412682e-05,
      "loss": 1.4465,
      "step": 710
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.8325897718358725e-05,
      "loss": 1.4127,
      "step": 720
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.8279616432981154e-05,
      "loss": 1.4162,
      "step": 730
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.823272688725248e-05,
      "loss": 1.4313,
      "step": 740
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.818523030632396e-05,
      "loss": 1.4191,
      "step": 750
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.8137127931207745e-05,
      "loss": 1.4212,
      "step": 760
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.808842101874447e-05,
      "loss": 1.4179,
      "step": 770
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.803911084157037e-05,
      "loss": 1.4353,
      "step": 780
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.798919868808406e-05,
      "loss": 1.4294,
      "step": 790
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.793868586241289e-05,
      "loss": 1.437,
      "step": 800
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.7887573684378815e-05,
      "loss": 1.4265,
      "step": 810
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.783586348946393e-05,
      "loss": 1.4121,
      "step": 820
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.778355662877564e-05,
      "loss": 1.414,
      "step": 830
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.773065446901123e-05,
      "loss": 1.4205,
      "step": 840
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.76771583924223e-05,
      "loss": 1.4289,
      "step": 850
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.7623069796778506e-05,
      "loss": 1.4088,
      "step": 860
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.756839009533118e-05,
      "loss": 1.4206,
      "step": 870
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.7513120716776294e-05,
      "loss": 1.4196,
      "step": 880
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.745726310521717e-05,
      "loss": 1.4409,
      "step": 890
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.740081872012676e-05,
      "loss": 1.4221,
      "step": 900
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.734378903630949e-05,
      "loss": 1.4097,
      "step": 910
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.728617554386276e-05,
      "loss": 1.4256,
      "step": 920
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.722797974813795e-05,
      "loss": 1.4139,
      "step": 930
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.716920316970115e-05,
      "loss": 1.4258,
      "step": 940
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.710984734429339e-05,
      "loss": 1.4362,
      "step": 950
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.7049913822790526e-05,
      "loss": 1.4329,
      "step": 960
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.6989404171162745e-05,
      "loss": 1.4058,
      "step": 970
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.692831997043359e-05,
      "loss": 1.4412,
      "step": 980
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.6866662816638694e-05,
      "loss": 1.4148,
      "step": 990
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.680443432078407e-05,
      "loss": 1.4253,
      "step": 1000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.6741636108804016e-05,
      "loss": 1.425,
      "step": 1010
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.6678269821518626e-05,
      "loss": 1.4159,
      "step": 1020
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.661433711459092e-05,
      "loss": 1.3995,
      "step": 1030
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.654983965848361e-05,
      "loss": 1.4065,
      "step": 1040
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.648477913841538e-05,
      "loss": 1.4023,
      "step": 1050
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.641915725431696e-05,
      "loss": 1.39,
      "step": 1060
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.6352975720786617e-05,
      "loss": 1.4028,
      "step": 1070
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.6286236267045415e-05,
      "loss": 1.419,
      "step": 1080
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.621894063689198e-05,
      "loss": 1.4118,
      "step": 1090
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.6151090588657e-05,
      "loss": 1.4,
      "step": 1100
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.608268789515723e-05,
      "loss": 1.4156,
      "step": 1110
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.60137343436492e-05,
      "loss": 1.4186,
      "step": 1120
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.5944231735782486e-05,
      "loss": 1.4164,
      "step": 1130
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.587418188755268e-05,
      "loss": 1.4155,
      "step": 1140
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.580358662925392e-05,
      "loss": 1.4117,
      "step": 1150
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.5732447805431044e-05,
      "loss": 1.399,
      "step": 1160
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.566076727483144e-05,
      "loss": 1.4182,
      "step": 1170
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.558854691035644e-05,
      "loss": 1.4179,
      "step": 1180
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.55157885990124e-05,
      "loss": 1.4144,
      "step": 1190
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.544249424186139e-05,
      "loss": 1.4162,
      "step": 1200
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.5368665753971554e-05,
      "loss": 1.4031,
      "step": 1210
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.5294305064367014e-05,
      "loss": 1.4086,
      "step": 1220
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.521941411597752e-05,
      "loss": 1.4069,
      "step": 1230
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.5143994865587655e-05,
      "loss": 1.4146,
      "step": 1240
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.506804928378571e-05,
      "loss": 1.4365,
      "step": 1250
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.499157935491222e-05,
      "loss": 1.4197,
      "step": 1260
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.491458707700807e-05,
      "loss": 1.3969,
      "step": 1270
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.483707446176235e-05,
      "loss": 1.4089,
      "step": 1280
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.475904353445973e-05,
      "loss": 1.4079,
      "step": 1290
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.46804963339276e-05,
      "loss": 1.414,
      "step": 1300
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.460143491248273e-05,
      "loss": 1.3883,
      "step": 1310
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.452186133587773e-05,
      "loss": 1.407,
      "step": 1320
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.4441777683247e-05,
      "loss": 1.4026,
      "step": 1330
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.436118604705246e-05,
      "loss": 1.396,
      "step": 1340
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.428008853302884e-05,
      "loss": 1.4037,
      "step": 1350
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.419848726012868e-05,
      "loss": 1.3998,
      "step": 1360
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.411638436046696e-05,
      "loss": 1.4212,
      "step": 1370
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.403378197926539e-05,
      "loss": 1.3948,
      "step": 1380
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.395068227479636e-05,
      "loss": 1.3882,
      "step": 1390
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.3867087418326543e-05,
      "loss": 1.4053,
      "step": 1400
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.378299959406018e-05,
      "loss": 1.414,
      "step": 1410
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.369842099908199e-05,
      "loss": 1.4386,
      "step": 1420
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.361335384329975e-05,
      "loss": 1.4114,
      "step": 1430
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.352780034938663e-05,
      "loss": 1.4185,
      "step": 1440
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.3441762752722995e-05,
      "loss": 1.4209,
      "step": 1450
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.335524330133811e-05,
      "loss": 1.4071,
      "step": 1460
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.326824425585135e-05,
      "loss": 1.3998,
      "step": 1470
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.3180767889413144e-05,
      "loss": 1.3928,
      "step": 1480
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.309281648764556e-05,
      "loss": 1.406,
      "step": 1490
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.300439234858262e-05,
      "loss": 1.3937,
      "step": 1500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.291549778261025e-05,
      "loss": 1.3943,
      "step": 1510
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.2826135112405875e-05,
      "loss": 1.393,
      "step": 1520
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.273630667287779e-05,
      "loss": 1.3956,
      "step": 1530
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.264601481110409e-05,
      "loss": 1.3972,
      "step": 1540
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.2555261886271405e-05,
      "loss": 1.4232,
      "step": 1550
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.2464050269613205e-05,
      "loss": 1.4231,
      "step": 1560
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.237238234434789e-05,
      "loss": 1.3968,
      "step": 1570
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.2280260505616477e-05,
      "loss": 1.3858,
      "step": 1580
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.218768716042005e-05,
      "loss": 1.3965,
      "step": 1590
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.209466472755687e-05,
      "loss": 1.4159,
      "step": 1600
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.200119563755915e-05,
      "loss": 1.4228,
      "step": 1610
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.190728233262955e-05,
      "loss": 1.391,
      "step": 1620
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.18129272665774e-05,
      "loss": 1.401,
      "step": 1630
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.171813290475455e-05,
      "loss": 1.4074,
      "step": 1640
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.162290172399099e-05,
      "loss": 1.4002,
      "step": 1650
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.152723621253008e-05,
      "loss": 1.3952,
      "step": 1660
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.1431138869963614e-05,
      "loss": 1.3901,
      "step": 1670
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.133461220716642e-05,
      "loss": 1.3889,
      "step": 1680
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.1237658746230814e-05,
      "loss": 1.4069,
      "step": 1690
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.11402810204007e-05,
      "loss": 1.3851,
      "step": 1700
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.1042481574005355e-05,
      "loss": 1.4234,
      "step": 1710
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.094426296239296e-05,
      "loss": 1.4101,
      "step": 1720
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.0845627751863843e-05,
      "loss": 1.4122,
      "step": 1730
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.0746578519603404e-05,
      "loss": 1.407,
      "step": 1740
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.064711785361482e-05,
      "loss": 1.3968,
      "step": 1750
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.0547248352651354e-05,
      "loss": 1.3994,
      "step": 1760
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.044697262614852e-05,
      "loss": 1.3806,
      "step": 1770
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.0346293294155865e-05,
      "loss": 1.4131,
      "step": 1780
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.0245212987268533e-05,
      "loss": 1.3817,
      "step": 1790
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0143734346558515e-05,
      "loss": 1.3835,
      "step": 1800
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.004186002350565e-05,
      "loss": 1.399,
      "step": 1810
    },
    {
      "epoch": 0.59,
      "learning_rate": 3.993959267992835e-05,
      "loss": 1.4,
      "step": 1820
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.983693498791403e-05,
      "loss": 1.3731,
      "step": 1830
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.9733889629749324e-05,
      "loss": 1.4064,
      "step": 1840
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.963045929784996e-05,
      "loss": 1.4022,
      "step": 1850
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.952664669469046e-05,
      "loss": 1.4068,
      "step": 1860
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.942245453273348e-05,
      "loss": 1.4103,
      "step": 1870
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.931788553435898e-05,
      "loss": 1.4096,
      "step": 1880
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9212942431793056e-05,
      "loss": 1.3996,
      "step": 1890
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.9107627967036576e-05,
      "loss": 1.4124,
      "step": 1900
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.900194489179352e-05,
      "loss": 1.4065,
      "step": 1910
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.88958959673991e-05,
      "loss": 1.3858,
      "step": 1920
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.8789483964747595e-05,
      "loss": 1.3778,
      "step": 1930
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.868271166421996e-05,
      "loss": 1.4049,
      "step": 1940
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.857558185561117e-05,
      "loss": 1.3862,
      "step": 1950
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.846809733805732e-05,
      "loss": 1.371,
      "step": 1960
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.8360260919962535e-05,
      "loss": 1.3884,
      "step": 1970
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.8252075418925525e-05,
      "loss": 1.3748,
      "step": 1980
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.814354366166603e-05,
      "loss": 1.3801,
      "step": 1990
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.8034668483950895e-05,
      "loss": 1.3609,
      "step": 2000
    }
  ],
  "max_steps": 6146,
  "num_train_epochs": 2,
  "total_flos": 3.5108919681776353e+18,
  "trial_name": null,
  "trial_params": null
}