|
{
  "best_metric": 2.2857470512390137,
  "best_model_checkpoint": "../../saves/Yi-1.5-9B-Chat/lora/sft/checkpoint-2800",
  "epoch": 4.148148148148148,
  "eval_steps": 200,
  "global_step": 2800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.01, "grad_norm": 11.507218360900879, "learning_rate": 2e-05, "loss": 3.995, "step": 10},
    {"epoch": 0.03, "grad_norm": 7.0516510009765625, "learning_rate": 4.5e-05, "loss": 3.8373, "step": 20},
    {"epoch": 0.04, "grad_norm": 5.1662492752075195, "learning_rate": 4.999929854041747e-05, "loss": 3.4376, "step": 30},
    {"epoch": 0.06, "grad_norm": 2.265901565551758, "learning_rate": 4.999644892832738e-05, "loss": 3.0327, "step": 40},
    {"epoch": 0.07, "grad_norm": 4.507016658782959, "learning_rate": 4.999140757217391e-05, "loss": 2.7494, "step": 50},
    {"epoch": 0.09, "grad_norm": 3.204484462738037, "learning_rate": 4.9984174913994355e-05, "loss": 2.6219, "step": 60},
    {"epoch": 0.1, "grad_norm": 1.585574746131897, "learning_rate": 4.9974751587964214e-05, "loss": 2.6297, "step": 70},
    {"epoch": 0.12, "grad_norm": 2.005038261413574, "learning_rate": 4.9963138420341604e-05, "loss": 2.6139, "step": 80},
    {"epoch": 0.13, "grad_norm": 2.9263103008270264, "learning_rate": 4.994933642939482e-05, "loss": 2.6403, "step": 90},
    {"epoch": 0.15, "grad_norm": 2.398559331893921, "learning_rate": 4.993334682531302e-05, "loss": 2.7009, "step": 100},
    {"epoch": 0.16, "grad_norm": 2.3030171394348145, "learning_rate": 4.991517101010015e-05, "loss": 2.9349, "step": 110},
    {"epoch": 0.18, "grad_norm": 2.3386640548706055, "learning_rate": 4.9894810577451975e-05, "loss": 2.2748, "step": 120},
    {"epoch": 0.19, "grad_norm": 1.8978828191757202, "learning_rate": 4.9872267312616384e-05, "loss": 2.3982, "step": 130},
    {"epoch": 0.21, "grad_norm": 2.644758939743042, "learning_rate": 4.9847543192236815e-05, "loss": 2.4243, "step": 140},
    {"epoch": 0.22, "grad_norm": 1.887523889541626, "learning_rate": 4.9820640384178954e-05, "loss": 2.5096, "step": 150},
    {"epoch": 0.24, "grad_norm": 1.8633583784103394, "learning_rate": 4.9791561247340674e-05, "loss": 2.3775, "step": 160},
    {"epoch": 0.25, "grad_norm": 2.8847458362579346, "learning_rate": 4.976030833144516e-05, "loss": 2.2988, "step": 170},
    {"epoch": 0.27, "grad_norm": 1.756564974784851, "learning_rate": 4.972688437681736e-05, "loss": 2.3573, "step": 180},
    {"epoch": 0.28, "grad_norm": 4.001959800720215, "learning_rate": 4.969129231414374e-05, "loss": 2.5404, "step": 190},
    {"epoch": 0.3, "grad_norm": 2.489682674407959, "learning_rate": 4.9653535264215256e-05, "loss": 2.5707, "step": 200},
    {"epoch": 0.3, "eval_loss": 2.391483783721924, "eval_runtime": 96.186, "eval_samples_per_second": 6.238, "eval_steps_per_second": 3.119, "step": 200},
    {"epoch": 0.31, "grad_norm": 2.316711187362671, "learning_rate": 4.961361653765377e-05, "loss": 2.5629, "step": 210},
    {"epoch": 0.33, "grad_norm": 2.2436294555664062, "learning_rate": 4.957153963462172e-05, "loss": 2.3062, "step": 220},
    {"epoch": 0.34, "grad_norm": 1.6833086013793945, "learning_rate": 4.952730824451527e-05, "loss": 2.4841, "step": 230},
    {"epoch": 0.36, "grad_norm": 2.4797868728637695, "learning_rate": 4.9480926245640754e-05, "loss": 2.4149, "step": 240},
    {"epoch": 0.37, "grad_norm": 2.539104700088501, "learning_rate": 4.943239770487469e-05, "loss": 2.5375, "step": 250},
    {"epoch": 0.39, "grad_norm": 2.656684637069702, "learning_rate": 4.9381726877307124e-05, "loss": 2.58, "step": 260},
    {"epoch": 0.4, "grad_norm": 2.8903274536132812, "learning_rate": 4.9328918205868556e-05, "loss": 2.36, "step": 270},
    {"epoch": 0.41, "grad_norm": 3.5389297008514404, "learning_rate": 4.927397632094039e-05, "loss": 2.4487, "step": 280},
    {"epoch": 0.43, "grad_norm": 2.3600199222564697, "learning_rate": 4.9216906039948896e-05, "loss": 2.5569, "step": 290},
    {"epoch": 0.44, "grad_norm": 2.797351837158203, "learning_rate": 4.915771236694286e-05, "loss": 2.5081, "step": 300},
    {"epoch": 0.46, "grad_norm": 2.2043402194976807, "learning_rate": 4.909640049215478e-05, "loss": 2.6873, "step": 310},
    {"epoch": 0.47, "grad_norm": 2.070732831954956, "learning_rate": 4.903297579154577e-05, "loss": 2.4935, "step": 320},
    {"epoch": 0.49, "grad_norm": 2.1852307319641113, "learning_rate": 4.896744382633419e-05, "loss": 2.4865, "step": 330},
    {"epoch": 0.5, "grad_norm": 1.5772058963775635, "learning_rate": 4.889981034250807e-05, "loss": 2.3605, "step": 340},
    {"epoch": 0.52, "grad_norm": 2.855281114578247, "learning_rate": 4.883008127032121e-05, "loss": 2.6443, "step": 350},
    {"epoch": 0.53, "grad_norm": 2.7813808917999268, "learning_rate": 4.8758262723773255e-05, "loss": 2.2597, "step": 360},
    {"epoch": 0.55, "grad_norm": 3.336205244064331, "learning_rate": 4.86843610000736e-05, "loss": 2.4404, "step": 370},
    {"epoch": 0.56, "grad_norm": 1.562402367591858, "learning_rate": 4.860838257908925e-05, "loss": 2.3216, "step": 380},
    {"epoch": 0.58, "grad_norm": 1.9673439264297485, "learning_rate": 4.85303341227766e-05, "loss": 2.3308, "step": 390},
    {"epoch": 0.59, "grad_norm": 2.689194440841675, "learning_rate": 4.845022247459736e-05, "loss": 2.5978, "step": 400},
    {"epoch": 0.59, "eval_loss": 2.3469619750976562, "eval_runtime": 96.7403, "eval_samples_per_second": 6.202, "eval_steps_per_second": 3.101, "step": 400},
    {"epoch": 0.61, "grad_norm": 2.1179988384246826, "learning_rate": 4.836805465891844e-05, "loss": 2.679, "step": 410},
    {"epoch": 0.62, "grad_norm": 2.770677089691162, "learning_rate": 4.828383788039611e-05, "loss": 2.3037, "step": 420},
    {"epoch": 0.64, "grad_norm": 2.4875175952911377, "learning_rate": 4.819757952334425e-05, "loss": 2.1605, "step": 430},
    {"epoch": 0.65, "grad_norm": 2.146296501159668, "learning_rate": 4.810928715108683e-05, "loss": 2.4998, "step": 440},
    {"epoch": 0.67, "grad_norm": 1.9566316604614258, "learning_rate": 4.801896850529482e-05, "loss": 2.6092, "step": 450},
    {"epoch": 0.68, "grad_norm": 2.384901285171509, "learning_rate": 4.792663150530733e-05, "loss": 2.5815, "step": 460},
    {"epoch": 0.7, "grad_norm": 2.225851535797119, "learning_rate": 4.783228424743726e-05, "loss": 2.3677, "step": 470},
    {"epoch": 0.71, "grad_norm": 2.222594976425171, "learning_rate": 4.773593500426134e-05, "loss": 2.3782, "step": 480},
    {"epoch": 0.73, "grad_norm": 1.6287986040115356, "learning_rate": 4.763759222389487e-05, "loss": 2.4165, "step": 490},
    {"epoch": 0.74, "grad_norm": 2.4728448390960693, "learning_rate": 4.7537264529250835e-05, "loss": 2.3643, "step": 500},
    {"epoch": 0.76, "grad_norm": 1.7345590591430664, "learning_rate": 4.743496071728396e-05, "loss": 2.4526, "step": 510},
    {"epoch": 0.77, "grad_norm": 1.9642170667648315, "learning_rate": 4.7330689758219314e-05, "loss": 2.3306, "step": 520},
    {"epoch": 0.79, "grad_norm": 2.757434368133545, "learning_rate": 4.722446079476576e-05, "loss": 2.5495, "step": 530},
    {"epoch": 0.8, "grad_norm": 2.5214667320251465, "learning_rate": 4.711628314131436e-05, "loss": 2.5145, "step": 540},
    {"epoch": 0.81, "grad_norm": 2.977623462677002, "learning_rate": 4.700616628312158e-05, "loss": 2.4552, "step": 550},
    {"epoch": 0.83, "grad_norm": 3.109473466873169, "learning_rate": 4.689411987547773e-05, "loss": 2.4047, "step": 560},
    {"epoch": 0.84, "grad_norm": 1.7021738290786743, "learning_rate": 4.678015374286025e-05, "loss": 2.5649, "step": 570},
    {"epoch": 0.86, "grad_norm": 2.258920669555664, "learning_rate": 4.666427787807232e-05, "loss": 2.5556, "step": 580},
    {"epoch": 0.87, "grad_norm": 2.1758129596710205, "learning_rate": 4.654650244136669e-05, "loss": 2.4234, "step": 590},
    {"epoch": 0.89, "grad_norm": 2.581289529800415, "learning_rate": 4.642683775955476e-05, "loss": 2.5284, "step": 600},
    {"epoch": 0.89, "eval_loss": 2.327061414718628, "eval_runtime": 96.1253, "eval_samples_per_second": 6.242, "eval_steps_per_second": 3.121, "step": 600},
    {"epoch": 0.9, "grad_norm": 3.0182411670684814, "learning_rate": 4.630529432510118e-05, "loss": 2.3928, "step": 610},
    {"epoch": 0.92, "grad_norm": 2.1703760623931885, "learning_rate": 4.618188279520374e-05, "loss": 2.675, "step": 620},
    {"epoch": 0.93, "grad_norm": 2.2590174674987793, "learning_rate": 4.6056613990859024e-05, "loss": 2.4192, "step": 630},
    {"epoch": 0.95, "grad_norm": 3.697880744934082, "learning_rate": 4.5929498895913514e-05, "loss": 2.1851, "step": 640},
    {"epoch": 0.96, "grad_norm": 1.6290298700332642, "learning_rate": 4.580054865610059e-05, "loss": 2.452, "step": 650},
    {"epoch": 0.98, "grad_norm": 2.1037967205047607, "learning_rate": 4.5669774578063174e-05, "loss": 2.368, "step": 660},
    {"epoch": 0.99, "grad_norm": 3.8899028301239014, "learning_rate": 4.5537188128362384e-05, "loss": 2.4681, "step": 670},
    {"epoch": 1.01, "grad_norm": 2.6862452030181885, "learning_rate": 4.54028009324721e-05, "loss": 2.5741, "step": 680},
    {"epoch": 1.02, "grad_norm": 2.2980988025665283, "learning_rate": 4.52666247737596e-05, "loss": 2.3131, "step": 690},
    {"epoch": 1.04, "grad_norm": 2.9786365032196045, "learning_rate": 4.512867159245242e-05, "loss": 2.4059, "step": 700},
    {"epoch": 1.05, "grad_norm": 2.39225435256958, "learning_rate": 4.498895348459135e-05, "loss": 2.3781, "step": 710},
    {"epoch": 1.07, "grad_norm": 1.9918076992034912, "learning_rate": 4.484748270096988e-05, "loss": 2.399, "step": 720},
    {"epoch": 1.08, "grad_norm": 2.9783575534820557, "learning_rate": 4.470427164605997e-05, "loss": 2.4341, "step": 730},
    {"epoch": 1.1, "grad_norm": 1.9173182249069214, "learning_rate": 4.455933287692444e-05, "loss": 2.3917, "step": 740},
    {"epoch": 1.11, "grad_norm": 5.648810863494873, "learning_rate": 4.441267910211594e-05, "loss": 2.6513, "step": 750},
    {"epoch": 1.13, "grad_norm": 4.045050144195557, "learning_rate": 4.4264323180562574e-05, "loss": 2.5065, "step": 760},
    {"epoch": 1.14, "grad_norm": 3.8237059116363525, "learning_rate": 4.411427812044049e-05, "loss": 2.3481, "step": 770},
    {"epoch": 1.16, "grad_norm": 2.632697582244873, "learning_rate": 4.396255707803323e-05, "loss": 2.445, "step": 780},
    {"epoch": 1.17, "grad_norm": 3.1144275665283203, "learning_rate": 4.3809173356578184e-05, "loss": 2.3096, "step": 790},
    {"epoch": 1.19, "grad_norm": 1.7161847352981567, "learning_rate": 4.3654140405100116e-05, "loss": 2.4712, "step": 800},
    {"epoch": 1.19, "eval_loss": 2.317145824432373, "eval_runtime": 96.3893, "eval_samples_per_second": 6.225, "eval_steps_per_second": 3.112, "step": 800},
    {"epoch": 1.2, "grad_norm": 2.709351062774658, "learning_rate": 4.349747181723197e-05, "loss": 2.4706, "step": 810},
    {"epoch": 1.21, "grad_norm": 2.301166534423828, "learning_rate": 4.3339181330022876e-05, "loss": 2.5085, "step": 820},
    {"epoch": 1.23, "grad_norm": 2.3112149238586426, "learning_rate": 4.3179282822733706e-05, "loss": 2.3204, "step": 830},
    {"epoch": 1.24, "grad_norm": 1.850696325302124, "learning_rate": 4.301779031562011e-05, "loss": 2.4174, "step": 840},
    {"epoch": 1.26, "grad_norm": 2.511995315551758, "learning_rate": 4.285471796870316e-05, "loss": 2.3967, "step": 850},
    {"epoch": 1.27, "grad_norm": 3.4540021419525146, "learning_rate": 4.26900800805278e-05, "loss": 2.2189, "step": 860},
    {"epoch": 1.29, "grad_norm": 3.0399599075317383, "learning_rate": 4.252389108690909e-05, "loss": 2.4208, "step": 870},
    {"epoch": 1.3, "grad_norm": 2.1426591873168945, "learning_rate": 4.235616555966645e-05, "loss": 2.479, "step": 880},
    {"epoch": 1.32, "grad_norm": 2.262714147567749, "learning_rate": 4.218691820534601e-05, "loss": 2.5144, "step": 890},
    {"epoch": 1.33, "grad_norm": 2.40321683883667, "learning_rate": 4.201616386393102e-05, "loss": 2.332, "step": 900},
    {"epoch": 1.35, "grad_norm": 2.4343059062957764, "learning_rate": 4.184391750754075e-05, "loss": 2.4799, "step": 910},
    {"epoch": 1.36, "grad_norm": 2.6487956047058105, "learning_rate": 4.167019423911761e-05, "loss": 2.4492, "step": 920},
    {"epoch": 1.38, "grad_norm": 3.0731077194213867, "learning_rate": 4.149500929110295e-05, "loss": 2.4789, "step": 930},
    {"epoch": 1.39, "grad_norm": 2.791496515274048, "learning_rate": 4.1318378024101435e-05, "loss": 2.2895, "step": 940},
    {"epoch": 1.41, "grad_norm": 2.860171318054199, "learning_rate": 4.114031592553417e-05, "loss": 2.3098, "step": 950},
    {"epoch": 1.42, "grad_norm": 2.6719272136688232, "learning_rate": 4.096083860828076e-05, "loss": 2.2381, "step": 960},
    {"epoch": 1.44, "grad_norm": 3.2551610469818115, "learning_rate": 4.07799618093103e-05, "loss": 2.4553, "step": 970},
    {"epoch": 1.45, "grad_norm": 2.201517105102539, "learning_rate": 4.059770138830157e-05, "loss": 2.4248, "step": 980},
    {"epoch": 1.47, "grad_norm": 3.5828166007995605, "learning_rate": 4.041407332625238e-05, "loss": 2.3741, "step": 990},
    {"epoch": 1.48, "grad_norm": 2.608720064163208, "learning_rate": 4.022909372407835e-05, "loss": 2.4542, "step": 1000},
    {"epoch": 1.48, "eval_loss": 2.308347463607788, "eval_runtime": 96.2879, "eval_samples_per_second": 6.231, "eval_steps_per_second": 3.116, "step": 1000},
    {"epoch": 1.5, "grad_norm": 2.607658624649048, "learning_rate": 4.004277880120113e-05, "loss": 2.5501, "step": 1010},
    {"epoch": 1.51, "grad_norm": 2.599700450897217, "learning_rate": 3.9855144894126235e-05, "loss": 2.2606, "step": 1020},
    {"epoch": 1.53, "grad_norm": 2.6854465007781982, "learning_rate": 3.966620845501067e-05, "loss": 2.3407, "step": 1030},
    {"epoch": 1.54, "grad_norm": 2.488729476928711, "learning_rate": 3.9475986050220314e-05, "loss": 2.4184, "step": 1040},
    {"epoch": 1.56, "grad_norm": 2.6692395210266113, "learning_rate": 3.928449435887737e-05, "loss": 2.4879, "step": 1050},
    {"epoch": 1.57, "grad_norm": 2.208466053009033, "learning_rate": 3.909175017139791e-05, "loss": 2.2039, "step": 1060},
    {"epoch": 1.59, "grad_norm": 2.5494725704193115, "learning_rate": 3.889777038801964e-05, "loss": 2.3029, "step": 1070},
    {"epoch": 1.6, "grad_norm": 2.0070173740386963, "learning_rate": 3.870257201732005e-05, "loss": 2.3363, "step": 1080},
    {"epoch": 1.61, "grad_norm": 2.75435209274292, "learning_rate": 3.8506172174725066e-05, "loss": 2.2523, "step": 1090},
    {"epoch": 1.63, "grad_norm": 2.6911637783050537, "learning_rate": 3.830858808100834e-05, "loss": 2.4057, "step": 1100},
    {"epoch": 1.64, "grad_norm": 3.0497798919677734, "learning_rate": 3.810983706078131e-05, "loss": 2.2635, "step": 1110},
    {"epoch": 1.66, "grad_norm": 3.2239983081817627, "learning_rate": 3.790993654097405e-05, "loss": 2.3918, "step": 1120},
    {"epoch": 1.67, "grad_norm": 2.4736838340759277, "learning_rate": 3.770890404930738e-05, "loss": 2.3823, "step": 1130},
    {"epoch": 1.69, "grad_norm": 2.585200548171997, "learning_rate": 3.7506757212755886e-05, "loss": 2.3349, "step": 1140},
    {"epoch": 1.7, "grad_norm": 2.8950488567352295, "learning_rate": 3.730351375600239e-05, "loss": 2.2586, "step": 1150},
    {"epoch": 1.72, "grad_norm": 2.7123405933380127, "learning_rate": 3.7099191499883806e-05, "loss": 2.309, "step": 1160},
    {"epoch": 1.73, "grad_norm": 2.049273729324341, "learning_rate": 3.6893808359828565e-05, "loss": 2.3608, "step": 1170},
    {"epoch": 1.75, "grad_norm": 2.6950619220733643, "learning_rate": 3.668738234428575e-05, "loss": 2.4085, "step": 1180},
    {"epoch": 1.76, "grad_norm": 3.231593370437622, "learning_rate": 3.64799315531461e-05, "loss": 2.2365, "step": 1190},
    {"epoch": 1.78, "grad_norm": 3.310612201690674, "learning_rate": 3.627147417615493e-05, "loss": 2.3518, "step": 1200},
    {"epoch": 1.78, "eval_loss": 2.2971346378326416, "eval_runtime": 96.9315, "eval_samples_per_second": 6.19, "eval_steps_per_second": 3.095, "step": 1200},
    {"epoch": 1.79, "grad_norm": 2.5581092834472656, "learning_rate": 3.606202849131723e-05, "loss": 2.2343, "step": 1210},
    {"epoch": 1.81, "grad_norm": 2.7495291233062744, "learning_rate": 3.585161286329503e-05, "loss": 2.3144, "step": 1220},
    {"epoch": 1.82, "grad_norm": 1.433355689048767, "learning_rate": 3.564024574179713e-05, "loss": 2.4354, "step": 1230},
    {"epoch": 1.84, "grad_norm": 1.7245852947235107, "learning_rate": 3.542794565996137e-05, "loss": 2.405, "step": 1240},
    {"epoch": 1.85, "grad_norm": 2.598426103591919, "learning_rate": 3.5214731232729626e-05, "loss": 2.4057, "step": 1250},
    {"epoch": 1.87, "grad_norm": 2.4231202602386475, "learning_rate": 3.500062115521562e-05, "loss": 2.233, "step": 1260},
    {"epoch": 1.88, "grad_norm": 2.9336726665496826, "learning_rate": 3.478563420106565e-05, "loss": 2.5745, "step": 1270},
    {"epoch": 1.9, "grad_norm": 2.057365655899048, "learning_rate": 3.4569789220812544e-05, "loss": 2.4635, "step": 1280},
    {"epoch": 1.91, "grad_norm": 1.8743510246276855, "learning_rate": 3.435310514022272e-05, "loss": 2.3892, "step": 1290},
    {"epoch": 1.93, "grad_norm": 2.422725200653076, "learning_rate": 3.4135600958636794e-05, "loss": 2.4463, "step": 1300},
    {"epoch": 1.94, "grad_norm": 2.9806418418884277, "learning_rate": 3.391729574730365e-05, "loss": 2.2907, "step": 1310},
    {"epoch": 1.96, "grad_norm": 2.656452178955078, "learning_rate": 3.369820864770822e-05, "loss": 2.55, "step": 1320},
    {"epoch": 1.97, "grad_norm": 1.4007813930511475, "learning_rate": 3.347835886989318e-05, "loss": 2.4001, "step": 1330},
    {"epoch": 1.99, "grad_norm": 2.9661433696746826, "learning_rate": 3.3257765690774474e-05, "loss": 2.2728, "step": 1340},
    {"epoch": 2.0, "grad_norm": 2.8605289459228516, "learning_rate": 3.303644845245114e-05, "loss": 2.4102, "step": 1350},
    {"epoch": 2.01, "grad_norm": 2.4378559589385986, "learning_rate": 3.2814426560509335e-05, "loss": 2.3268, "step": 1360},
    {"epoch": 2.03, "grad_norm": 2.231828212738037, "learning_rate": 3.259171948232081e-05, "loss": 2.265, "step": 1370},
    {"epoch": 2.04, "grad_norm": 3.6883370876312256, "learning_rate": 3.236834674533595e-05, "loss": 2.3077, "step": 1380},
    {"epoch": 2.06, "grad_norm": 2.531064510345459, "learning_rate": 3.214432793537159e-05, "loss": 2.2186, "step": 1390},
    {"epoch": 2.07, "grad_norm": 3.7311625480651855, "learning_rate": 3.1919682694893676e-05, "loss": 2.3739, "step": 1400},
    {"epoch": 2.07, "eval_loss": 2.2983055114746094, "eval_runtime": 97.4749, "eval_samples_per_second": 6.155, "eval_steps_per_second": 3.078, "step": 1400},
    {"epoch": 2.09, "grad_norm": 2.1773197650909424, "learning_rate": 3.169443072129498e-05, "loss": 2.3585, "step": 1410},
    {"epoch": 2.1, "grad_norm": 3.2135908603668213, "learning_rate": 3.146859176516795e-05, "loss": 2.4114, "step": 1420},
    {"epoch": 2.12, "grad_norm": 2.469650983810425, "learning_rate": 3.1242185628573e-05, "loss": 2.4616, "step": 1430},
    {"epoch": 2.13, "grad_norm": 3.1853158473968506, "learning_rate": 3.101523216330216e-05, "loss": 2.4351, "step": 1440},
    {"epoch": 2.15, "grad_norm": 3.484740972518921, "learning_rate": 3.0787751269138454e-05, "loss": 2.4084, "step": 1450},
    {"epoch": 2.16, "grad_norm": 2.925419330596924, "learning_rate": 3.055976289211105e-05, "loss": 2.3629, "step": 1460},
    {"epoch": 2.18, "grad_norm": 2.416266918182373, "learning_rate": 3.033128702274634e-05, "loss": 2.3339, "step": 1470},
    {"epoch": 2.19, "grad_norm": 2.828092575073242, "learning_rate": 3.010234369431511e-05, "loss": 2.2583, "step": 1480},
    {"epoch": 2.21, "grad_norm": 2.0409436225891113, "learning_rate": 2.9872952981076008e-05, "loss": 2.0624, "step": 1490},
    {"epoch": 2.22, "grad_norm": 2.849675416946411, "learning_rate": 2.9643134996515364e-05, "loss": 2.3726, "step": 1500},
    {"epoch": 2.24, "grad_norm": 4.13971471786499, "learning_rate": 2.9412909891583613e-05, "loss": 2.2965, "step": 1510},
    {"epoch": 2.25, "grad_norm": 3.702918529510498, "learning_rate": 2.9182297852928407e-05, "loss": 2.4658, "step": 1520},
    {"epoch": 2.27, "grad_norm": 3.2200419902801514, "learning_rate": 2.8951319101124598e-05, "loss": 2.4594, "step": 1530},
    {"epoch": 2.28, "grad_norm": 2.465409517288208, "learning_rate": 2.8719993888901258e-05, "loss": 2.4301, "step": 1540},
    {"epoch": 2.3, "grad_norm": 2.5337352752685547, "learning_rate": 2.848834249936589e-05, "loss": 2.3253, "step": 1550},
    {"epoch": 2.31, "grad_norm": 3.3071987628936768, "learning_rate": 2.8256385244225926e-05, "loss": 2.6393, "step": 1560},
    {"epoch": 2.33, "grad_norm": 2.4905800819396973, "learning_rate": 2.802414246200781e-05, "loss": 2.1755, "step": 1570},
    {"epoch": 2.34, "grad_norm": 2.8511528968811035, "learning_rate": 2.7791634516273574e-05, "loss": 2.2376, "step": 1580},
    {"epoch": 2.36, "grad_norm": 2.7542080879211426, "learning_rate": 2.755888179383543e-05, "loss": 2.3509, "step": 1590},
    {"epoch": 2.37, "grad_norm": 3.257232189178467, "learning_rate": 2.7325904702968137e-05, "loss": 2.2619, "step": 1600},
    {"epoch": 2.37, "eval_loss": 2.294617176055908, "eval_runtime": 98.0772, "eval_samples_per_second": 6.118, "eval_steps_per_second": 3.059, "step": 1600},
    {"epoch": 2.39, "grad_norm": 2.707037925720215, "learning_rate": 2.7092723671619565e-05, "loss": 2.4258, "step": 1610},
    {"epoch": 2.4, "grad_norm": 3.6930806636810303, "learning_rate": 2.685935914561954e-05, "loss": 2.3555, "step": 1620},
    {"epoch": 2.41, "grad_norm": 1.9949381351470947, "learning_rate": 2.6625831586887116e-05, "loss": 2.3908, "step": 1630},
    {"epoch": 2.43, "grad_norm": 2.457606554031372, "learning_rate": 2.6392161471636413e-05, "loss": 2.2989, "step": 1640},
    {"epoch": 2.44, "grad_norm": 2.2386317253112793, "learning_rate": 2.615836928858122e-05, "loss": 2.6807, "step": 1650},
    {"epoch": 2.46, "grad_norm": 2.672177791595459, "learning_rate": 2.5924475537138497e-05, "loss": 2.1579, "step": 1660},
    {"epoch": 2.47, "grad_norm": 4.241297721862793, "learning_rate": 2.569050072563097e-05, "loss": 2.0706, "step": 1670},
    {"epoch": 2.49, "grad_norm": 2.5108397006988525, "learning_rate": 2.5456465369488864e-05, "loss": 2.4219, "step": 1680},
    {"epoch": 2.5, "grad_norm": 2.7684569358825684, "learning_rate": 2.5222389989451096e-05, "loss": 2.2234, "step": 1690},
    {"epoch": 2.52, "grad_norm": 3.104278087615967, "learning_rate": 2.4988295109765972e-05, "loss": 2.3018, "step": 1700},
    {"epoch": 2.53, "grad_norm": 3.235226631164551, "learning_rate": 2.4754201256391585e-05, "loss": 2.364, "step": 1710},
    {"epoch": 2.55, "grad_norm": 2.1415085792541504, "learning_rate": 2.4520128955196008e-05, "loss": 2.3683, "step": 1720},
    {"epoch": 2.56, "grad_norm": 2.5002896785736084, "learning_rate": 2.42860987301576e-05, "loss": 2.4247, "step": 1730},
    {"epoch": 2.58, "grad_norm": 2.8159451484680176, "learning_rate": 2.4052131101565364e-05, "loss": 2.3574, "step": 1740},
    {"epoch": 2.59, "grad_norm": 3.0876357555389404, "learning_rate": 2.3818246584219726e-05, "loss": 2.2649, "step": 1750},
    {"epoch": 2.61, "grad_norm": 2.8891026973724365, "learning_rate": 2.3584465685633738e-05, "loss": 2.4012, "step": 1760},
    {"epoch": 2.62, "grad_norm": 2.3504886627197266, "learning_rate": 2.335080890423491e-05, "loss": 2.3263, "step": 1770},
    {"epoch": 2.64, "grad_norm": 2.84779691696167, "learning_rate": 2.3117296727567897e-05, "loss": 2.4177, "step": 1780},
    {"epoch": 2.65, "grad_norm": 2.4880871772766113, "learning_rate": 2.288394963049807e-05, "loss": 2.3029, "step": 1790},
    {"epoch": 2.67, "grad_norm": 2.4965240955352783, "learning_rate": 2.2650788073416293e-05, "loss": 2.2876, "step": 1800},
    {"epoch": 2.67, "eval_loss": 2.287304401397705, "eval_runtime": 97.8793, "eval_samples_per_second": 6.13, "eval_steps_per_second": 3.065, "step": 1800},
    {"epoch": 2.68, "grad_norm": 3.6506803035736084, "learning_rate": 2.2417832500444827e-05, "loss": 2.2686, "step": 1810},
    {"epoch": 2.7, "grad_norm": 2.156888008117676, "learning_rate": 2.2185103337644833e-05, "loss": 2.4572, "step": 1820},
    {"epoch": 2.71, "grad_norm": 2.9049007892608643, "learning_rate": 2.1952620991225285e-05, "loss": 2.4824, "step": 1830},
    {"epoch": 2.73, "grad_norm": 3.4357845783233643, "learning_rate": 2.1720405845753792e-05, "loss": 2.3334, "step": 1840},
    {"epoch": 2.74, "grad_norm": 2.405451774597168, "learning_rate": 2.148847826236914e-05, "loss": 2.4271, "step": 1850},
    {"epoch": 2.76, "grad_norm": 2.0909016132354736, "learning_rate": 2.125685857699609e-05, "loss": 2.3499, "step": 1860},
    {"epoch": 2.77, "grad_norm": 3.3600564002990723, "learning_rate": 2.1025567098562177e-05, "loss": 2.2665, "step": 1870},
    {"epoch": 2.79, "grad_norm": 3.0894439220428467, "learning_rate": 2.0794624107217056e-05, "loss": 2.3211, "step": 1880},
    {"epoch": 2.8, "grad_norm": 2.564870834350586, "learning_rate": 2.056404985255424e-05, "loss": 2.3905, "step": 1890},
    {"epoch": 2.81, "grad_norm": 2.177769422531128, "learning_rate": 2.0333864551835602e-05, "loss": 2.4703, "step": 1900},
    {"epoch": 2.83, "grad_norm": 2.499175548553467, "learning_rate": 2.010408838821866e-05, "loss": 2.3287, "step": 1910},
    {"epoch": 2.84, "grad_norm": 3.076934337615967, "learning_rate": 1.987474150898691e-05, "loss": 2.3857, "step": 1920},
    {"epoch": 2.86, "grad_norm": 5.456985950469971, "learning_rate": 1.9645844023783206e-05, "loss": 2.3238, "step": 1930},
    {"epoch": 2.87, "grad_norm": 2.502319574356079, "learning_rate": 1.941741600284656e-05, "loss": 2.3027, "step": 1940},
    {"epoch": 2.89, "grad_norm": 3.1800990104675293, "learning_rate": 1.918947747525232e-05, "loss": 2.086, "step": 1950},
    {"epoch": 2.9, "grad_norm": 3.400146245956421, "learning_rate": 1.896204842715596e-05, "loss": 2.5469, "step": 1960},
    {"epoch": 2.92, "grad_norm": 2.4540586471557617, "learning_rate": 1.873514880004065e-05, "loss": 2.2501, "step": 1970},
    {"epoch": 2.93, "grad_norm": 3.094639778137207, "learning_rate": 1.8508798488968803e-05, "loss": 2.3037, "step": 1980},
    {"epoch": 2.95, "grad_norm": 2.7142815589904785, "learning_rate": 1.8283017340837517e-05, "loss": 2.2974, "step": 1990},
    {"epoch": 2.96, "grad_norm": 2.40388560295105, "learning_rate": 1.8057825152638478e-05, "loss": 2.2484, "step": 2000},
    {"epoch": 2.96, "eval_loss": 2.2887699604034424, "eval_runtime": 100.5569, "eval_samples_per_second": 5.967, "eval_steps_per_second": 2.983, "step": 2000},
    {"epoch": 2.98, "grad_norm": 3.5946638584136963, "learning_rate": 1.7833241669722015e-05, "loss": 2.2191, "step": 2010},
    {"epoch": 2.99, "grad_norm": 3.1359758377075195, "learning_rate": 1.760928658406587e-05, "loss": 2.4429, "step": 2020},
    {"epoch": 3.01, "grad_norm": 3.105003833770752, "learning_rate": 1.738597953254848e-05, "loss": 2.3241, "step": 2030},
    {"epoch": 3.02, "grad_norm": 2.3050458431243896, "learning_rate": 1.716334009522726e-05, "loss": 2.3608, "step": 2040},
    {"epoch": 3.04, "grad_norm": 2.2272346019744873, "learning_rate": 1.6941387793621673e-05, "loss": 2.3107, "step": 2050},
    {"epoch": 3.05, "grad_norm": 2.0482161045074463, "learning_rate": 1.672014208900165e-05, "loss": 2.1823, "step": 2060},
    {"epoch": 3.07, "grad_norm": 2.0835390090942383, "learning_rate": 1.6499622380681096e-05, "loss": 2.1622, "step": 2070},
    {"epoch": 3.08, "grad_norm": 3.687225103378296, "learning_rate": 1.6279848004316972e-05, "loss": 2.3643, "step": 2080},
    {"epoch": 3.1, "grad_norm": 3.2139699459075928, "learning_rate": 1.6060838230213883e-05, "loss": 2.2241, "step": 2090},
    {"epoch": 3.11, "grad_norm": 3.513046979904175, "learning_rate": 1.5842612261634392e-05, "loss": 2.311, "step": 2100},
    {"epoch": 3.13, "grad_norm": 2.698282241821289, "learning_rate": 1.5625189233115282e-05, "loss": 2.4009, "step": 2110},
    {"epoch": 3.14, "grad_norm": 2.889256238937378, "learning_rate": 1.5408588208789733e-05, "loss": 2.2708, "step": 2120},
    {"epoch": 3.16, "grad_norm": 2.4953372478485107, "learning_rate": 1.5192828180715824e-05, "loss": 2.2726, "step": 2130},
    {"epoch": 3.17, "grad_norm": 3.368839740753174, "learning_rate": 1.4977928067211178e-05, "loss": 2.0851, "step": 2140},
    {"epoch": 3.19, "grad_norm": 3.3648414611816406, "learning_rate": 1.4763906711194229e-05, "loss": 2.11, "step": 2150},
    {"epoch": 3.2, "grad_norm": 2.7509853839874268, "learning_rate": 1.4550782878531972e-05, "loss": 2.3487, "step": 2160},
    {"epoch": 3.21, "grad_norm": 3.071002721786499, "learning_rate": 1.4338575256394612e-05, "loss": 2.2536, "step": 2170},
    {"epoch": 3.23, "grad_norm": 2.7192609310150146, "learning_rate": 1.4127302451616936e-05, "loss": 2.2367, "step": 2180},
    {"epoch": 3.24, "grad_norm": 5.182852268218994, "learning_rate": 1.3916982989066915e-05, "loss": 2.0933, "step": 2190},
    {"epoch": 3.26, "grad_norm": 2.816575527191162, "learning_rate": 1.370763531002132e-05, "loss": 2.4534, "step": 2200},
    {"epoch": 3.26, "eval_loss": 2.2867271900177, "eval_runtime": 98.3879, "eval_samples_per_second": 6.098, "eval_steps_per_second": 3.049, "step": 2200},
    {"epoch": 3.27, "grad_norm": 2.238353967666626, "learning_rate": 1.3499277770548823e-05, "loss": 2.3927, "step": 2210},
    {"epoch": 3.29, "grad_norm": 3.0616304874420166, "learning_rate": 1.3291928639900436e-05, "loss": 2.3978, "step": 2220},
    {"epoch": 3.3, "grad_norm": 3.807537317276001, "learning_rate": 1.3085606098907682e-05, "loss": 2.1303, "step": 2230},
    {"epoch": 3.32, "grad_norm": 3.6472954750061035, "learning_rate": 1.2880328238388393e-05, "loss": 2.3277, "step": 2240},
    {"epoch": 3.33, "grad_norm": 3.634000301361084, "learning_rate": 1.2676113057560515e-05, "loss": 2.358, "step": 2250},
    {"epoch": 3.35, "grad_norm": 5.468724727630615, "learning_rate": 1.2472978462463874e-05, "loss": 2.4583, "step": 2260},
    {"epoch": 3.36, "grad_norm": 3.201179265975952, "learning_rate": 1.2270942264390174e-05, "loss": 2.2543, "step": 2270},
    {"epoch": 3.38, "grad_norm": 2.4082183837890625, "learning_rate": 1.2070022178321186e-05, "loss": 2.2401, "step": 2280},
    {"epoch": 3.39, "grad_norm": 3.068176031112671, "learning_rate": 1.1870235821375553e-05, "loss": 2.3446, "step": 2290},
    {"epoch": 3.41, "grad_norm": 2.7859139442443848, "learning_rate": 1.1671600711263991e-05, "loss": 2.3761, "step": 2300},
    {"epoch": 3.42, "grad_norm": 2.423513650894165, "learning_rate": 1.1474134264753384e-05, "loss": 2.2563, "step": 2310},
    {"epoch": 3.44, "grad_norm": 3.0830302238464355, "learning_rate": 1.1277853796139554e-05, "loss": 2.2455, "step": 2320},
    {"epoch": 3.45, "grad_norm": 3.237128734588623, "learning_rate": 1.1082776515729201e-05, "loss": 2.3861, "step": 2330},
    {"epoch": 3.47, "grad_norm": 2.9493908882141113, "learning_rate": 1.0888919528330777e-05, "loss": 2.0657, "step": 2340},
    {"epoch": 3.48, "grad_norm": 3.7209978103637695, "learning_rate": 1.0696299831754753e-05, "loss": 2.4492, "step": 2350},
    {"epoch": 3.5, "grad_norm": 2.332671880722046, "learning_rate": 1.0504934315323181e-05, "loss": 2.3108, "step": 2360},
    {"epoch": 3.51, "grad_norm": 3.0048203468322754, "learning_rate": 1.0314839758388859e-05, "loss": 2.5104, "step": 2370},
    {"epoch": 3.53, "grad_norm": 3.380918502807617, "learning_rate": 1.0126032828863982e-05, "loss": 2.3024, "step": 2380},
    {"epoch": 3.54, "grad_norm": 2.693096876144409, "learning_rate": 9.938530081758764e-06, "loss": 2.3422, "step": 2390},
    {"epoch": 3.56, "grad_norm": 2.64311146736145, "learning_rate": 9.752347957729804e-06, "loss": 2.3934, "step": 2400},
    {"epoch": 3.56, "eval_loss": 2.2875092029571533, "eval_runtime": 98.3803, "eval_samples_per_second": 6.099, "eval_steps_per_second": 3.049, "step": 2400},
    {"epoch": 3.57, "grad_norm": 2.693216562271118, "learning_rate": 9.567502781638516e-06, "loss": 2.3249, "step": 2410},
    {"epoch": 3.59, "grad_norm": 2.68764066696167, "learning_rate": 9.384010761119787e-06, "loss": 2.2552, "step": 2420},
    {"epoch": 3.6, "grad_norm": 4.221546649932861, "learning_rate": 9.201887985160804e-06, "loss": 2.3362, "step": 2430},
    {"epoch": 3.61, "grad_norm": 2.777925729751587, "learning_rate": 9.039161391719244e-06, "loss": 2.3256, "step": 2440},
    {"epoch": 3.63, "grad_norm": 2.7611911296844482, "learning_rate": 8.859684074465835e-06, "loss": 2.2209, "step": 2450},
    {"epoch": 3.64, "grad_norm": 2.7354393005371094, "learning_rate": 8.681621975898577e-06, "loss": 2.1957, "step": 2460},
    {"epoch": 3.66, "grad_norm": 2.901160478591919, "learning_rate": 8.504990708897056e-06, "loss": 2.2935, "step": 2470},
    {"epoch": 3.67, "grad_norm": 2.4266469478607178, "learning_rate": 8.329805760882403e-06, "loss": 2.307, "step": 2480},
    {"epoch": 3.69, "grad_norm": 2.3984947204589844, "learning_rate": 8.156082492459257e-06, "loss": 2.3943, "step": 2490},
    {"epoch": 3.7, "grad_norm": 2.6705055236816406, "learning_rate": 7.983836136068984e-06, "loss": 2.3774, "step": 2500},
    {"epoch": 3.72, "grad_norm": 3.173973321914673, "learning_rate": 7.813081794653995e-06, "loss": 2.2757, "step": 2510},
    {"epoch": 3.73, "grad_norm": 3.104217529296875, "learning_rate": 7.643834440333553e-06, "loss": 2.2961, "step": 2520},
    {"epoch": 3.75, "grad_norm": 3.088330030441284, "learning_rate": 7.476108913090915e-06, "loss": 2.2001, "step": 2530},
    {"epoch": 3.76, "grad_norm": 3.717886447906494, "learning_rate": 7.309919919472208e-06, "loss": 2.1859, "step": 2540},
    {"epoch": 3.78, "grad_norm": 3.12146258354187, "learning_rate": 7.145282031296841e-06, "loss": 2.2422, "step": 2550},
    {"epoch": 3.79, "grad_norm": 3.212069272994995, "learning_rate": 6.982209684379892e-06, "loss": 2.1191, "step": 2560},
    {"epoch": 3.81, "grad_norm": 3.0071327686309814, "learning_rate": 6.8207171772662976e-06, "loss": 2.1472, "step": 2570},
    {"epoch": 3.82, "grad_norm": 3.0386757850646973, "learning_rate": 6.660818669977134e-06, "loss": 2.3547, "step": 2580},
    {"epoch": 3.84, "grad_norm": 2.2448551654815674, "learning_rate": 6.5025281827680335e-06, "loss": 2.2866, "step": 2590},
    {"epoch": 3.85, "grad_norm": 3.914092779159546, "learning_rate": 6.345859594899886e-06, "loss": 2.3713, "step": 2600},
    {"epoch": 3.85, "eval_loss": 2.2858927249908447, "eval_runtime": 98.5519, "eval_samples_per_second": 6.088, "eval_steps_per_second": 3.044, "step": 2600},
    {"epoch": 3.87, "grad_norm": 2.350407600402832, "learning_rate": 6.1908266434218235e-06, "loss": 2.2876, "step": 2610},
    {"epoch": 3.88, "grad_norm": 2.7468740940093994, "learning_rate": 6.037442921966771e-06, "loss": 2.3253, "step": 2620},
    {"epoch": 3.9, "grad_norm": 3.026620626449585, "learning_rate": 5.885721879559514e-06, "loss": 2.2033, "step": 2630},
    {"epoch": 3.91, "grad_norm": 2.2398440837860107, "learning_rate": 5.735676819437425e-06, "loss": 2.317, "step": 2640},
    {"epoch": 3.93, "grad_norm": 2.4124555587768555, "learning_rate": 5.587320897884066e-06, "loss": 2.284, "step": 2650},
    {"epoch": 3.94, "grad_norm": 2.4340391159057617, "learning_rate": 5.440667123075558e-06, "loss": 2.3012, "step": 2660},
    {"epoch": 3.96, "grad_norm": 2.7486612796783447, "learning_rate": 5.295728353940038e-06, "loss": 2.5206, "step": 2670},
    {"epoch": 3.97, "grad_norm": 2.5270161628723145, "learning_rate": 5.152517299030127e-06, "loss": 2.5541, "step": 2680},
    {"epoch": 3.99, "grad_norm": 3.589616298675537, "learning_rate": 5.011046515408657e-06, "loss": 2.3475, "step": 2690},
    {"epoch": 4.0, "grad_norm": 2.27154541015625, "learning_rate": 4.871328407547587e-06, "loss": 2.4889, "step": 2700},
    {"epoch": 4.01, "grad_norm": 2.9212470054626465, "learning_rate": 4.733375226240408e-06, "loss": 2.4318, "step": 2710},
    {"epoch": 4.03, "grad_norm": 3.5055928230285645, "learning_rate": 4.597199067527907e-06, "loss": 2.1628, "step": 2720},
    {"epoch": 4.04, "grad_norm": 2.520705223083496, "learning_rate": 4.462811871637618e-06, "loss": 2.0723, "step": 2730},
    {"epoch": 4.06, "grad_norm": 3.2816295623779297, "learning_rate": 4.330225421936823e-06, "loss": 2.2386, "step": 2740},
    {"epoch": 4.07, "grad_norm": 3.64699649810791, "learning_rate": 4.1994513438994156e-06, "loss": 2.1135, "step": 2750},
    {"epoch": 4.09, "grad_norm": 3.278775930404663, "learning_rate": 4.070501104086488e-06, "loss": 2.2199, "step": 2760},
    {"epoch": 4.1, "grad_norm": 1.8690662384033203, "learning_rate": 3.943386009140984e-06, "loss": 2.2364, "step": 2770},
    {"epoch": 4.12, "grad_norm": 3.0856454372406006, "learning_rate": 3.818117204796262e-06, "loss": 2.0439, "step": 2780},
    {"epoch": 4.13, "grad_norm": 3.822516679763794, "learning_rate": 3.694705674898827e-06, "loss": 2.2703, "step": 2790},
    {"epoch": 4.15, "grad_norm": 5.145390510559082, "learning_rate": 3.573162240445238e-06, "loss": 2.3365, "step": 2800},
    {"epoch": 4.15, "eval_loss": 2.2857470512390137, "eval_runtime": 98.4481, "eval_samples_per_second": 6.095, "eval_steps_per_second": 3.047, "step": 2800}
  ],
  "logging_steps": 10,
  "max_steps": 3375,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 200,
  "total_flos": 7.882780555954094e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}