{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 9480,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{"epoch": 0.0010548523206751054, "grad_norm": 1.267115831375122, "learning_rate": 0.00015822784810126583, "loss": 7.4878, "step": 10},
{"epoch": 0.002109704641350211, "grad_norm": 1.086830735206604, "learning_rate": 0.00031645569620253165, "loss": 6.818, "step": 20},
{"epoch": 0.0031645569620253164, "grad_norm": 0.8537070751190186, "learning_rate": 0.00047468354430379745, "loss": 6.1887, "step": 30},
{"epoch": 0.004219409282700422, "grad_norm": 1.0403074026107788, "learning_rate": 0.0006329113924050633, "loss": 5.6339, "step": 40},
{"epoch": 0.005274261603375527, "grad_norm": 0.630695641040802, "learning_rate": 0.0007911392405063291, "loss": 5.1053, "step": 50},
{"epoch": 0.006329113924050633, "grad_norm": 0.9471701383590698, "learning_rate": 0.0009493670886075949, "loss": 4.6251, "step": 60},
{"epoch": 0.007383966244725738, "grad_norm": 0.6331472396850586, "learning_rate": 0.0011075949367088608, "loss": 4.2957, "step": 70},
{"epoch": 0.008438818565400843, "grad_norm": 1.1874464750289917, "learning_rate": 0.0012658227848101266, "loss": 4.0742, "step": 80},
{"epoch": 0.00949367088607595, "grad_norm": 1.1561050415039062, "learning_rate": 0.0014240506329113926, "loss": 3.8899, "step": 90},
{"epoch": 0.010548523206751054, "grad_norm": 0.7020412087440491, "learning_rate": 0.0015, "loss": 3.748, "step": 100},
{"epoch": 0.011603375527426161, "grad_norm": 1.058180570602417, "learning_rate": 0.0015, "loss": 3.5931, "step": 110},
{"epoch": 0.012658227848101266, "grad_norm": 0.8881299495697021, "learning_rate": 0.0015, "loss": 3.4895, "step": 120},
{"epoch": 0.013713080168776372, "grad_norm": 0.7307445406913757, "learning_rate": 0.0015, "loss": 3.3785, "step": 130},
{"epoch": 0.014767932489451477, "grad_norm": 0.9191378355026245, "learning_rate": 0.0015, "loss": 3.2912, "step": 140},
{"epoch": 0.015822784810126583, "grad_norm": 0.7868037819862366, "learning_rate": 0.0015, "loss": 3.2078, "step": 150},
{"epoch": 0.016877637130801686, "grad_norm": 1.0002555847167969, "learning_rate": 0.0015, "loss": 3.1415, "step": 160},
{"epoch": 0.017932489451476793, "grad_norm": 0.811924934387207, "learning_rate": 0.0015, "loss": 3.0762, "step": 170},
{"epoch": 0.0189873417721519, "grad_norm": 0.707933783531189, "learning_rate": 0.0015, "loss": 3.0415, "step": 180},
{"epoch": 0.020042194092827006, "grad_norm": 0.7100071907043457, "learning_rate": 0.0015, "loss": 2.9773, "step": 190},
{"epoch": 0.02109704641350211, "grad_norm": 0.7788935303688049, "learning_rate": 0.0015, "loss": 2.9367, "step": 200},
{"epoch": 0.022151898734177215, "grad_norm": 0.7069195508956909, "learning_rate": 0.0015, "loss": 2.8957, "step": 210},
{"epoch": 0.023206751054852322, "grad_norm": 0.9104593396186829, "learning_rate": 0.0015, "loss": 2.8617, "step": 220},
{"epoch": 0.024261603375527425, "grad_norm": 0.830557644367218, "learning_rate": 0.0015, "loss": 2.8114, "step": 230},
{"epoch": 0.02531645569620253, "grad_norm": 0.8437513709068298, "learning_rate": 0.0015, "loss": 2.7782, "step": 240},
{"epoch": 0.026371308016877638, "grad_norm": 0.9658205509185791, "learning_rate": 0.0015, "loss": 2.7484, "step": 250},
{"epoch": 0.027426160337552744, "grad_norm": 0.8535057902336121, "learning_rate": 0.0015, "loss": 2.7181, "step": 260},
{"epoch": 0.028481012658227847, "grad_norm": 1.0181001424789429, "learning_rate": 0.0015, "loss": 2.6857, "step": 270},
{"epoch": 0.029535864978902954, "grad_norm": 0.6967226266860962, "learning_rate": 0.0015, "loss": 2.6492, "step": 280},
{"epoch": 0.03059071729957806, "grad_norm": 1.1211285591125488, "learning_rate": 0.0015, "loss": 2.6307, "step": 290},
{"epoch": 0.03164556962025317, "grad_norm": 0.9659064412117004, "learning_rate": 0.0015, "loss": 2.6054, "step": 300},
{"epoch": 0.03270042194092827, "grad_norm": 0.737605094909668, "learning_rate": 0.0015, "loss": 2.582, "step": 310},
{"epoch": 0.03375527426160337, "grad_norm": 1.2234512567520142, "learning_rate": 0.0015, "loss": 2.551, "step": 320},
{"epoch": 0.03481012658227848, "grad_norm": 0.8145862221717834, "learning_rate": 0.0015, "loss": 2.5459, "step": 330},
{"epoch": 0.035864978902953586, "grad_norm": 1.0696101188659668, "learning_rate": 0.0015, "loss": 2.5195, "step": 340},
{"epoch": 0.03691983122362869, "grad_norm": 0.8013420701026917, "learning_rate": 0.0015, "loss": 2.4817, "step": 350},
{"epoch": 0.0379746835443038, "grad_norm": 0.9070076942443848, "learning_rate": 0.0015, "loss": 2.4644, "step": 360},
{"epoch": 0.039029535864978905, "grad_norm": 0.8164359927177429, "learning_rate": 0.0015, "loss": 2.4497, "step": 370},
{"epoch": 0.04008438818565401, "grad_norm": 1.052100419998169, "learning_rate": 0.0015, "loss": 2.4406, "step": 380},
{"epoch": 0.04113924050632911, "grad_norm": 1.006508231163025, "learning_rate": 0.0015, "loss": 2.4185, "step": 390},
{"epoch": 0.04219409282700422, "grad_norm": 0.6804226636886597, "learning_rate": 0.0015, "loss": 2.3939, "step": 400},
{"epoch": 0.043248945147679324, "grad_norm": 1.1502838134765625, "learning_rate": 0.0015, "loss": 2.3794, "step": 410},
{"epoch": 0.04430379746835443, "grad_norm": 0.8598443865776062, "learning_rate": 0.0015, "loss": 2.3654, "step": 420},
{"epoch": 0.04535864978902954, "grad_norm": 0.9380781650543213, "learning_rate": 0.0015, "loss": 2.3439, "step": 430},
{"epoch": 0.046413502109704644, "grad_norm": 0.9013979434967041, "learning_rate": 0.0015, "loss": 2.3224, "step": 440},
{"epoch": 0.04746835443037975, "grad_norm": 0.9391945600509644, "learning_rate": 0.0015, "loss": 2.3232, "step": 450},
{"epoch": 0.04852320675105485, "grad_norm": 0.9694176316261292, "learning_rate": 0.0015, "loss": 2.305, "step": 460},
{"epoch": 0.049578059071729956, "grad_norm": 1.1152819395065308, "learning_rate": 0.0015, "loss": 2.2843, "step": 470},
{"epoch": 0.05063291139240506, "grad_norm": 0.8284817337989807, "learning_rate": 0.0015, "loss": 2.2773, "step": 480},
{"epoch": 0.05168776371308017, "grad_norm": 0.8194335699081421, "learning_rate": 0.0015, "loss": 2.252, "step": 490},
{"epoch": 0.052742616033755275, "grad_norm": 1.0222529172897339, "learning_rate": 0.0015, "loss": 2.2524, "step": 500},
{"epoch": 0.05379746835443038, "grad_norm": 0.7683922648429871, "learning_rate": 0.0015, "loss": 2.2369, "step": 510},
{"epoch": 0.05485232067510549, "grad_norm": 0.917959988117218, "learning_rate": 0.0015, "loss": 2.2205, "step": 520},
{"epoch": 0.05590717299578059, "grad_norm": 1.0505857467651367, "learning_rate": 0.0015, "loss": 2.2095, "step": 530},
{"epoch": 0.056962025316455694, "grad_norm": 0.8172610998153687, "learning_rate": 0.0015, "loss": 2.1846, "step": 540},
{"epoch": 0.0580168776371308, "grad_norm": 0.8242407441139221, "learning_rate": 0.0015, "loss": 2.1885, "step": 550},
{"epoch": 0.05907172995780591, "grad_norm": 0.74329674243927, "learning_rate": 0.0015, "loss": 2.1555, "step": 560},
{"epoch": 0.060126582278481014, "grad_norm": 0.7791258692741394, "learning_rate": 0.0015, "loss": 2.159, "step": 570},
{"epoch": 0.06118143459915612, "grad_norm": 1.2360996007919312, "learning_rate": 0.0015, "loss": 2.1631, "step": 580},
{"epoch": 0.06223628691983123, "grad_norm": 0.662500262260437, "learning_rate": 0.0015, "loss": 2.1354, "step": 590},
{"epoch": 0.06329113924050633, "grad_norm": 1.267376184463501, "learning_rate": 0.0015, "loss": 2.117, "step": 600},
{"epoch": 0.06434599156118144, "grad_norm": 0.7871881127357483, "learning_rate": 0.0015, "loss": 2.1222, "step": 610},
{"epoch": 0.06540084388185655, "grad_norm": 0.6757326722145081, "learning_rate": 0.0015, "loss": 2.1043, "step": 620},
{"epoch": 0.06645569620253164, "grad_norm": 0.6942896842956543, "learning_rate": 0.0015, "loss": 2.0898, "step": 630},
{"epoch": 0.06751054852320675, "grad_norm": 0.9564072489738464, "learning_rate": 0.0015, "loss": 2.1062, "step": 640},
{"epoch": 0.06856540084388185, "grad_norm": 0.9712061285972595, "learning_rate": 0.0015, "loss": 2.0942, "step": 650},
{"epoch": 0.06962025316455696, "grad_norm": 1.0676331520080566, "learning_rate": 0.0015, "loss": 2.0844, "step": 660},
{"epoch": 0.07067510548523206, "grad_norm": 0.7057194709777832, "learning_rate": 0.0015, "loss": 2.0658, "step": 670},
{"epoch": 0.07172995780590717, "grad_norm": 0.7662429809570312, "learning_rate": 0.0015, "loss": 2.054, "step": 680},
{"epoch": 0.07278481012658228, "grad_norm": 0.8145813941955566, "learning_rate": 0.0015, "loss": 2.064, "step": 690},
{"epoch": 0.07383966244725738, "grad_norm": 0.8595874309539795, "learning_rate": 0.0015, "loss": 2.0488, "step": 700},
{"epoch": 0.07489451476793249, "grad_norm": 0.7613713145256042, "learning_rate": 0.0015, "loss": 2.0349, "step": 710},
{"epoch": 0.0759493670886076, "grad_norm": 0.9556564092636108, "learning_rate": 0.0015, "loss": 2.017, "step": 720},
{"epoch": 0.0770042194092827, "grad_norm": 0.8757444620132446, "learning_rate": 0.0015, "loss": 2.0155, "step": 730},
{"epoch": 0.07805907172995781, "grad_norm": 0.8322359919548035, "learning_rate": 0.0015, "loss": 2.0262, "step": 740},
{"epoch": 0.07911392405063292, "grad_norm": 0.8489624261856079, "learning_rate": 0.0015, "loss": 2.0044, "step": 750},
{"epoch": 0.08016877637130802, "grad_norm": 0.9894970059394836, "learning_rate": 0.0015, "loss": 2.0049, "step": 760},
{"epoch": 0.08122362869198312, "grad_norm": 0.7704083919525146, "learning_rate": 0.0015, "loss": 1.9892, "step": 770},
{"epoch": 0.08227848101265822, "grad_norm": 0.8131700754165649, "learning_rate": 0.0015, "loss": 1.9873, "step": 780},
{"epoch": 0.08333333333333333, "grad_norm": 0.6630898118019104, "learning_rate": 0.0015, "loss": 1.993, "step": 790},
{"epoch": 0.08438818565400844, "grad_norm": 1.0395071506500244, "learning_rate": 0.0015, "loss": 1.9718, "step": 800},
{"epoch": 0.08544303797468354, "grad_norm": 0.8773938417434692, "learning_rate": 0.0015, "loss": 1.9574, "step": 810},
{"epoch": 0.08649789029535865, "grad_norm": 0.9530850648880005, "learning_rate": 0.0015, "loss": 1.9641, "step": 820},
{"epoch": 0.08755274261603375, "grad_norm": 0.7528932094573975, "learning_rate": 0.0015, "loss": 1.967, "step": 830},
{"epoch": 0.08860759493670886, "grad_norm": 0.7244347333908081, "learning_rate": 0.0015, "loss": 1.9426, "step": 840},
{"epoch": 0.08966244725738397, "grad_norm": 0.8388285040855408, "learning_rate": 0.0015, "loss": 1.9452, "step": 850},
{"epoch": 0.09071729957805907, "grad_norm": 0.6546905636787415, "learning_rate": 0.0015, "loss": 1.9389, "step": 860},
{"epoch": 0.09177215189873418, "grad_norm": 0.7514728903770447, "learning_rate": 0.0015, "loss": 1.9398, "step": 870},
{"epoch": 0.09282700421940929, "grad_norm": 0.8813492655754089, "learning_rate": 0.0015, "loss": 1.9308, "step": 880},
{"epoch": 0.0938818565400844, "grad_norm": 0.7346323132514954, "learning_rate": 0.0015, "loss": 1.9256, "step": 890},
{"epoch": 0.0949367088607595, "grad_norm": 1.3726916313171387, "learning_rate": 0.0015, "loss": 1.9269, "step": 900},
{"epoch": 0.09599156118143459, "grad_norm": 0.7512115240097046, "learning_rate": 0.0015, "loss": 1.9277, "step": 910},
{"epoch": 0.0970464135021097, "grad_norm": 0.6616615056991577, "learning_rate": 0.0015, "loss": 1.9149, "step": 920},
{"epoch": 0.0981012658227848, "grad_norm": 0.7120899558067322, "learning_rate": 0.0015, "loss": 1.8987, "step": 930},
{"epoch": 0.09915611814345991, "grad_norm": 0.7128753066062927, "learning_rate": 0.0015, "loss": 1.9079, "step": 940},
{"epoch": 0.10021097046413502, "grad_norm": 1.0504094362258911, "learning_rate": 0.0015, "loss": 1.8872, "step": 950},
{"epoch": 0.10126582278481013, "grad_norm": 0.6815115213394165, "learning_rate": 0.0015, "loss": 1.8965, "step": 960},
{"epoch": 0.10232067510548523, "grad_norm": 1.2939375638961792, "learning_rate": 0.0015, "loss": 1.8964, "step": 970},
{"epoch": 0.10337552742616034, "grad_norm": 0.7509056329727173, "learning_rate": 0.0015, "loss": 1.8753, "step": 980},
{"epoch": 0.10443037974683544, "grad_norm": 0.7060526013374329, "learning_rate": 0.0015, "loss": 1.8772, "step": 990},
{"epoch": 0.10548523206751055, "grad_norm": 0.7411452531814575, "learning_rate": 0.0015, "loss": 1.8865, "step": 1000},
{"epoch": 0.10654008438818566, "grad_norm": 0.71030592918396, "learning_rate": 0.0015, "loss": 1.8803, "step": 1010},
{"epoch": 0.10759493670886076, "grad_norm": 0.8691297769546509, "learning_rate": 0.0015, "loss": 1.8729, "step": 1020},
{"epoch": 0.10864978902953587, "grad_norm": 0.8365240693092346, "learning_rate": 0.0015, "loss": 1.8733, "step": 1030},
{"epoch": 0.10970464135021098, "grad_norm": 1.012802243232727, "learning_rate": 0.0015, "loss": 1.8601, "step": 1040},
{"epoch": 0.11075949367088607, "grad_norm": 0.7322136759757996, "learning_rate": 0.0015, "loss": 1.8612, "step": 1050},
{"epoch": 0.11181434599156118, "grad_norm": 0.6655473709106445, "learning_rate": 0.0015, "loss": 1.8496, "step": 1060},
{"epoch": 0.11286919831223628, "grad_norm": 0.642715573310852, "learning_rate": 0.0015, "loss": 1.8397, "step": 1070},
{"epoch": 0.11392405063291139, "grad_norm": 0.7579887509346008, "learning_rate": 0.0015, "loss": 1.8407, "step": 1080},
{"epoch": 0.1149789029535865, "grad_norm": 1.0999778509140015, "learning_rate": 0.0015, "loss": 1.8539, "step": 1090},
{"epoch": 0.1160337552742616, "grad_norm": 0.7269627451896667, "learning_rate": 0.0015, "loss": 1.8403, "step": 1100},
{"epoch": 0.11708860759493671, "grad_norm": 0.6931649446487427, "learning_rate": 0.0015, "loss": 1.8427, "step": 1110},
{"epoch": 0.11814345991561181, "grad_norm": 0.8667834997177124, "learning_rate": 0.0015, "loss": 1.8355, "step": 1120},
{"epoch": 0.11919831223628692, "grad_norm": 0.7619576454162598, "learning_rate": 0.0015, "loss": 1.8222, "step": 1130},
{"epoch": 0.12025316455696203, "grad_norm": 0.7043116092681885, "learning_rate": 0.0015, "loss": 1.8362, "step": 1140},
{"epoch": 0.12130801687763713, "grad_norm": 0.983506977558136, "learning_rate": 0.0015, "loss": 1.8228, "step": 1150},
{"epoch": 0.12236286919831224, "grad_norm": 0.7836828231811523, "learning_rate": 0.0015, "loss": 1.832, "step": 1160},
{"epoch": 0.12341772151898735, "grad_norm": 0.6655563116073608, "learning_rate": 0.0015, "loss": 1.8178, "step": 1170},
{"epoch": 0.12447257383966245, "grad_norm": 0.8217303156852722, "learning_rate": 0.0015, "loss": 1.8054, "step": 1180},
{"epoch": 0.12552742616033755, "grad_norm": 0.7065832614898682, "learning_rate": 0.0015, "loss": 1.8076, "step": 1190},
{"epoch": 0.12658227848101267, "grad_norm": 1.3760422468185425, "learning_rate": 0.0015, "loss": 1.8235, "step": 1200},
{"epoch": 0.12763713080168776, "grad_norm": 0.8185279369354248, "learning_rate": 0.0015, "loss": 1.822, "step": 1210},
{"epoch": 0.12869198312236288, "grad_norm": 0.727121114730835, "learning_rate": 0.0015, "loss": 1.7928, "step": 1220},
{"epoch": 0.12974683544303797, "grad_norm": 0.7874573469161987, "learning_rate": 0.0015, "loss": 1.7883, "step": 1230},
{"epoch": 0.1308016877637131, "grad_norm": 0.6458255052566528, "learning_rate": 0.0015, "loss": 1.8058, "step": 1240},
{"epoch": 0.13185654008438819, "grad_norm": 0.7022703886032104, "learning_rate": 0.0015, "loss": 1.8046, "step": 1250},
{"epoch": 0.13291139240506328, "grad_norm": 1.2606161832809448, "learning_rate": 0.0015, "loss": 1.7912, "step": 1260},
{"epoch": 0.1339662447257384, "grad_norm": 0.6535800695419312, "learning_rate": 0.0015, "loss": 1.7835, "step": 1270},
{"epoch": 0.1350210970464135, "grad_norm": 0.6609776020050049, "learning_rate": 0.0015, "loss": 1.7984, "step": 1280},
{"epoch": 0.1360759493670886, "grad_norm": 0.7237253189086914, "learning_rate": 0.0015, "loss": 1.7837, "step": 1290},
{"epoch": 0.1371308016877637, "grad_norm": 0.6983092427253723, "learning_rate": 0.0015, "loss": 1.7736, "step": 1300},
{"epoch": 0.13818565400843882, "grad_norm": 0.7950878739356995, "learning_rate": 0.0015, "loss": 1.7806, "step": 1310},
{"epoch": 0.13924050632911392, "grad_norm": 0.7377669811248779, "learning_rate": 0.0015, "loss": 1.7784, "step": 1320},
{"epoch": 0.14029535864978904, "grad_norm": 1.0312517881393433, "learning_rate": 0.0015, "loss": 1.7716, "step": 1330},
{"epoch": 0.14135021097046413, "grad_norm": 0.7297598719596863, "learning_rate": 0.0015, "loss": 1.7748, "step": 1340},
{"epoch": 0.14240506329113925, "grad_norm": 0.7107803821563721, "learning_rate": 0.0015, "loss": 1.777, "step": 1350},
{"epoch": 0.14345991561181434, "grad_norm": 0.6729479432106018, "learning_rate": 0.0015, "loss": 1.7673, "step": 1360},
{"epoch": 0.14451476793248946, "grad_norm": 0.6634864807128906, "learning_rate": 0.0015, "loss": 1.7648, "step": 1370},
{"epoch": 0.14556962025316456, "grad_norm": 0.6926726698875427, "learning_rate": 0.0015, "loss": 1.7565, "step": 1380},
{"epoch": 0.14662447257383968, "grad_norm": 0.8461670279502869, "learning_rate": 0.0015, "loss": 1.7643, "step": 1390},
{"epoch": 0.14767932489451477, "grad_norm": 0.9065084457397461, "learning_rate": 0.0015, "loss": 1.7484, "step": 1400},
{"epoch": 0.14873417721518986, "grad_norm": 0.68721604347229, "learning_rate": 0.0015, "loss": 1.7508, "step": 1410},
{"epoch": 0.14978902953586498, "grad_norm": 0.7580296993255615, "learning_rate": 0.0015, "loss": 1.758, "step": 1420},
{"epoch": 0.15084388185654007, "grad_norm": 0.7169002294540405, "learning_rate": 0.0015, "loss": 1.7543, "step": 1430},
{"epoch": 0.1518987341772152, "grad_norm": 0.673450231552124, "learning_rate": 0.0015, "loss": 1.7479, "step": 1440},
{"epoch": 0.1529535864978903, "grad_norm": 0.8440428376197815, "learning_rate": 0.0015, "loss": 1.7478, "step": 1450},
{"epoch": 0.1540084388185654, "grad_norm": 0.6620081663131714, "learning_rate": 0.0015, "loss": 1.7406, "step": 1460},
{"epoch": 0.1550632911392405, "grad_norm": 0.8996078968048096, "learning_rate": 0.0015, "loss": 1.7384, "step": 1470},
{"epoch": 0.15611814345991562, "grad_norm": 0.7992554306983948, "learning_rate": 0.0015, "loss": 1.7437, "step": 1480},
{"epoch": 0.1571729957805907, "grad_norm": 0.6644994616508484, "learning_rate": 0.0015, "loss": 1.7402, "step": 1490},
{"epoch": 0.15822784810126583, "grad_norm": 0.6620984077453613, "learning_rate": 0.0015, "loss": 1.7411, "step": 1500},
{"epoch": 0.15928270042194093, "grad_norm": 0.7442142367362976, "learning_rate": 0.0015, "loss": 1.7373, "step": 1510},
{"epoch": 0.16033755274261605, "grad_norm": 0.728027880191803, "learning_rate": 0.0015, "loss": 1.7397, "step": 1520},
{"epoch": 0.16139240506329114, "grad_norm": 0.680416464805603, "learning_rate": 0.0015, "loss": 1.721, "step": 1530},
{"epoch": 0.16244725738396623, "grad_norm": 0.6266154646873474, "learning_rate": 0.0015, "loss": 1.7126, "step": 1540},
{"epoch": 0.16350210970464135, "grad_norm": 0.6954091191291809, "learning_rate": 0.0015, "loss": 1.7163, "step": 1550},
{"epoch": 0.16455696202531644, "grad_norm": 0.6842447519302368, "learning_rate": 0.0015, "loss": 1.7154, "step": 1560},
{"epoch": 0.16561181434599156, "grad_norm": 0.6557826995849609, "learning_rate": 0.0015, "loss": 1.7081, "step": 1570},
{"epoch": 0.16666666666666666, "grad_norm": 0.7723187804222107, "learning_rate": 0.0015, "loss": 1.721, "step": 1580},
{"epoch": 0.16772151898734178, "grad_norm": 0.6312679052352905, "learning_rate": 0.0015, "loss": 1.7185, "step": 1590},
{"epoch": 0.16877637130801687, "grad_norm": 0.7498424053192139, "learning_rate": 0.0015, "loss": 1.7233, "step": 1600},
{"epoch": 0.169831223628692, "grad_norm": 0.6825190782546997, "learning_rate": 0.0015, "loss": 1.7168, "step": 1610},
{"epoch": 0.17088607594936708, "grad_norm": 0.6334119439125061, "learning_rate": 0.0015, "loss": 1.6864, "step": 1620},
{"epoch": 0.1719409282700422, "grad_norm": 0.6661301851272583, "learning_rate": 0.0015, "loss": 1.6991, "step": 1630},
{"epoch": 0.1729957805907173, "grad_norm": 0.7066164016723633, "learning_rate": 0.0015, "loss": 1.7071, "step": 1640},
{"epoch": 0.17405063291139242, "grad_norm": 1.2051690816879272, "learning_rate": 0.0015, "loss": 1.6975, "step": 1650},
{"epoch": 0.1751054852320675, "grad_norm": 0.6701253056526184, "learning_rate": 0.0015, "loss": 1.6953, "step": 1660},
{"epoch": 0.17616033755274263, "grad_norm": 0.7021659016609192, "learning_rate": 0.0015, "loss": 1.7067, "step": 1670},
{"epoch": 0.17721518987341772, "grad_norm": 0.9706708192825317, "learning_rate": 0.0015, "loss": 1.693, "step": 1680},
{"epoch": 0.17827004219409281, "grad_norm": 0.7173320055007935, "learning_rate": 0.0015, "loss": 1.6879, "step": 1690},
{"epoch": 0.17932489451476794, "grad_norm": 0.661887526512146, "learning_rate": 0.0015, "loss": 1.7034, "step": 1700},
{"epoch": 0.18037974683544303, "grad_norm": 0.9063208699226379, "learning_rate": 0.0015, "loss": 1.6858, "step": 1710},
{"epoch": 0.18143459915611815, "grad_norm": 0.7057472467422485, "learning_rate": 0.0015, "loss": 1.6827, "step": 1720},
{"epoch": 0.18248945147679324, "grad_norm": 0.6843087077140808, "learning_rate": 0.0015, "loss": 1.6858, "step": 1730},
{"epoch": 0.18354430379746836, "grad_norm": 0.8571721911430359, "learning_rate": 0.0015, "loss": 1.6897, "step": 1740},
{"epoch": 0.18459915611814345, "grad_norm": 0.632314920425415, "learning_rate": 0.0015, "loss": 1.7077, "step": 1750},
{"epoch": 0.18565400843881857, "grad_norm": 0.6517012119293213, "learning_rate": 0.0015, "loss": 1.6838, "step": 1760},
{"epoch": 0.18670886075949367, "grad_norm": 0.6460869908332825, "learning_rate": 0.0015, "loss": 1.6888, "step": 1770},
{"epoch": 0.1877637130801688, "grad_norm": 0.746279776096344, "learning_rate": 0.0015, "loss": 1.6894, "step": 1780},
{"epoch": 0.18881856540084388, "grad_norm": 1.0219115018844604, "learning_rate": 0.0015, "loss": 1.6931, "step": 1790},
{"epoch": 0.189873417721519, "grad_norm": 1.011742115020752, "learning_rate": 0.0015, "loss": 1.7003, "step": 1800},
{"epoch": 0.1909282700421941, "grad_norm": 0.6796727180480957, "learning_rate": 0.0015, "loss": 1.6699, "step": 1810},
{"epoch": 0.19198312236286919, "grad_norm": 0.6485866904258728, "learning_rate": 0.0015, "loss": 1.665, "step": 1820},
{"epoch": 0.1930379746835443, "grad_norm": 0.6299443244934082, "learning_rate": 0.0015, "loss": 1.667, "step": 1830},
{"epoch": 0.1940928270042194, "grad_norm": 0.707040011882782, "learning_rate": 0.0015, "loss": 1.6646, "step": 1840},
{"epoch": 0.19514767932489452, "grad_norm": 0.6451976895332336, "learning_rate": 0.0015, "loss": 1.6718, "step": 1850},
{"epoch": 0.1962025316455696, "grad_norm": 0.6749624609947205, "learning_rate": 0.0015, "loss": 1.6782, "step": 1860},
{"epoch": 0.19725738396624473, "grad_norm": 0.6485472917556763, "learning_rate": 0.0015, "loss": 1.6741, "step": 1870},
{"epoch": 0.19831223628691982, "grad_norm": 0.6271134614944458, "learning_rate": 0.0015, "loss": 1.6642, "step": 1880},
{"epoch": 0.19936708860759494, "grad_norm": 0.6472867131233215, "learning_rate": 0.0015, "loss": 1.6682, "step": 1890},
{"epoch": 0.20042194092827004, "grad_norm": 1.1727449893951416, "learning_rate": 0.0015, "loss": 1.6693, "step": 1900},
{"epoch": 0.20147679324894516, "grad_norm": 1.0739372968673706, "learning_rate": 0.0015, "loss": 1.6699, "step": 1910},
{"epoch": 0.20253164556962025, "grad_norm": 0.6262565851211548, "learning_rate": 0.0015, "loss": 1.6639, "step": 1920},
{"epoch": 0.20358649789029537, "grad_norm": 0.9380462765693665, "learning_rate": 0.0015, "loss": 1.6629, "step": 1930},
{"epoch": 0.20464135021097046, "grad_norm": 0.6129502058029175, "learning_rate": 0.0015, "loss": 1.6537, "step": 1940},
{"epoch": 0.20569620253164558, "grad_norm": 0.610660970211029, "learning_rate": 0.0015, "loss": 1.6551, "step": 1950},
{"epoch": 0.20675105485232068, "grad_norm": 0.7108441591262817, "learning_rate": 0.0015, "loss": 1.6571, "step": 1960},
{"epoch": 0.20780590717299577, "grad_norm": 0.61849445104599, "learning_rate": 0.0015, "loss": 1.6689, "step": 1970},
{"epoch": 0.2088607594936709, "grad_norm": 0.6824235916137695, "learning_rate": 0.0015, "loss": 1.65, "step": 1980},
{"epoch": 0.20991561181434598, "grad_norm": 0.7959401607513428, "learning_rate": 0.0015, "loss": 1.6474, "step": 1990},
{"epoch": 0.2109704641350211, "grad_norm": 0.7023043632507324, "learning_rate": 0.0015, "loss": 1.6458, "step": 2000},
{"epoch": 0.2120253164556962, "grad_norm": 0.9508693218231201, "learning_rate": 0.0015, "loss": 1.655, "step": 2010},
{"epoch": 0.21308016877637131, "grad_norm": 0.610762357711792, "learning_rate": 0.0015, "loss": 1.6665, "step": 2020},
{"epoch": 0.2141350210970464, "grad_norm": 0.6003895998001099, "learning_rate": 0.0015, "loss": 1.6529, "step": 2030},
{"epoch": 0.21518987341772153, "grad_norm": 0.6540138125419617, "learning_rate": 0.0015, "loss": 1.6514, "step": 2040},
{"epoch": 0.21624472573839662, "grad_norm": 0.6738853454589844, "learning_rate": 0.0015, "loss": 1.6475, "step": 2050},
{"epoch": 0.21729957805907174, "grad_norm": 0.6962546706199646, "learning_rate": 0.0015, "loss": 1.6416, "step": 2060},
{"epoch": 0.21835443037974683, "grad_norm": 0.6957595944404602, "learning_rate": 0.0015, "loss": 1.6443, "step": 2070},
{"epoch": 0.21940928270042195, "grad_norm": 0.7290619015693665, "learning_rate": 0.0015, "loss": 1.6469, "step": 2080},
{"epoch": 0.22046413502109705, "grad_norm": 0.6296234726905823, "learning_rate": 0.0015, "loss": 1.6384, "step": 2090},
{"epoch": 0.22151898734177214, "grad_norm": 0.5824480652809143, "learning_rate": 0.0015, "loss": 1.6441, "step": 2100},
{"epoch": 0.22257383966244726, "grad_norm": 0.975032389163971, "learning_rate": 0.0015, "loss": 1.6376, "step": 2110},
{"epoch": 0.22362869198312235, "grad_norm": 0.5698814988136292, "learning_rate": 0.0015, "loss": 1.633, "step": 2120},
{"epoch": 0.22468354430379747, "grad_norm": 0.6816406846046448, "learning_rate": 0.0015, "loss": 1.639, "step": 2130},
{"epoch": 0.22573839662447256, "grad_norm": 0.6882089972496033, "learning_rate": 0.0015, "loss": 1.6291, "step": 2140},
{"epoch": 0.22679324894514769, "grad_norm": 0.7746725082397461, "learning_rate": 0.0015, "loss": 1.6364, "step": 2150},
{"epoch": 0.22784810126582278, "grad_norm": 1.1132783889770508, "learning_rate": 0.0015, "loss": 1.6518, "step": 2160},
{"epoch": 0.2289029535864979, "grad_norm": 0.6515901684761047, "learning_rate": 0.0015, "loss": 1.6343, "step": 2170},
{"epoch": 0.229957805907173, "grad_norm": 0.6279417276382446, "learning_rate": 0.0015, "loss": 1.6195, "step": 2180},
{"epoch": 0.2310126582278481, "grad_norm": 0.6505322456359863, "learning_rate": 0.0015, "loss": 1.6359, "step": 2190},
{"epoch": 0.2320675105485232, "grad_norm": 0.6479453444480896, "learning_rate": 0.0015, "loss": 1.6348, "step": 2200},
{"epoch": 0.23312236286919832, "grad_norm": 0.6634397506713867, "learning_rate": 0.0015, "loss": 1.6199, "step": 2210},
{"epoch": 0.23417721518987342, "grad_norm": 0.7231172323226929, "learning_rate": 0.0015, "loss": 1.6334, "step": 2220},
{"epoch": 0.23523206751054854, "grad_norm": 0.9623863697052002, "learning_rate": 0.0015, "loss": 1.6273, "step": 2230},
{"epoch": 0.23628691983122363, "grad_norm": 0.6702282428741455, "learning_rate": 0.0015, "loss": 1.6269, "step": 2240},
{"epoch": 0.23734177215189872, "grad_norm": 0.7227782607078552, "learning_rate": 0.0015, "loss": 1.6222, "step": 2250},
{"epoch": 0.23839662447257384, "grad_norm": 0.6325075626373291, "learning_rate": 0.0015, "loss": 1.6106, "step": 2260},
{"epoch": 0.23945147679324894, "grad_norm": 0.7028928399085999, "learning_rate": 0.0015, "loss": 1.6446, "step": 2270},
{"epoch": 0.24050632911392406, "grad_norm": 0.8957093358039856, "learning_rate": 0.0015, "loss": 1.6307, "step": 2280},
{"epoch": 0.24156118143459915, "grad_norm": 1.2684556245803833, "learning_rate": 0.0015, "loss": 1.6194, "step": 2290},
{"epoch": 0.24261603375527427, "grad_norm": 0.6649700403213501, "learning_rate": 0.0015, "loss": 1.615, "step": 2300},
{"epoch": 0.24367088607594936, "grad_norm": 0.6177629828453064, "learning_rate": 0.0015, "loss": 1.6164, "step": 2310},
{"epoch": 0.24472573839662448, "grad_norm": 0.62782222032547, "learning_rate": 0.0015, "loss": 1.6224, "step": 2320},
{"epoch": 0.24578059071729957, "grad_norm": 0.5993375778198242, "learning_rate": 0.0015, "loss": 1.6071, "step": 2330},
{"epoch": 0.2468354430379747, "grad_norm": 0.7031516432762146, "learning_rate": 0.0015, "loss": 1.6027, "step": 2340},
{"epoch": 0.2478902953586498, "grad_norm": 0.7956550717353821, "learning_rate": 0.0015, "loss": 1.6064, "step": 2350},
{"epoch": 0.2489451476793249, "grad_norm": 0.5608900189399719, "learning_rate": 0.0015, "loss": 1.6189, "step": 2360},
{"epoch": 0.25, "grad_norm": 0.5729573369026184, "learning_rate": 0.0015, "loss": 1.613, "step": 2370},
{"epoch": 0.2510548523206751, "grad_norm": 0.6592503190040588, "learning_rate": 0.0015, "loss": 1.6106, "step": 2380},
{"epoch": 0.2521097046413502, "grad_norm": 0.6544248461723328, "learning_rate": 0.0015, "loss": 1.6119, "step": 2390},
{"epoch": 0.25316455696202533, "grad_norm": 0.6607205867767334, "learning_rate": 0.0015, "loss": 1.6101, "step": 2400},
{"epoch": 0.2542194092827004, "grad_norm": 0.6016968488693237, "learning_rate": 0.0015, "loss": 1.6105, "step": 2410},
{"epoch": 0.2552742616033755, "grad_norm": 0.7552834749221802, "learning_rate": 0.0015, "loss": 1.6079, "step": 2420},
{"epoch": 0.2563291139240506, "grad_norm": 0.6352930068969727, "learning_rate": 0.0015, "loss": 1.6039, "step": 2430},
{"epoch": 0.25738396624472576, "grad_norm": 0.7600632905960083, "learning_rate": 0.0015, "loss": 1.6145, "step": 2440},
{"epoch": 0.25843881856540085, "grad_norm": 0.7010210156440735, "learning_rate": 0.0015, "loss": 1.6019, "step": 2450},
{"epoch": 0.25949367088607594, "grad_norm": 0.5926560759544373, "learning_rate": 0.0015, "loss": 1.6154, "step": 2460},
{"epoch": 0.26054852320675104, "grad_norm": 0.9674777388572693, "learning_rate": 0.0015, "loss": 1.6056, "step": 2470},
{"epoch": 0.2616033755274262, "grad_norm": 0.6830757260322571, "learning_rate": 0.0015, "loss": 1.611, "step": 2480},
{"epoch": 0.2626582278481013, "grad_norm": 0.7245091795921326, "learning_rate": 0.0015, "loss": 1.5945, "step": 2490},
{"epoch": 0.26371308016877637, "grad_norm": 0.6287095546722412, "learning_rate": 0.0015, "loss": 1.5999, "step": 2500},
{"epoch": 0.26476793248945146, "grad_norm": 0.5811812281608582, "learning_rate": 0.0015, "loss": 1.5997, "step": 2510},
{"epoch": 0.26582278481012656, "grad_norm": 0.8051828742027283, "learning_rate": 0.0015, "loss": 1.6027, "step": 2520},
{"epoch": 0.2668776371308017, "grad_norm": 0.690095841884613, "learning_rate": 0.0015, "loss": 1.5991, "step": 2530},
{"epoch": 0.2679324894514768, "grad_norm": 0.6827349066734314, "learning_rate": 0.0015, "loss": 1.5996, "step": 2540},
{"epoch": 0.2689873417721519, "grad_norm": 0.5629339218139648, "learning_rate": 0.0015, "loss": 1.5949, "step": 2550},
{"epoch": 0.270042194092827, "grad_norm": 0.8621921539306641, "learning_rate": 0.0015, "loss": 1.604, "step": 2560},
{"epoch": 0.27109704641350213, "grad_norm": 0.6353599429130554, "learning_rate": 0.0015, "loss": 1.5982, "step": 2570},
{"epoch": 0.2721518987341772, "grad_norm": 0.5872393846511841, "learning_rate": 0.0015, "loss": 1.5955, "step": 2580},
{"epoch": 0.2732067510548523, "grad_norm": 0.5922430753707886, "learning_rate": 0.0015, "loss": 1.6028, "step": 2590},
{"epoch": 0.2742616033755274, "grad_norm": 0.615271270275116, "learning_rate": 0.0015, "loss": 1.6015, "step": 2600},
{"epoch": 0.27531645569620256, "grad_norm": 0.6027426719665527, "learning_rate": 0.0015, "loss": 1.599, "step": 2610},
{"epoch": 0.27637130801687765, "grad_norm": 0.6611328721046448, "learning_rate": 0.0015, "loss": 1.598, "step": 2620},
{"epoch": 0.27742616033755274, "grad_norm": 0.6782243847846985, "learning_rate": 0.0015, "loss": 1.586, "step": 2630},
{"epoch": 0.27848101265822783, "grad_norm": 0.6558618545532227, "learning_rate": 0.0015, "loss": 1.5986, "step": 2640},
{"epoch": 0.2795358649789029, "grad_norm": 0.6752070188522339, "learning_rate": 0.0015, "loss": 1.5919, "step": 2650},
{"epoch": 0.2805907172995781, "grad_norm": 0.7462228536605835, "learning_rate": 0.0015, "loss": 1.5855, "step": 2660},
{"epoch": 0.28164556962025317, "grad_norm": 0.6451160907745361, "learning_rate": 0.0015, "loss": 1.5828, "step": 2670},
{"epoch": 0.28270042194092826, "grad_norm": 0.5997004508972168, "learning_rate": 0.0015, "loss": 1.5869, "step": 2680},
{"epoch": 0.28375527426160335, "grad_norm": 0.666940450668335, "learning_rate": 0.0015, "loss": 1.5865, "step": 2690},
{"epoch": 0.2848101265822785, "grad_norm": 0.6081534624099731, "learning_rate": 0.0015, "loss": 1.5816, "step": 2700},
{"epoch": 0.2858649789029536, "grad_norm": 0.6612553000450134, "learning_rate": 0.0015, "loss": 1.5899, "step": 2710},
{"epoch": 0.2869198312236287, "grad_norm": 0.6863659620285034, "learning_rate": 0.0015, "loss": 1.5876, "step": 2720},
{"epoch": 0.2879746835443038, "grad_norm": 0.7044730186462402, "learning_rate": 0.0015, "loss": 1.5678, "step": 2730},
{"epoch": 0.2890295358649789, "grad_norm": 0.9346474409103394, "learning_rate": 0.0015, "loss": 1.5832, "step": 2740},
{"epoch": 0.290084388185654, "grad_norm": 0.7210971117019653, "learning_rate": 0.0015, "loss": 1.5848, "step": 2750},
{"epoch": 0.2911392405063291, "grad_norm": 0.8036166429519653, "learning_rate": 0.0015, "loss": 1.5939, "step": 2760},
{"epoch": 0.2921940928270042, "grad_norm": 0.5704184174537659, "learning_rate": 0.0015, "loss": 1.5877, "step": 2770},
{"epoch": 0.29324894514767935, "grad_norm": 0.6624103784561157, "learning_rate": 0.0015, "loss": 1.5855, "step": 2780},
{"epoch": 0.29430379746835444, "grad_norm": 0.6007447838783264, "learning_rate": 0.0015, "loss": 1.5836, "step": 2790},
{"epoch": 0.29535864978902954, "grad_norm": 0.5330153703689575, "learning_rate": 0.0015, "loss": 1.5809, "step": 2800},
{"epoch": 0.29641350210970463, "grad_norm": 0.6317871809005737, "learning_rate": 0.0015, "loss": 1.5817, "step": 2810},
{"epoch": 0.2974683544303797, "grad_norm": 0.6370088458061218, "learning_rate": 0.0015, "loss": 1.5856, "step": 2820},
{"epoch": 0.29852320675105487, "grad_norm": 1.0981173515319824, "learning_rate": 0.0015, "loss": 1.5637, "step": 2830},
{"epoch": 0.29957805907172996, "grad_norm": 0.8902475833892822, "learning_rate": 0.0015, "loss": 1.5675, "step": 2840},
{"epoch": 0.30063291139240506, "grad_norm": 0.6024956107139587, "learning_rate": 0.0015, "loss": 1.5698, "step": 2850},
{"epoch": 0.30168776371308015, "grad_norm": 0.5766766667366028, "learning_rate": 0.0015, "loss": 1.5725, "step": 2860},
{"epoch": 0.3027426160337553, "grad_norm": 0.7689202427864075, "learning_rate": 0.0015, "loss": 1.5739, "step": 2870},
{"epoch": 0.3037974683544304, "grad_norm": 0.7470451593399048, "learning_rate": 0.0015, "loss": 1.5627, "step": 2880},
{"epoch": 0.3048523206751055, "grad_norm": 0.6598032116889954, "learning_rate": 0.0015, "loss": 1.5685, "step": 2890},
{"epoch": 0.3059071729957806, "grad_norm": 0.6697877645492554, "learning_rate": 0.0015, "loss": 1.5751, "step": 2900},
{"epoch": 0.3069620253164557, "grad_norm": 0.7237564921379089, "learning_rate": 0.0015, "loss": 1.5744, "step": 2910},
{"epoch": 0.3080168776371308, "grad_norm": 0.5626104474067688, "learning_rate": 0.0015, "loss": 1.5679, "step": 2920},
{"epoch": 0.3090717299578059, "grad_norm": 0.6248372197151184, "learning_rate": 0.0015, "loss": 1.5649, "step": 2930},
{"epoch": 0.310126582278481, "grad_norm": 0.5619105696678162, "learning_rate": 0.0015, "loss": 1.5715, "step": 2940},
{"epoch": 0.3111814345991561, "grad_norm": 0.8324016332626343, "learning_rate": 0.0015, "loss": 1.5809, "step": 2950},
{"epoch": 0.31223628691983124, "grad_norm": 0.6818839907646179, "learning_rate": 0.0015, "loss": 1.5626, "step": 2960},
{"epoch": 0.31329113924050633, "grad_norm": 0.8424079418182373, "learning_rate": 0.0015, "loss": 1.5555, "step": 2970},
{"epoch": 0.3143459915611814, "grad_norm": 0.5838778614997864, "learning_rate": 0.0015, "loss": 1.5597, "step": 2980},
{"epoch": 0.3154008438818565, "grad_norm": 0.7394194006919861, "learning_rate": 0.0015, "loss": 1.5782, "step": 2990},
{"epoch": 0.31645569620253167, "grad_norm": 0.5810507535934448, "learning_rate": 0.0015, "loss": 1.5541, "step": 3000},
{"epoch": 0.31751054852320676, "grad_norm": 0.6484794616699219, "learning_rate": 0.0015, "loss": 1.5625, "step": 3010},
{"epoch": 0.31856540084388185, "grad_norm": 0.895905077457428, "learning_rate": 0.0015, "loss": 1.5671, "step": 3020},
{"epoch": 0.31962025316455694, "grad_norm": 0.6598799228668213, "learning_rate": 0.0015, "loss": 1.5531, "step": 3030},
{"epoch": 0.3206751054852321, "grad_norm": 0.635413408279419, "learning_rate": 0.0015, "loss": 1.5617, "step": 3040},
{"epoch": 0.3217299578059072, "grad_norm": 0.5888679027557373, "learning_rate": 0.0015, "loss": 1.5576, "step": 3050},
{"epoch": 0.3227848101265823, "grad_norm": 0.5969088077545166, "learning_rate": 0.0015, "loss": 1.5694, "step": 3060},
{"epoch": 0.32383966244725737, "grad_norm": 0.5850529074668884, "learning_rate": 0.0015, "loss": 1.5567, "step": 3070},
{"epoch": 0.32489451476793246, "grad_norm": 0.6805583238601685, "learning_rate": 0.0015, "loss": 1.5465, "step": 3080},
{"epoch": 0.3259493670886076, "grad_norm": 0.5836696624755859, "learning_rate": 0.0015, "loss": 1.5649, "step": 3090},
{"epoch": 0.3270042194092827, "grad_norm": 0.6702815294265747, "learning_rate": 0.0015, "loss": 1.5541, "step": 3100},
{"epoch": 0.3280590717299578, "grad_norm": 0.6411579847335815, "learning_rate": 0.0015, "loss": 1.5566, "step": 3110},
{"epoch": 0.3291139240506329, "grad_norm": 0.5650330781936646, "learning_rate": 0.0015, "loss": 1.558, "step": 3120},
{"epoch": 0.33016877637130804, "grad_norm": 0.642634928226471, "learning_rate": 0.0015, "loss": 1.5713, "step": 3130},
{"epoch": 0.33122362869198313, "grad_norm": 1.084348201751709, "learning_rate": 0.0015, "loss": 1.547, "step": 3140},
{"epoch": 0.3322784810126582, "grad_norm": 0.5517160892486572, "learning_rate": 0.0015, "loss": 1.5552, "step": 3150},
{"epoch": 0.3333333333333333, "grad_norm": 0.6044959425926208, "learning_rate": 0.0015, "loss": 1.5557, "step": 3160},
{"epoch": 0.33438818565400846, "grad_norm": 0.5960533618927002, "learning_rate": 0.0015, "loss": 1.5559, "step": 3170},
{"epoch": 0.33544303797468356, "grad_norm": 0.6424879431724548, "learning_rate": 0.0015, "loss": 1.5533, "step": 3180},
{"epoch": 0.33649789029535865, "grad_norm": 0.6925928592681885, "learning_rate": 0.0015, "loss": 1.5531, "step": 3190},
{"epoch": 0.33755274261603374, "grad_norm": 0.8220000863075256, "learning_rate": 0.0015, "loss": 1.5488, "step": 3200},
{"epoch": 0.33860759493670883, "grad_norm": 0.7931057214736938, "learning_rate": 0.0015, "loss": 1.5621, "step": 3210},
{"epoch": 0.339662447257384, "grad_norm": 0.6466951966285706, "learning_rate": 0.0015, "loss": 1.5526, "step": 3220},
{"epoch": 0.3407172995780591, "grad_norm": 0.6298534274101257, "learning_rate": 0.0015, "loss": 1.5468, "step": 3230},
{"epoch": 0.34177215189873417, "grad_norm": 0.7880116105079651, "learning_rate": 0.0015, "loss": 1.5271, "step": 3240},
{"epoch": 0.34282700421940926, "grad_norm": 0.7439488172531128, "learning_rate": 0.0015, "loss": 1.5562, "step": 3250},
{"epoch": 0.3438818565400844, "grad_norm": 0.5762472748756409, "learning_rate": 0.0015, "loss": 1.5411, "step": 3260},
{"epoch": 0.3449367088607595, "grad_norm": 0.6992225646972656, "learning_rate": 0.0015, "loss": 1.5588, "step": 3270},
{"epoch": 0.3459915611814346, "grad_norm": 0.6673420071601868, "learning_rate": 0.0015, "loss": 1.5588, "step": 3280},
{"epoch": 0.3470464135021097, "grad_norm": 0.6831956505775452, "learning_rate": 0.0015, "loss": 1.5426, "step": 3290},
{"epoch": 0.34810126582278483, "grad_norm": 0.5462791919708252, "learning_rate": 0.0015, "loss": 1.5504, "step": 3300},
{"epoch": 0.3491561181434599, "grad_norm": 0.6818351149559021, "learning_rate": 0.0015, "loss": 1.5506, "step": 3310},
{"epoch": 0.350210970464135, "grad_norm": 0.8001487851142883, "learning_rate": 0.0015, "loss": 1.5527, "step": 3320},
{"epoch": 0.3512658227848101, "grad_norm": 0.5488734245300293, "learning_rate": 0.0015, "loss": 1.5443, "step": 3330},
{"epoch": 0.35232067510548526, "grad_norm": 0.7689006924629211, "learning_rate": 0.0015, "loss": 1.5471, "step": 3340},
{"epoch": 0.35337552742616035, "grad_norm": 0.6005131602287292, "learning_rate": 0.0015, "loss": 1.5421, "step": 3350},
{"epoch": 0.35443037974683544, "grad_norm": 0.6102413535118103, "learning_rate": 0.0015, "loss": 1.5415, "step": 3360},
{"epoch": 0.35548523206751054, "grad_norm": 0.7920178174972534, "learning_rate": 0.0015, "loss": 1.5482, "step": 3370},
{"epoch": 0.35654008438818563, "grad_norm": 0.6190624833106995, "learning_rate": 0.0015, "loss": 1.5438, "step": 3380},
{"epoch": 0.3575949367088608, "grad_norm": 0.6588506698608398, "learning_rate": 0.0015, "loss": 1.5379, "step": 3390},
{"epoch": 0.35864978902953587, "grad_norm": 0.5450959205627441, "learning_rate": 0.0015, "loss": 1.5435, "step": 3400},
{"epoch": 0.35970464135021096, "grad_norm": 0.568746030330658, "learning_rate": 0.0015, "loss": 1.5418, "step": 3410},
{"epoch": 0.36075949367088606, "grad_norm": 0.6272512674331665, "learning_rate": 0.0015, "loss": 1.5255, "step": 3420},
{"epoch": 0.3618143459915612, "grad_norm": 0.5547628402709961, "learning_rate": 0.0015, "loss": 1.5395, "step": 3430},
{"epoch": 0.3628691983122363, "grad_norm": 0.5821362137794495, "learning_rate": 0.0015, "loss": 1.538, "step": 3440},
{"epoch": 0.3639240506329114, "grad_norm": 0.6259918212890625, "learning_rate": 0.0015, "loss": 1.534, "step": 3450},
{"epoch": 0.3649789029535865, "grad_norm": 0.607772946357727, "learning_rate": 0.0015, "loss": 1.5313, "step": 3460},
{"epoch": 0.36603375527426163, "grad_norm": 0.9403669238090515, "learning_rate": 0.0015, "loss": 1.5476, "step": 3470},
{"epoch": 0.3670886075949367, "grad_norm": 0.5947582721710205, "learning_rate": 0.0015, "loss": 1.5519, "step": 3480},
{"epoch": 0.3681434599156118, "grad_norm": 0.5908080339431763, "learning_rate": 0.0015, "loss": 1.5425, "step": 3490},
{"epoch": 0.3691983122362869, "grad_norm": 0.6022931933403015, "learning_rate": 0.0015, "loss": 1.5232, "step": 3500},
{"epoch": 0.370253164556962, "grad_norm": 0.7703867554664612, "learning_rate": 0.0015, "loss": 1.5456, "step": 3510},
{"epoch": 0.37130801687763715, "grad_norm": 0.6678274869918823, "learning_rate": 0.0015, "loss": 1.5387, "step": 3520},
{"epoch": 0.37236286919831224, "grad_norm": 0.6669195294380188, "learning_rate": 0.0015, "loss": 1.5499, "step": 3530},
{"epoch": 0.37341772151898733, "grad_norm": 1.0063786506652832, "learning_rate": 0.0015, "loss": 1.5438, "step": 3540},
{"epoch": 0.3744725738396624, "grad_norm": 0.916503369808197, "learning_rate": 0.0015, "loss": 1.5345, "step": 3550},
{"epoch": 0.3755274261603376, "grad_norm": 0.6114853024482727, "learning_rate": 0.0015, "loss": 1.5324, "step": 3560},
{"epoch": 0.37658227848101267, "grad_norm": 0.6031618714332581, "learning_rate": 0.0015, "loss": 1.5299, "step": 3570},
{"epoch": 0.37763713080168776, "grad_norm": 0.6233285665512085, "learning_rate": 0.0015, "loss": 1.5302, "step": 3580},
{"epoch": 0.37869198312236285, "grad_norm": 0.6138935685157776, "learning_rate": 0.0015, "loss": 1.5401, "step": 3590},
{"epoch": 0.379746835443038, "grad_norm": 0.7321248650550842, "learning_rate": 0.0015, "loss": 1.5253, "step": 3600},
{"epoch": 0.3808016877637131, "grad_norm": 0.6078612804412842, "learning_rate": 0.0015, "loss": 1.5286, "step": 3610},
{"epoch": 0.3818565400843882, "grad_norm": 0.6599511504173279, "learning_rate": 0.0015, "loss": 1.5288, "step": 3620},
{"epoch": 0.3829113924050633, "grad_norm": 0.6486068367958069, "learning_rate": 0.0015, "loss": 1.5292, "step": 3630},
{"epoch": 0.38396624472573837, "grad_norm": 0.8735135197639465, "learning_rate": 0.0015, "loss": 1.5321, "step": 3640},
{"epoch": 0.3850210970464135, "grad_norm": 0.6769356727600098, "learning_rate": 0.0015, "loss": 1.5399, "step": 3650},
{"epoch": 0.3860759493670886, "grad_norm": 0.7743157148361206, "learning_rate": 0.0015, "loss": 1.5328, "step": 3660},
{"epoch": 0.3871308016877637, "grad_norm": 0.6049450039863586, "learning_rate": 0.0015, "loss": 1.5277, "step": 3670},
{"epoch": 0.3881856540084388, "grad_norm": 0.6719385385513306, "learning_rate": 0.0015, "loss": 1.5281, "step": 3680},
{"epoch": 0.38924050632911394, "grad_norm": 0.576313316822052, "learning_rate": 0.0015, "loss": 1.5382, "step": 3690},
{"epoch": 0.39029535864978904, "grad_norm": 0.6430057883262634, "learning_rate": 0.0015, "loss": 1.5422, "step": 3700},
{"epoch": 0.39135021097046413, "grad_norm": 0.5845285654067993, "learning_rate": 0.0015, "loss": 1.5302, "step": 3710},
{"epoch": 0.3924050632911392, "grad_norm": 0.612210214138031, "learning_rate": 0.0015, "loss": 1.5071, "step": 3720},
{"epoch": 0.39345991561181437, "grad_norm": 0.55598384141922, "learning_rate": 0.0015, "loss": 1.5233, "step": 3730},
{"epoch": 0.39451476793248946, "grad_norm": 0.5805742144584656, "learning_rate": 0.0015, "loss": 1.5258, "step": 3740},
{"epoch": 0.39556962025316456, "grad_norm": 0.6504670977592468, "learning_rate": 0.0015, "loss": 1.531, "step": 3750},
{"epoch": 0.39662447257383965, "grad_norm": 0.7802721858024597, "learning_rate": 0.0015, "loss": 1.5204, "step": 3760},
{"epoch": 0.39767932489451474, "grad_norm": 0.5795727968215942, "learning_rate": 0.0015, "loss": 1.535, "step": 3770},
{"epoch": 0.3987341772151899, "grad_norm": 0.6096338629722595, "learning_rate": 0.0015, "loss": 1.5217, "step": 3780},
{"epoch": 0.399789029535865, "grad_norm": 0.63264399766922, "learning_rate": 0.0015, "loss": 1.5268, "step": 3790},
{"epoch": 0.4008438818565401, "grad_norm": 0.5819994211196899, "learning_rate": 0.0015, "loss": 1.5292, "step": 3800},
{"epoch": 0.40189873417721517, "grad_norm": 0.7298890948295593, "learning_rate": 0.0015, "loss": 1.5206, "step": 3810},
{"epoch": 0.4029535864978903, "grad_norm": 0.6244053840637207, "learning_rate": 0.0015, "loss": 1.5121, "step": 3820},
{"epoch": 0.4040084388185654, "grad_norm": 0.5711541771888733, "learning_rate": 0.0015, "loss": 1.5136, "step": 3830},
{"epoch": 0.4050632911392405, "grad_norm": 0.5333322882652283, "learning_rate": 0.0015, "loss": 1.5331, "step": 3840},
{"epoch": 0.4061181434599156, "grad_norm": 0.6488534212112427, "learning_rate": 0.0015, "loss": 1.5138, "step": 3850},
{"epoch": 0.40717299578059074, "grad_norm": 0.7239291667938232, "learning_rate": 0.0015, "loss": 1.5294, "step": 3860},
{"epoch": 0.40822784810126583, "grad_norm": 0.9742686152458191, "learning_rate": 0.0015, "loss": 1.5187, "step": 3870},
{"epoch": 0.4092827004219409, "grad_norm": 0.6343986988067627, "learning_rate": 0.0015, "loss": 1.5083, "step": 3880},
{"epoch": 0.410337552742616, "grad_norm": 0.6439376473426819, "learning_rate": 0.0015, "loss": 1.5098, "step": 3890},
{"epoch": 0.41139240506329117, "grad_norm": 0.8655267357826233, "learning_rate": 0.0015, "loss": 1.5106, "step": 3900},
{"epoch": 0.41244725738396626, "grad_norm": 0.6127061247825623, "learning_rate": 0.0015, "loss": 1.5127, "step": 3910},
{"epoch": 0.41350210970464135, "grad_norm": 0.6228435635566711, "learning_rate": 0.0015, "loss": 1.5086, "step": 3920},
{"epoch": 0.41455696202531644, "grad_norm": 0.5865804553031921, "learning_rate": 0.0015, "loss": 1.5151, "step": 3930},
{"epoch": 0.41561181434599154, "grad_norm": 0.6816365122795105, "learning_rate": 0.0015, "loss": 1.5133, "step": 3940},
{"epoch": 0.4166666666666667, "grad_norm": 0.8221717476844788, "learning_rate": 0.0015, "loss": 1.5163, "step": 3950},
{"epoch": 0.4177215189873418, "grad_norm": 0.5298409461975098, "learning_rate": 0.0015, "loss": 1.5161, "step": 3960},
{"epoch": 0.41877637130801687, "grad_norm": 0.5596839785575867, "learning_rate": 0.0015, "loss": 1.509, "step": 3970},
{"epoch": 0.41983122362869196, "grad_norm": 0.532119870185852, "learning_rate": 0.0015, "loss": 1.5174, "step": 3980},
{"epoch": 0.4208860759493671, "grad_norm": 0.5433077216148376, "learning_rate": 0.0015, "loss": 1.5094, "step": 3990},
{"epoch": 0.4219409282700422, "grad_norm": 0.5456915497779846, "learning_rate": 0.0015, "loss": 1.5222, "step": 4000},
{"epoch": 0.4229957805907173, "grad_norm": 0.572923481464386, "learning_rate": 0.0015, "loss": 1.5111, "step": 4010},
{"epoch": 0.4240506329113924, "grad_norm": 0.5117266774177551, "learning_rate": 0.0015, "loss": 1.5129, "step": 4020},
{"epoch": 0.42510548523206754, "grad_norm": 0.5024316310882568,
"learning_rate": 0.0015, |
|
"loss": 1.5047, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.42616033755274263, |
|
"grad_norm": 0.5150533318519592, |
|
"learning_rate": 0.0015, |
|
"loss": 1.513, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.4272151898734177, |
|
"grad_norm": 0.5249171853065491, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5133, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.4282700421940928, |
|
"grad_norm": 0.7801232933998108, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5198, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.4293248945147679, |
|
"grad_norm": 0.8610855340957642, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5142, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.43037974683544306, |
|
"grad_norm": 0.6890126466751099, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5201, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.43143459915611815, |
|
"grad_norm": 0.6725890636444092, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5125, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.43248945147679324, |
|
"grad_norm": 0.5794495940208435, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5123, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.43354430379746833, |
|
"grad_norm": 0.6073198914527893, |
|
"learning_rate": 0.0015, |
|
"loss": 1.504, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.4345991561181435, |
|
"grad_norm": 0.7326914668083191, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5107, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.4356540084388186, |
|
"grad_norm": 0.6833801865577698, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5025, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.43670886075949367, |
|
"grad_norm": 0.5677995681762695, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5048, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.43776371308016876, |
|
"grad_norm": 0.6422021985054016, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5124, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.4388185654008439, |
|
"grad_norm": 0.5912719368934631, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5048, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.439873417721519, |
|
"grad_norm": 0.5334175825119019, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5044, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.4409282700421941, |
|
"grad_norm": 0.5514172911643982, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5006, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.4419831223628692, |
|
"grad_norm": 0.6370390057563782, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5065, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.4430379746835443, |
|
"grad_norm": 0.6007202863693237, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5082, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.4440928270042194, |
|
"grad_norm": 0.5911210179328918, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4974, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.4451476793248945, |
|
"grad_norm": 0.6960971355438232, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5068, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.4462025316455696, |
|
"grad_norm": 1.0867031812667847, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5039, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.4472573839662447, |
|
"grad_norm": 0.7056772112846375, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5031, |
|
"step": 4240 |
|
}, |
|
{
"epoch": 0.44831223628691985,
"grad_norm": 0.6110833883285522,
"learning_rate": 0.0015,
"loss": 1.509,
"step": 4250
},
{
"epoch": 0.44936708860759494,
"grad_norm": 0.7566470503807068,
"learning_rate": 0.0015,
"loss": 1.5115,
"step": 4260
},
{
"epoch": 0.45042194092827004,
"grad_norm": 0.5677741765975952,
"learning_rate": 0.0015,
"loss": 1.5004,
"step": 4270
},
{
"epoch": 0.45147679324894513,
"grad_norm": 0.7918340563774109,
"learning_rate": 0.0015,
"loss": 1.4942,
"step": 4280
},
{
"epoch": 0.4525316455696203,
"grad_norm": 0.9699915051460266,
"learning_rate": 0.0015,
"loss": 1.5053,
"step": 4290
},
{
"epoch": 0.45358649789029537,
"grad_norm": 0.6517788767814636,
"learning_rate": 0.0015,
"loss": 1.5048,
"step": 4300
},
{
"epoch": 0.45464135021097046,
"grad_norm": 0.5126602649688721,
"learning_rate": 0.0015,
"loss": 1.4994,
"step": 4310
},
{
"epoch": 0.45569620253164556,
"grad_norm": 0.6962641477584839,
"learning_rate": 0.0015,
"loss": 1.4972,
"step": 4320
},
{
"epoch": 0.45675105485232065,
"grad_norm": 0.5313942432403564,
"learning_rate": 0.0015,
"loss": 1.505,
"step": 4330
},
{
"epoch": 0.4578059071729958,
"grad_norm": 0.6441898941993713,
"learning_rate": 0.0015,
"loss": 1.4961,
"step": 4340
},
{
"epoch": 0.4588607594936709,
"grad_norm": 0.80213862657547,
"learning_rate": 0.0015,
"loss": 1.4976,
"step": 4350
},
{
"epoch": 0.459915611814346,
"grad_norm": 0.5837754607200623,
"learning_rate": 0.0015,
"loss": 1.5131,
"step": 4360
},
{
"epoch": 0.4609704641350211,
"grad_norm": 0.6574090719223022,
"learning_rate": 0.0015,
"loss": 1.4909,
"step": 4370
},
{
"epoch": 0.4620253164556962,
"grad_norm": 0.49875763058662415,
"learning_rate": 0.0015,
"loss": 1.4971,
"step": 4380
},
{
"epoch": 0.4630801687763713,
"grad_norm": 0.5440434217453003,
"learning_rate": 0.0015,
"loss": 1.4923,
"step": 4390
},
{
"epoch": 0.4641350210970464,
"grad_norm": 0.7480255365371704,
"learning_rate": 0.0015,
"loss": 1.4991,
"step": 4400
},
{
"epoch": 0.4651898734177215,
"grad_norm": 0.7502830028533936,
"learning_rate": 0.0015,
"loss": 1.5154,
"step": 4410
},
{
"epoch": 0.46624472573839665,
"grad_norm": 0.638038694858551,
"learning_rate": 0.0015,
"loss": 1.4985,
"step": 4420
},
{
"epoch": 0.46729957805907174,
"grad_norm": 0.5418035984039307,
"learning_rate": 0.0015,
"loss": 1.4996,
"step": 4430
},
{
"epoch": 0.46835443037974683,
"grad_norm": 0.7437496185302734,
"learning_rate": 0.0015,
"loss": 1.4974,
"step": 4440
},
{
"epoch": 0.4694092827004219,
"grad_norm": 0.7251719832420349,
"learning_rate": 0.0015,
"loss": 1.5027,
"step": 4450
},
{
"epoch": 0.4704641350210971,
"grad_norm": 0.5135377049446106,
"learning_rate": 0.0015,
"loss": 1.4915,
"step": 4460
},
{
"epoch": 0.47151898734177217,
"grad_norm": 0.546423614025116,
"learning_rate": 0.0015,
"loss": 1.5014,
"step": 4470
},
{
"epoch": 0.47257383966244726,
"grad_norm": 0.6129780411720276,
"learning_rate": 0.0015,
"loss": 1.5068,
"step": 4480
},
{
"epoch": 0.47362869198312235,
"grad_norm": 0.7669563293457031,
"learning_rate": 0.0015,
"loss": 1.4937,
"step": 4490
},
{
"epoch": 0.47468354430379744,
"grad_norm": 0.5725547671318054,
"learning_rate": 0.0015,
"loss": 1.4939,
"step": 4500
},
{
"epoch": 0.4757383966244726,
"grad_norm": 0.5419220924377441,
"learning_rate": 0.0015,
"loss": 1.498,
"step": 4510
},
{
"epoch": 0.4767932489451477,
"grad_norm": 0.6583626866340637,
"learning_rate": 0.0015,
"loss": 1.4798,
"step": 4520
},
{
"epoch": 0.4778481012658228,
"grad_norm": 0.6703401207923889,
"learning_rate": 0.0015,
"loss": 1.4996,
"step": 4530
},
{
"epoch": 0.47890295358649787,
"grad_norm": 0.7447689771652222,
"learning_rate": 0.0015,
"loss": 1.5011,
"step": 4540
},
{
"epoch": 0.479957805907173,
"grad_norm": 0.6889923214912415,
"learning_rate": 0.0015,
"loss": 1.4989,
"step": 4550
},
{
"epoch": 0.4810126582278481,
"grad_norm": 0.5502811074256897,
"learning_rate": 0.0015,
"loss": 1.4869,
"step": 4560
},
{
"epoch": 0.4820675105485232,
"grad_norm": 0.59233558177948,
"learning_rate": 0.0015,
"loss": 1.4674,
"step": 4570
},
{
"epoch": 0.4831223628691983,
"grad_norm": 0.6495541334152222,
"learning_rate": 0.0015,
"loss": 1.4921,
"step": 4580
},
{
"epoch": 0.48417721518987344,
"grad_norm": 0.7146870493888855,
"learning_rate": 0.0015,
"loss": 1.4795,
"step": 4590
},
{
"epoch": 0.48523206751054854,
"grad_norm": 0.5922014117240906,
"learning_rate": 0.0015,
"loss": 1.4991,
"step": 4600
},
{
"epoch": 0.48628691983122363,
"grad_norm": 0.6273832321166992,
"learning_rate": 0.0015,
"loss": 1.503,
"step": 4610
},
{
"epoch": 0.4873417721518987,
"grad_norm": 0.5555224418640137,
"learning_rate": 0.0015,
"loss": 1.4766,
"step": 4620
},
{
"epoch": 0.4883966244725738,
"grad_norm": 0.507270097732544,
"learning_rate": 0.0015,
"loss": 1.4968,
"step": 4630
},
{
"epoch": 0.48945147679324896,
"grad_norm": 0.5838307738304138,
"learning_rate": 0.0015,
"loss": 1.4976,
"step": 4640
},
{
"epoch": 0.49050632911392406,
"grad_norm": 0.6076533198356628,
"learning_rate": 0.0015,
"loss": 1.4987,
"step": 4650
},
{
"epoch": 0.49156118143459915,
"grad_norm": 0.5476945638656616,
"learning_rate": 0.0015,
"loss": 1.4898,
"step": 4660
},
{
"epoch": 0.49261603375527424,
"grad_norm": 0.6522284150123596,
"learning_rate": 0.0015,
"loss": 1.4873,
"step": 4670
},
{
"epoch": 0.4936708860759494,
"grad_norm": 0.6084822416305542,
"learning_rate": 0.0015,
"loss": 1.485,
"step": 4680
},
{
"epoch": 0.4947257383966245,
"grad_norm": 0.5807247757911682,
"learning_rate": 0.0015,
"loss": 1.479,
"step": 4690
},
{
"epoch": 0.4957805907172996,
"grad_norm": 0.5520457625389099,
"learning_rate": 0.0015,
"loss": 1.4878,
"step": 4700
},
{
"epoch": 0.49683544303797467,
"grad_norm": 0.5583330988883972,
"learning_rate": 0.0015,
"loss": 1.4911,
"step": 4710
},
{
"epoch": 0.4978902953586498,
"grad_norm": 0.8494092226028442,
"learning_rate": 0.0015,
"loss": 1.4865,
"step": 4720
},
{
"epoch": 0.4989451476793249,
"grad_norm": 0.6600103974342346,
"learning_rate": 0.0015,
"loss": 1.4843,
"step": 4730
},
{
"epoch": 0.5,
"grad_norm": 0.702814519405365,
"learning_rate": 0.0015,
"loss": 1.4829,
"step": 4740
},
{
"epoch": 0.5010548523206751,
"grad_norm": 0.6927804350852966,
"learning_rate": 0.0015,
"loss": 1.5035,
"step": 4750
},
{
"epoch": 0.5021097046413502,
"grad_norm": 0.5378764271736145,
"learning_rate": 0.0015,
"loss": 1.4872,
"step": 4760
},
{
"epoch": 0.5031645569620253,
"grad_norm": 0.6337290406227112,
"learning_rate": 0.0015,
"loss": 1.4848,
"step": 4770
},
{
"epoch": 0.5042194092827004,
"grad_norm": 0.5428599119186401,
"learning_rate": 0.0015,
"loss": 1.4964,
"step": 4780
},
{
"epoch": 0.5052742616033755,
"grad_norm": 0.7110328078269958,
"learning_rate": 0.0015,
"loss": 1.4783,
"step": 4790
},
{
"epoch": 0.5063291139240507,
"grad_norm": 0.55487060546875,
"learning_rate": 0.0015,
"loss": 1.4848,
"step": 4800
},
{
"epoch": 0.5073839662447257,
"grad_norm": 0.5764284729957581,
"learning_rate": 0.0015,
"loss": 1.4845,
"step": 4810
},
{
"epoch": 0.5084388185654009,
"grad_norm": 0.8112941384315491,
"learning_rate": 0.0015,
"loss": 1.5027,
"step": 4820
},
{
"epoch": 0.509493670886076,
"grad_norm": 0.4965507984161377,
"learning_rate": 0.0015,
"loss": 1.4856,
"step": 4830
},
{
"epoch": 0.510548523206751,
"grad_norm": 0.5271897912025452,
"learning_rate": 0.0015,
"loss": 1.4789,
"step": 4840
},
{
"epoch": 0.5116033755274262,
"grad_norm": 0.5478551983833313,
"learning_rate": 0.0015,
"loss": 1.4925,
"step": 4850
},
{
"epoch": 0.5126582278481012,
"grad_norm": 0.5441624522209167,
"learning_rate": 0.0015,
"loss": 1.4791,
"step": 4860
},
{
"epoch": 0.5137130801687764,
"grad_norm": 0.8168677687644958,
"learning_rate": 0.0015,
"loss": 1.4746,
"step": 4870
},
{
"epoch": 0.5147679324894515,
"grad_norm": 0.9002726078033447,
"learning_rate": 0.0015,
"loss": 1.496,
"step": 4880
},
{
"epoch": 0.5158227848101266,
"grad_norm": 0.6249581575393677,
"learning_rate": 0.0015,
"loss": 1.4848,
"step": 4890
},
{
"epoch": 0.5168776371308017,
"grad_norm": 0.5859907865524292,
"learning_rate": 0.0015,
"loss": 1.4904,
"step": 4900
},
{
"epoch": 0.5179324894514767,
"grad_norm": 0.6127180457115173,
"learning_rate": 0.0015,
"loss": 1.4868,
"step": 4910
},
{
"epoch": 0.5189873417721519,
"grad_norm": 0.5924661159515381,
"learning_rate": 0.0015,
"loss": 1.4755,
"step": 4920
},
{
"epoch": 0.520042194092827,
"grad_norm": 0.596203625202179,
"learning_rate": 0.0015,
"loss": 1.4851,
"step": 4930
},
{
"epoch": 0.5210970464135021,
"grad_norm": 0.5562626123428345,
"learning_rate": 0.0015,
"loss": 1.4821,
"step": 4940
},
{
"epoch": 0.5221518987341772,
"grad_norm": 0.7145407795906067,
"learning_rate": 0.0015,
"loss": 1.4906,
"step": 4950
},
{
"epoch": 0.5232067510548524,
"grad_norm": 0.8665297627449036,
"learning_rate": 0.0015,
"loss": 1.4746,
"step": 4960
},
{
"epoch": 0.5242616033755274,
"grad_norm": 0.5451953411102295,
"learning_rate": 0.0015,
"loss": 1.4794,
"step": 4970
},
{
"epoch": 0.5253164556962026,
"grad_norm": 0.6681820750236511,
"learning_rate": 0.0015,
"loss": 1.4744,
"step": 4980
},
{
"epoch": 0.5263713080168776,
"grad_norm": 0.6579897999763489,
"learning_rate": 0.0015,
"loss": 1.4787,
"step": 4990
},
{
"epoch": 0.5274261603375527,
"grad_norm": 0.7042908072471619,
"learning_rate": 0.0015,
"loss": 1.4877,
"step": 5000
},
{
"epoch": 0.5284810126582279,
"grad_norm": 0.5207177400588989,
"learning_rate": 0.0015,
"loss": 1.4814,
"step": 5010
},
{
"epoch": 0.5295358649789029,
"grad_norm": 0.5590533018112183,
"learning_rate": 0.0015,
"loss": 1.4736,
"step": 5020
},
{
"epoch": 0.5305907172995781,
"grad_norm": 0.6029673218727112,
"learning_rate": 0.0015,
"loss": 1.4594,
"step": 5030
},
{
"epoch": 0.5316455696202531,
"grad_norm": 0.5923275947570801,
"learning_rate": 0.0015,
"loss": 1.4841,
"step": 5040
},
{
"epoch": 0.5327004219409283,
"grad_norm": 0.5961737632751465,
"learning_rate": 0.0015,
"loss": 1.4741,
"step": 5050
},
{
"epoch": 0.5337552742616034,
"grad_norm": 0.5873507261276245,
"learning_rate": 0.0015,
"loss": 1.4896,
"step": 5060
},
{
"epoch": 0.5348101265822784,
"grad_norm": 0.9630367755889893,
"learning_rate": 0.0015,
"loss": 1.4768,
"step": 5070
},
{
"epoch": 0.5358649789029536,
"grad_norm": 0.6927589774131775,
"learning_rate": 0.0015,
"loss": 1.4899,
"step": 5080
},
{
"epoch": 0.5369198312236287,
"grad_norm": 0.5878579020500183,
"learning_rate": 0.0015,
"loss": 1.4808,
"step": 5090
},
{
"epoch": 0.5379746835443038,
"grad_norm": 0.5937055945396423,
"learning_rate": 0.0015,
"loss": 1.4612,
"step": 5100
},
{
"epoch": 0.5390295358649789,
"grad_norm": 0.7419557571411133,
"learning_rate": 0.0015,
"loss": 1.4776,
"step": 5110
},
{
"epoch": 0.540084388185654,
"grad_norm": 0.6134268641471863,
"learning_rate": 0.0015,
"loss": 1.4769,
"step": 5120
},
{
"epoch": 0.5411392405063291,
"grad_norm": 0.636572539806366,
"learning_rate": 0.0015,
"loss": 1.4738,
"step": 5130
},
{
"epoch": 0.5421940928270043,
"grad_norm": 0.6202790141105652,
"learning_rate": 0.0015,
"loss": 1.4894,
"step": 5140
},
{
"epoch": 0.5432489451476793,
"grad_norm": 0.6438086032867432,
"learning_rate": 0.0015,
"loss": 1.4838,
"step": 5150
},
{
"epoch": 0.5443037974683544,
"grad_norm": 0.5364874005317688,
"learning_rate": 0.0015,
"loss": 1.4778,
"step": 5160
},
{
"epoch": 0.5453586497890295,
"grad_norm": 0.49875974655151367,
"learning_rate": 0.0015,
"loss": 1.4761,
"step": 5170
},
{
"epoch": 0.5464135021097046,
"grad_norm": 0.7342543601989746,
"learning_rate": 0.0015,
"loss": 1.4883,
"step": 5180
},
{
"epoch": 0.5474683544303798,
"grad_norm": 0.5707510113716125,
"learning_rate": 0.0015,
"loss": 1.4696,
"step": 5190
},
{
"epoch": 0.5485232067510548,
"grad_norm": 0.6253118515014648,
"learning_rate": 0.0015,
"loss": 1.4792,
"step": 5200
},
{
"epoch": 0.54957805907173,
"grad_norm": 0.9086582064628601,
"learning_rate": 0.0015,
"loss": 1.4667,
"step": 5210
},
{
"epoch": 0.5506329113924051,
"grad_norm": 0.5875641703605652,
"learning_rate": 0.0015,
"loss": 1.4756,
"step": 5220
},
{
"epoch": 0.5516877637130801,
"grad_norm": 0.5278132557868958,
"learning_rate": 0.0015,
"loss": 1.469,
"step": 5230
},
{
"epoch": 0.5527426160337553,
"grad_norm": 0.5488489866256714,
"learning_rate": 0.0015,
"loss": 1.4764,
"step": 5240
},
{
"epoch": 0.5537974683544303,
"grad_norm": 0.9268010854721069,
"learning_rate": 0.0015,
"loss": 1.468,
"step": 5250
},
{
"epoch": 0.5548523206751055,
"grad_norm": 0.582885205745697,
"learning_rate": 0.0015,
"loss": 1.4772,
"step": 5260
},
{
"epoch": 0.5559071729957806,
"grad_norm": 0.6753647923469543,
"learning_rate": 0.0015,
"loss": 1.4842,
"step": 5270
},
{
"epoch": 0.5569620253164557,
"grad_norm": 0.8944094181060791,
"learning_rate": 0.0015,
"loss": 1.4754,
"step": 5280
},
{
"epoch": 0.5580168776371308,
"grad_norm": 0.5675341486930847,
"learning_rate": 0.0015,
"loss": 1.4751,
"step": 5290
},
{
"epoch": 0.5590717299578059,
"grad_norm": 0.5383754372596741,
"learning_rate": 0.0015,
"loss": 1.4789,
"step": 5300
},
{
"epoch": 0.560126582278481,
"grad_norm": 0.7768966555595398,
"learning_rate": 0.0015,
"loss": 1.4675,
"step": 5310
},
{
"epoch": 0.5611814345991561,
"grad_norm": 0.5521121025085449,
"learning_rate": 0.0015,
"loss": 1.4664,
"step": 5320
},
{
"epoch": 0.5622362869198312,
"grad_norm": 0.5920925140380859,
"learning_rate": 0.0015,
"loss": 1.4784,
"step": 5330
},
{
"epoch": 0.5632911392405063,
"grad_norm": 0.5520569086074829,
"learning_rate": 0.0015,
"loss": 1.4684,
"step": 5340
},
{
"epoch": 0.5643459915611815,
"grad_norm": 0.5458168983459473,
"learning_rate": 0.0015,
"loss": 1.4631,
"step": 5350
},
{
"epoch": 0.5654008438818565,
"grad_norm": 0.7084580659866333,
"learning_rate": 0.0015,
"loss": 1.4717,
"step": 5360
},
{
"epoch": 0.5664556962025317,
"grad_norm": 0.5631759166717529,
"learning_rate": 0.0015,
"loss": 1.4733,
"step": 5370
},
{
"epoch": 0.5675105485232067,
"grad_norm": 0.5456891655921936,
"learning_rate": 0.0015,
"loss": 1.4666,
"step": 5380
},
{
"epoch": 0.5685654008438819,
"grad_norm": 0.629676342010498,
"learning_rate": 0.0015,
"loss": 1.4623,
"step": 5390
},
{
"epoch": 0.569620253164557,
"grad_norm": 0.6019992232322693,
"learning_rate": 0.0015,
"loss": 1.4642,
"step": 5400
},
{
"epoch": 0.570675105485232,
"grad_norm": 0.5304731130599976,
"learning_rate": 0.0015,
"loss": 1.4666,
"step": 5410
},
{
"epoch": 0.5717299578059072,
"grad_norm": 0.5179152488708496,
"learning_rate": 0.0015,
"loss": 1.4606,
"step": 5420
},
{
"epoch": 0.5727848101265823,
"grad_norm": 0.603348433971405,
"learning_rate": 0.0015,
"loss": 1.4595,
"step": 5430
},
{
"epoch": 0.5738396624472574,
"grad_norm": 0.8422991037368774,
"learning_rate": 0.0015,
"loss": 1.4599,
"step": 5440
},
{
"epoch": 0.5748945147679325,
"grad_norm": 0.6259440183639526,
"learning_rate": 0.0015,
"loss": 1.4671,
"step": 5450
},
{
"epoch": 0.5759493670886076,
"grad_norm": 0.7041751146316528,
"learning_rate": 0.0015,
"loss": 1.4686,
"step": 5460
},
{
"epoch": 0.5770042194092827,
"grad_norm": 0.5876805782318115,
"learning_rate": 0.0015,
"loss": 1.4659,
"step": 5470
},
{
"epoch": 0.5780590717299579,
"grad_norm": 0.7383052110671997,
"learning_rate": 0.0015,
"loss": 1.4691,
"step": 5480
},
{
"epoch": 0.5791139240506329,
"grad_norm": 0.6773043870925903,
"learning_rate": 0.0015,
"loss": 1.4761,
"step": 5490
},
{
"epoch": 0.580168776371308,
"grad_norm": 0.68573397397995,
"learning_rate": 0.0015,
"loss": 1.4645,
"step": 5500
},
{
"epoch": 0.5812236286919831,
"grad_norm": 0.6186241507530212,
"learning_rate": 0.0015,
"loss": 1.4729,
"step": 5510
},
{
"epoch": 0.5822784810126582,
"grad_norm": 0.5245220065116882,
"learning_rate": 0.0015,
"loss": 1.4644,
"step": 5520
},
{
"epoch": 0.5833333333333334,
"grad_norm": 0.5127766132354736,
"learning_rate": 0.0015,
"loss": 1.4555,
"step": 5530
},
{
"epoch": 0.5843881856540084,
"grad_norm": 0.5364044308662415,
"learning_rate": 0.0015,
"loss": 1.4537,
"step": 5540
},
{
"epoch": 0.5854430379746836,
"grad_norm": 0.6824843883514404,
"learning_rate": 0.0015,
"loss": 1.472,
"step": 5550
},
{
"epoch": 0.5864978902953587,
"grad_norm": 0.562599778175354,
"learning_rate": 0.0015,
"loss": 1.4768,
"step": 5560
},
{
"epoch": 0.5875527426160337,
"grad_norm": 0.554816722869873,
"learning_rate": 0.0015,
"loss": 1.4608,
"step": 5570
},
{
"epoch": 0.5886075949367089,
"grad_norm": 0.5311549305915833,
"learning_rate": 0.0015,
"loss": 1.4594,
"step": 5580
},
{
"epoch": 0.5896624472573839,
"grad_norm": 0.6091548800468445,
"learning_rate": 0.0015,
"loss": 1.4645,
"step": 5590
},
{
"epoch": 0.5907172995780591,
"grad_norm": 0.5266405344009399,
"learning_rate": 0.0015,
"loss": 1.4602,
"step": 5600
},
{
"epoch": 0.5917721518987342,
"grad_norm": 0.5213054418563843,
"learning_rate": 0.0015,
"loss": 1.4663,
"step": 5610
},
{
"epoch": 0.5928270042194093,
"grad_norm": 0.5713757276535034,
"learning_rate": 0.0015,
"loss": 1.4757,
"step": 5620
},
{
"epoch": 0.5938818565400844,
"grad_norm": 0.7337353825569153,
"learning_rate": 0.0015,
"loss": 1.4669,
"step": 5630
},
{
"epoch": 0.5949367088607594,
"grad_norm": 0.5308693647384644,
"learning_rate": 0.0015,
"loss": 1.4755,
"step": 5640
},
{
"epoch": 0.5959915611814346,
"grad_norm": 0.6518898606300354,
"learning_rate": 0.0015,
"loss": 1.4697,
"step": 5650
},
{
"epoch": 0.5970464135021097,
"grad_norm": 0.6611716747283936,
"learning_rate": 0.0015,
"loss": 1.4649,
"step": 5660
},
{
"epoch": 0.5981012658227848,
"grad_norm": 0.5270864367485046,
"learning_rate": 0.0015,
"loss": 1.4711,
"step": 5670
},
{
"epoch": 0.5991561181434599,
"grad_norm": 0.5916832089424133,
"learning_rate": 0.0015,
"loss": 1.4719,
"step": 5680
},
{
"epoch": 0.6002109704641351,
"grad_norm": 0.9487943649291992,
"learning_rate": 0.0015,
"loss": 1.451,
"step": 5690
},
{
"epoch": 0.6012658227848101,
"grad_norm": 0.5687475800514221,
"learning_rate": 0.0015,
"loss": 1.4591,
"step": 5700
},
{
"epoch": 0.6023206751054853,
"grad_norm": 0.5659775137901306,
"learning_rate": 0.0015,
"loss": 1.4611,
"step": 5710
},
{
"epoch": 0.6033755274261603,
"grad_norm": 0.5397734045982361,
"learning_rate": 0.0015,
"loss": 1.4668,
"step": 5720
},
{
"epoch": 0.6044303797468354,
"grad_norm": 0.6298732757568359,
"learning_rate": 0.0015,
"loss": 1.4634,
"step": 5730
},
{
"epoch": 0.6054852320675106,
"grad_norm": 0.48626509308815,
"learning_rate": 0.0015,
"loss": 1.4601,
"step": 5740
},
{
"epoch": 0.6065400843881856,
"grad_norm": 0.564589262008667,
"learning_rate": 0.0015,
"loss": 1.4363,
"step": 5750
},
{
"epoch": 0.6075949367088608,
"grad_norm": 0.5767689347267151,
"learning_rate": 0.0015,
"loss": 1.4467,
"step": 5760
},
{
"epoch": 0.6086497890295358,
"grad_norm": 0.5449169278144836,
"learning_rate": 0.0015,
"loss": 1.4474,
"step": 5770
},
{
"epoch": 0.609704641350211,
"grad_norm": 0.6358098387718201,
"learning_rate": 0.0015,
"loss": 1.4572,
"step": 5780
},
{
"epoch": 0.6107594936708861,
"grad_norm": 0.645729124546051,
"learning_rate": 0.0015,
"loss": 1.4553,
"step": 5790
},
{
"epoch": 0.6118143459915611,
"grad_norm": 0.6094199419021606,
"learning_rate": 0.0015,
"loss": 1.4608,
"step": 5800
},
{
"epoch": 0.6128691983122363,
"grad_norm": 0.5631193518638611,
"learning_rate": 0.0015,
"loss": 1.4626,
"step": 5810
},
{
"epoch": 0.6139240506329114,
"grad_norm": 0.6091004610061646,
"learning_rate": 0.0015,
"loss": 1.4735,
"step": 5820
},
{
"epoch": 0.6149789029535865,
"grad_norm": 0.6834111213684082,
"learning_rate": 0.0015,
"loss": 1.456,
"step": 5830
},
{
"epoch": 0.6160337552742616,
"grad_norm": 0.7020549774169922,
"learning_rate": 0.0015,
"loss": 1.4693,
"step": 5840
},
{
"epoch": 0.6170886075949367,
"grad_norm": 0.6763173341751099,
"learning_rate": 0.0015,
"loss": 1.4746,
"step": 5850
},
{
"epoch": 0.6181434599156118,
"grad_norm": 0.6037977337837219,
"learning_rate": 0.0015,
"loss": 1.461,
"step": 5860
},
{
"epoch": 0.619198312236287,
"grad_norm": 0.6056775450706482,
"learning_rate": 0.0015,
"loss": 1.4543,
"step": 5870
},
{
"epoch": 0.620253164556962,
"grad_norm": 0.5456224083900452,
"learning_rate": 0.0015,
"loss": 1.4633,
"step": 5880
},
{
"epoch": 0.6213080168776371,
"grad_norm": 0.5920342206954956,
"learning_rate": 0.0015,
"loss": 1.4566,
"step": 5890
},
{
"epoch": 0.6223628691983122,
"grad_norm": 0.5288448333740234,
"learning_rate": 0.0015,
"loss": 1.4568,
"step": 5900
},
{
"epoch": 0.6234177215189873,
"grad_norm": 0.6668508648872375,
"learning_rate": 0.0015,
"loss": 1.452,
"step": 5910
},
{
"epoch": 0.6244725738396625,
"grad_norm": 0.6770985722541809,
"learning_rate": 0.0015,
"loss": 1.4476,
"step": 5920
},
{
"epoch": 0.6255274261603375,
"grad_norm": 0.8159175515174866,
"learning_rate": 0.0015,
"loss": 1.456,
"step": 5930
},
{
"epoch": 0.6265822784810127,
"grad_norm": 0.6664168238639832,
"learning_rate": 0.0015,
"loss": 1.462,
"step": 5940
},
{
"epoch": 0.6276371308016878,
"grad_norm": 0.5339956283569336,
"learning_rate": 0.0015,
"loss": 1.4493,
"step": 5950
},
{
"epoch": 0.6286919831223629,
"grad_norm": 0.6226065158843994,
"learning_rate": 0.0015,
"loss": 1.4577,
"step": 5960
},
{
"epoch": 0.629746835443038,
"grad_norm": 0.5611864328384399,
"learning_rate": 0.0015,
"loss": 1.4578,
"step": 5970
},
{
"epoch": 0.630801687763713,
"grad_norm": 0.49556663632392883,
"learning_rate": 0.0015,
"loss": 1.4398,
"step": 5980
},
{
"epoch": 0.6318565400843882,
"grad_norm": 0.8861162662506104,
"learning_rate": 0.0015,
"loss": 1.4598,
"step": 5990
},
{
"epoch": 0.6329113924050633,
"grad_norm": 0.8057578206062317,
"learning_rate": 0.0015,
"loss": 1.4664,
"step": 6000
},
{
"epoch": 0.6339662447257384,
"grad_norm": 0.6614024639129639,
"learning_rate": 0.0015,
"loss": 1.4549,
"step": 6010
},
{
"epoch": 0.6350210970464135,
"grad_norm": 0.591598629951477,
"learning_rate": 0.0015,
"loss": 1.4594,
"step": 6020
},
{
"epoch": 0.6360759493670886,
"grad_norm": 0.5660628080368042,
"learning_rate": 0.0015,
"loss": 1.4406,
"step": 6030
},
{
"epoch": 0.6371308016877637,
"grad_norm": 0.5870352983474731,
"learning_rate": 0.0015,
"loss": 1.4598,
"step": 6040
},
{
"epoch": 0.6381856540084389,
"grad_norm": 0.5869897603988647,
"learning_rate": 0.0015,
"loss": 1.4405,
"step": 6050
},
{
"epoch": 0.6392405063291139,
"grad_norm": 0.5584044456481934,
"learning_rate": 0.0015,
"loss": 1.4606,
"step": 6060
},
{
"epoch": 0.640295358649789,
"grad_norm": 0.6476332545280457,
"learning_rate": 0.0015,
"loss": 1.4528,
"step": 6070
},
{
"epoch": 0.6413502109704642,
"grad_norm": 0.5212739706039429,
"learning_rate": 0.0015,
"loss": 1.4503,
"step": 6080
},
{
"epoch": 0.6424050632911392,
"grad_norm": 0.5253696441650391,
"learning_rate": 0.0015,
"loss": 1.4489,
"step": 6090
},
{
"epoch": 0.6434599156118144,
"grad_norm": 0.9250648021697998,
"learning_rate": 0.0015,
"loss": 1.4463,
"step": 6100
},
{
"epoch": 0.6445147679324894,
"grad_norm": 0.7041076421737671,
"learning_rate": 0.0015,
"loss": 1.4567,
"step": 6110
},
{
"epoch": 0.6455696202531646,
"grad_norm": 0.5747056603431702,
"learning_rate": 0.0015,
"loss": 1.4528,
"step": 6120
},
{
"epoch": 0.6466244725738397,
"grad_norm": 0.6028876900672913,
"learning_rate": 0.0015,
"loss": 1.4502,
"step": 6130
},
{
"epoch": 0.6476793248945147,
"grad_norm": 1.0948182344436646,
"learning_rate": 0.0015,
"loss": 1.4522,
"step": 6140
},
{
"epoch": 0.6487341772151899,
"grad_norm": 0.6669796109199524,
"learning_rate": 0.0015,
"loss": 1.4637,
"step": 6150
},
{
"epoch": 0.6497890295358649,
"grad_norm": 0.592555046081543,
"learning_rate": 0.0015,
"loss": 1.4567,
"step": 6160
},
{
"epoch": 0.6508438818565401,
"grad_norm": 0.5710325241088867,
"learning_rate": 0.0014854972418331944,
"loss": 1.4339,
"step": 6170
},
{
"epoch": 0.6518987341772152,
"grad_norm": 0.5590702295303345,
"learning_rate": 0.0014650219182191931,
"loss": 1.455,
"step": 6180
},
{
"epoch": 0.6529535864978903,
"grad_norm": 0.5274255871772766,
"learning_rate": 0.001444828815847542,
"loss": 1.4558,
"step": 6190
},
{
"epoch": 0.6540084388185654,
"grad_norm": 0.5450711846351624,
"learning_rate": 0.0014249140447269945,
"loss": 1.4476,
"step": 6200
},
{
"epoch": 0.6550632911392406,
"grad_norm": 0.5037233233451843,
"learning_rate": 0.0014052737684839257,
"loss": 1.4416,
"step": 6210
},
{
"epoch": 0.6561181434599156,
"grad_norm": 0.5511847734451294,
"learning_rate": 0.0013859042036232954,
"loss": 1.4488,
"step": 6220
},
{
"epoch": 0.6571729957805907,
"grad_norm": 0.8101614117622375,
"learning_rate": 0.001366801618799797,
"loss": 1.4366,
"step": 6230
},
{
"epoch": 0.6582278481012658,
"grad_norm": 0.5284369587898254,
"learning_rate": 0.001347962334099052,
"loss": 1.44,
"step": 6240
},
{
"epoch": 0.6592827004219409,
"grad_norm": 0.5404887795448303,
"learning_rate": 0.0013293827203287143,
"loss": 1.4459,
"step": 6250
},
{
"epoch": 0.6603375527426161,
"grad_norm": 0.5109414458274841,
"learning_rate": 0.0013110591983193423,
"loss": 1.4358,
"step": 6260
},
{
"epoch": 0.6613924050632911,
"grad_norm": 0.5763075351715088,
"learning_rate": 0.0012929882382349102,
"loss": 1.4448,
"step": 6270
},
{
"epoch": 0.6624472573839663,
"grad_norm": 0.5637813210487366,
"learning_rate": 0.0012751663588928214,
"loss": 1.442,
"step": 6280
},
{
"epoch": 0.6635021097046413,
"grad_norm": 0.5197626352310181,
"learning_rate": 0.0012575901270932943,
"loss": 1.435,
"step": 6290
},
{
"epoch": 0.6645569620253164,
"grad_norm": 0.7691735029220581,
"learning_rate": 0.0012402561569579936,
"loss": 1.4293,
"step": 6300
},
{
"epoch": 0.6656118143459916,
"grad_norm": 0.6103032827377319,
"learning_rate": 0.0012231611092777745,
"loss": 1.4279,
"step": 6310
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.5120815634727478,
"learning_rate": 0.0012063016908694193,
"loss": 1.4225,
"step": 6320
},
{
"epoch": 0.6677215189873418,
"grad_norm": 0.6098062992095947,
"learning_rate": 0.0011896746539412405,
"loss": 1.4318,
"step": 6330
},
{
"epoch": 0.6687763713080169,
"grad_norm": 0.6241275668144226,
"learning_rate": 0.0011732767954674265,
"loss": 1.4225,
"step": 6340
},
{
"epoch": 0.669831223628692,
"grad_norm": 0.5512659549713135,
"learning_rate": 0.0011571049565710122,
"loss": 1.4272,
"step": 6350
},
{
"epoch": 0.6708860759493671,
"grad_norm": 0.6220884919166565,
"learning_rate": 0.001141156021915355,
"loss": 1.4261,
"step": 6360
},
{
"epoch": 0.6719409282700421,
"grad_norm": 0.5056501030921936,
"learning_rate": 0.001125426919103997,
"loss": 1.4054,
"step": 6370
},
{
"epoch": 0.6729957805907173,
"grad_norm": 0.6238651275634766,
"learning_rate": 0.001109914618088799,
"loss": 1.4082,
"step": 6380
},
{
"epoch": 0.6740506329113924,
"grad_norm": 0.7365453839302063,
"learning_rate": 0.0010946161305862348,
"loss": 1.4211,
"step": 6390
},
{
"epoch": 0.6751054852320675,
"grad_norm": 0.5034173727035522,
"learning_rate": 0.001079528509501728,
"loss": 1.4228,
"step": 6400
},
{
"epoch": 0.6761603375527426,
"grad_norm": 0.5035203695297241,
"learning_rate": 0.0010646488483619261,
"loss": 1.4156,
"step": 6410
},
{
"epoch": 0.6772151898734177,
"grad_norm": 0.5203441381454468,
"learning_rate": 0.0010499742807547976,
"loss": 1.405,
"step": 6420
},
{
"epoch": 0.6782700421940928,
"grad_norm": 0.5264323353767395,
"learning_rate": 0.0010355019797774478,
"loss": 1.4121,
"step": 6430
},
{
"epoch": 0.679324894514768,
"grad_norm": 0.5054761171340942,
"learning_rate": 0.001021229157491546,
"loss": 1.3984,
"step": 6440
},
{
"epoch": 0.680379746835443,
"grad_norm": 0.566516637802124,
"learning_rate": 0.0010071530643862578,
"loss": 1.4024,
"step": 6450
},
{
"epoch": 0.6814345991561181,
"grad_norm": 0.5494766235351562,
"learning_rate": 0.000993270988848579,
"loss": 1.413,
"step": 6460
},
{
"epoch": 0.6824894514767933,
"grad_norm": 0.5305378437042236,
"learning_rate": 0.0009795802566409742,
"loss": 1.4059,
"step": 6470
},
{
"epoch": 0.6835443037974683,
"grad_norm": 0.7373847365379333,
"learning_rate": 0.0009660782303862109,
"loss": 1.4106,
"step": 6480
},
{
"epoch": 0.6845991561181435,
"grad_norm": 0.4979548156261444,
"learning_rate": 0.0009527623090592963,
"loss": 1.4036,
"step": 6490
},
{
"epoch": 0.6856540084388185,
"grad_norm": 0.5357046723365784,
"learning_rate": 0.0009396299274864177,
"loss": 1.4184,
"step": 6500
},
{
"epoch": 0.6867088607594937,
"grad_norm": 0.524194598197937,
"learning_rate": 0.0009266785558507877,
"loss": 1.4194,
"step": 6510
},
{
"epoch": 0.6877637130801688,
"grad_norm": 0.547835111618042,
"learning_rate": 0.0009139056992053016,
"loss": 1.4028,
"step": 6520
},
{
"epoch": 0.6888185654008439,
"grad_norm": 0.6181050539016724,
"learning_rate": 0.000901308896991912,
"loss": 1.3926,
"step": 6530
},
{
"epoch": 0.689873417721519,
"grad_norm": 0.6838914752006531,
"learning_rate": 0.000888885722567627,
"loss": 1.4034,
"step": 6540
},
{
"epoch": 0.6909282700421941,
"grad_norm": 0.5421223044395447,
"learning_rate": 0.0008766337827370438,
"loss": 1.3958,
"step": 6550
},
{
"epoch": 0.6919831223628692,
"grad_norm": 0.6814435720443726,
"learning_rate": 0.000864550717291324,
"loss": 1.3963,
"step": 6560
},
{
"epoch": 0.6930379746835443,
"grad_norm": 0.4749564528465271,
"learning_rate": 0.0008526341985535229,
"loss": 1.3934,
"step": 6570
},
{
"epoch": 0.6940928270042194,
"grad_norm": 0.5062347650527954,
"learning_rate": 0.0008408819309301891,
"loss": 1.3864,
"step": 6580
},
{
"epoch": 0.6951476793248945,
"grad_norm": 0.5495785474777222,
"learning_rate": 0.0008292916504691397,
"loss": 1.3922,
"step": 6590
},
{
"epoch": 0.6962025316455697,
"grad_norm": 0.5529202222824097,
"learning_rate": 0.0008178611244233354,
"loss": 1.3978,
"step": 6600
},
{
"epoch": 0.6972573839662447,
"grad_norm": 0.5035123229026794,
"learning_rate": 0.0008065881508207637,
"loss": 1.3851,
"step": 6610
},
{
"epoch": 0.6983122362869199,
"grad_norm": 0.4964410066604614,
"learning_rate": 0.0007954705580402523,
"loss": 1.3936,
"step": 6620
},
{
"epoch": 0.6993670886075949,
"grad_norm": 0.6161483526229858,
"learning_rate": 0.0007845062043931298,
"loss": 1.384,
"step": 6630
},
{
"epoch": 0.70042194092827,
"grad_norm": 0.523698627948761,
"learning_rate": 0.0007736929777106497,
"loss": 1.3899,
"step": 6640
},
{
"epoch": 0.7014767932489452,
"grad_norm": 0.49107348918914795,
"learning_rate": 0.000763028794937105,
"loss": 1.3831,
"step": 6650
},
{
"epoch": 0.7025316455696202,
"grad_norm": 0.5189265608787537,
"learning_rate": 0.0007525116017285476,
"loss": 1.3804,
"step": 6660
},
{
"epoch": 0.7035864978902954,
"grad_norm": 0.5364611744880676,
"learning_rate": 0.0007421393720570417,
"loss": 1.383,
"step": 6670
},
{
"epoch": 0.7046413502109705,
"grad_norm": 0.48587021231651306,
"learning_rate": 0.0007319101078203694,
"loss": 1.3859,
"step": 6680
},
{
"epoch": 0.7056962025316456,
"grad_norm": 0.5128830075263977,
"learning_rate": 0.0007218218384571178,
"loss": 1.3766,
"step": 6690
},
{
"epoch": 0.7067510548523207,
"grad_norm": 0.46401098370552063,
"learning_rate": 0.0007118726205670703,
"loss": 1.3791,
"step": 6700
},
{
"epoch": 0.7078059071729957,
"grad_norm": 0.48515763878822327,
"learning_rate": 0.0007020605375368316,
"loss": 1.3777,
"step": 6710
},
{
"epoch": 0.7088607594936709,
"grad_norm": 0.6183516979217529,
"learning_rate": 0.000692383699170611,
"loss": 1.3688,
"step": 6720
},
{
"epoch": 0.709915611814346,
"grad_norm": 0.5306627750396729,
"learning_rate": 0.0006828402413260966,
"loss": 1.3779,
"step": 6730
},
{
"epoch": 0.7109704641350211,
"grad_norm": 0.49693840742111206,
"learning_rate": 0.0006734283255553471,
"loss": 1.3816,
"step": 6740
},
{
"epoch": 0.7120253164556962,
"grad_norm": 0.5087738633155823,
"learning_rate": 0.0006641461387506347,
"loss": 1.3838,
"step": 6750
},
{
"epoch": 0.7130801687763713,
"grad_norm": 0.48053449392318726,
"learning_rate": 0.0006549918927951678,
"loss": 1.3748,
"step": 6760
},
{
"epoch": 0.7141350210970464,
"grad_norm": 0.5376818776130676,
"learning_rate": 0.0006459638242186297,
"loss": 1.3777,
"step": 6770
},
{
"epoch": 0.7151898734177216,
"grad_norm": 0.6019859313964844,
"learning_rate": 0.0006370601938574639,
"loss": 1.3763,
"step": 6780
},
{
"epoch": 0.7162447257383966,
"grad_norm": 0.5441733002662659,
"learning_rate": 0.0006282792865198421,
"loss": 1.3809,
"step": 6790
},
{
"epoch": 0.7172995780590717,
"grad_norm": 0.4988143742084503,
"learning_rate": 0.0006196194106552512,
"loss": 1.3668,
"step": 6800
},
{
"epoch": 0.7183544303797469,
"grad_norm": 0.9648948907852173,
"learning_rate": 0.0006110788980286328,
"loss": 1.3717,
"step": 6810
},
{
"epoch": 0.7194092827004219,
"grad_norm": 0.539746105670929,
"learning_rate": 0.0006026561033990158,
"loss": 1.37,
"step": 6820
},
{
"epoch": 0.7204641350210971,
"grad_norm": 0.6249383687973022,
"learning_rate": 0.000594349404202577,
"loss": 1.3662,
"step": 6830
},
{
"epoch": 0.7215189873417721,
"grad_norm": 0.6611071825027466,
"learning_rate": 0.0005861572002400716,
"loss": 1.3574,
"step": 6840
},
{
"epoch": 0.7225738396624473,
"grad_norm": 0.49944445490837097,
"learning_rate": 0.0005780779133685717,
"loss": 1.3672,
"step": 6850
},
{
"epoch": 0.7236286919831224,
"grad_norm": 0.5754870772361755,
"learning_rate": 0.0005701099871974524,
"loss": 1.3603,
"step": 6860
},
{
"epoch": 0.7246835443037974,
"grad_norm": 0.6039024591445923,
"learning_rate": 0.0005622518867885708,
"loss": 1.3578,
"step": 6870
},
{
"epoch": 0.7257383966244726,
"grad_norm": 0.5799662470817566,
"learning_rate": 0.0005545020983605748,
"loss": 1.366,
"step": 6880
},
{
"epoch": 0.7267932489451476,
"grad_norm": 0.6918553113937378,
"learning_rate": 0.0005468591289972898,
"loss": 1.3553,
"step": 6890
},
{
"epoch": 0.7278481012658228,
"grad_norm": 0.5505524277687073,
"learning_rate": 0.0005393215063601232,
"loss": 1.3556,
"step": 6900
},
{
"epoch": 0.7289029535864979,
"grad_norm": 0.5248740315437317,
"learning_rate": 0.0005318877784044343,
"loss": 1.3785,
"step": 6910
},
{
"epoch": 0.729957805907173,
"grad_norm": 0.5648968815803528,
"learning_rate": 0.0005245565130998126,
"loss": 1.3593,
"step": 6920
},
{
"epoch": 0.7310126582278481,
"grad_norm": 0.5115235447883606,
"learning_rate": 0.000517326298154212,
"loss": 1.365,
"step": 6930
},
{
"epoch": 0.7320675105485233,
"grad_norm": 0.5762827396392822,
"learning_rate": 0.0005101957407418877,
"loss": 1.3666,
"step": 6940
},
{
"epoch": 0.7331223628691983,
"grad_norm": 0.5095155239105225,
"learning_rate": 0.0005031634672350829,
"loss": 1.3629,
"step": 6950
},
{
"epoch": 0.7341772151898734,
"grad_norm": 0.4851057827472687,
"learning_rate": 0.0004962281229394129,
"loss": 1.3539,
"step": 6960
},
{
"epoch": 0.7352320675105485,
"grad_norm": 0.47508159279823303,
"learning_rate": 0.0004893883718328983,
"loss": 1.3475,
"step": 6970
},
{
"epoch": 0.7362869198312236,
"grad_norm": 0.49186787009239197,
"learning_rate": 0.0004826428963085938,
"loss": 1.3528,
"step": 6980
},
{
"epoch": 0.7373417721518988,
"grad_norm": 0.5632398128509521,
"learning_rate": 0.00047599039692076457,
"loss": 1.3509,
"step": 6990
},
{
"epoch": 0.7383966244725738,
"grad_norm": 0.5433728098869324,
"learning_rate": 0.0004694295921345622,
"loss": 1.3636,
"step": 7000
},
{
"epoch": 0.739451476793249,
"grad_norm": 0.4962522089481354,
"learning_rate": 0.00046295921807915015,
"loss": 1.335,
"step": 7010
},
{
"epoch": 0.740506329113924,
"grad_norm": 0.6877003908157349,
"learning_rate": 0.00045657802830423164,
"loss": 1.35,
"step": 7020
},
{
"epoch": 0.7415611814345991,
"grad_norm": 0.5457785129547119,
"learning_rate": 0.00045028479353993473,
"loss": 1.3536,
"step": 7030
},
{
"epoch": 0.7426160337552743,
"grad_norm": 0.6001309752464294,
"learning_rate": 0.00044407830146000587,
"loss": 1.3588,
"step": 7040
},
{ |
|
"epoch": 0.7436708860759493, |
|
"grad_norm": 0.5580538511276245, |
|
"learning_rate": 0.0004379573564482676, |
|
"loss": 1.3451, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.7447257383966245, |
|
"grad_norm": 0.6439946293830872, |
|
"learning_rate": 0.0004319207793682963, |
|
"loss": 1.3511, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.7457805907172996, |
|
"grad_norm": 0.5633798241615295, |
|
"learning_rate": 0.0004259674073362731, |
|
"loss": 1.357, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.7468354430379747, |
|
"grad_norm": 0.5077953934669495, |
|
"learning_rate": 0.00042009609349696626, |
|
"loss": 1.3548, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.7478902953586498, |
|
"grad_norm": 0.4964649975299835, |
|
"learning_rate": 0.00041430570680280233, |
|
"loss": 1.3518, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.7489451476793249, |
|
"grad_norm": 0.49853572249412537, |
|
"learning_rate": 0.0004085951317959809, |
|
"loss": 1.3468, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5805441737174988, |
|
"learning_rate": 0.00040296326839359315, |
|
"loss": 1.3572, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.7510548523206751, |
|
"grad_norm": 0.5360079407691956, |
|
"learning_rate": 0.000397409031675703, |
|
"loss": 1.3502, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.7521097046413502, |
|
"grad_norm": 0.5109590888023376, |
|
"learning_rate": 0.00039193135167634786, |
|
"loss": 1.3585, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.7531645569620253, |
|
"grad_norm": 0.49240773916244507, |
|
"learning_rate": 0.00038652917317742123, |
|
"loss": 1.3419, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.7542194092827004, |
|
"grad_norm": 0.5305712819099426, |
|
"learning_rate": 0.0003812014555053956, |
|
"loss": 1.3392, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.7552742616033755, |
|
"grad_norm": 0.45412856340408325, |
|
"learning_rate": 0.00037594717233084774, |
|
"loss": 1.3468, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.7563291139240507, |
|
"grad_norm": 0.6645715236663818, |
|
"learning_rate": 0.0003707653114707471, |
|
"loss": 1.356, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.7573839662447257, |
|
"grad_norm": 0.548294186592102, |
|
"learning_rate": 0.00036565487469346906, |
|
"loss": 1.3334, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.7584388185654009, |
|
"grad_norm": 0.4824378788471222, |
|
"learning_rate": 0.0003606148775264958, |
|
"loss": 1.3351, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.759493670886076, |
|
"grad_norm": 0.4731684625148773, |
|
"learning_rate": 0.0003556443490667684, |
|
"loss": 1.34, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.760548523206751, |
|
"grad_norm": 0.4967353940010071, |
|
"learning_rate": 0.0003507423317936521, |
|
"loss": 1.3434, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.7616033755274262, |
|
"grad_norm": 0.5875260233879089, |
|
"learning_rate": 0.00034590788138448006, |
|
"loss": 1.3572, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.7626582278481012, |
|
"grad_norm": 0.6720730066299438, |
|
"learning_rate": 0.0003411400665326393, |
|
"loss": 1.3519, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.7637130801687764, |
|
"grad_norm": 0.5609285235404968, |
|
"learning_rate": 0.00033643796876816424, |
|
"loss": 1.3482, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.7647679324894515, |
|
"grad_norm": 0.4963809847831726, |
|
"learning_rate": 0.000331800682280803, |
|
"loss": 1.3437, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.7658227848101266, |
|
"grad_norm": 0.4802923798561096, |
|
"learning_rate": 0.0003272273137455226, |
|
"loss": 1.341, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.7668776371308017, |
|
"grad_norm": 0.538068950176239, |
|
"learning_rate": 0.00032271698215041863, |
|
"loss": 1.3383, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.7679324894514767, |
|
"grad_norm": 0.6135417819023132, |
|
"learning_rate": 0.0003182688186269984, |
|
"loss": 1.3399, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.7689873417721519, |
|
"grad_norm": 0.515723466873169, |
|
"learning_rate": 0.0003138819662828017, |
|
"loss": 1.3508, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.770042194092827, |
|
"grad_norm": 0.5416966080665588, |
|
"learning_rate": 0.00030955558003632966, |
|
"loss": 1.355, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.7710970464135021, |
|
"grad_norm": 0.5162220001220703, |
|
"learning_rate": 0.0003052888264542483, |
|
"loss": 1.3358, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.7721518987341772, |
|
"grad_norm": 0.568890392780304, |
|
"learning_rate": 0.0003010808835908368, |
|
"loss": 1.3303, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.7732067510548524, |
|
"grad_norm": 0.5876977443695068, |
|
"learning_rate": 0.00029693094082964785, |
|
"loss": 1.3368, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.7742616033755274, |
|
"grad_norm": 0.6197674870491028, |
|
"learning_rate": 0.0002928381987273508, |
|
"loss": 1.3339, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.7753164556962026, |
|
"grad_norm": 0.5474351644515991, |
|
"learning_rate": 0.0002888018688597272, |
|
"loss": 1.3285, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.7763713080168776, |
|
"grad_norm": 0.5118351578712463, |
|
"learning_rate": 0.0002848211736697894, |
|
"loss": 1.3379, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.7774261603375527, |
|
"grad_norm": 0.46654650568962097, |
|
"learning_rate": 0.00028089534631799183, |
|
"loss": 1.3384, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.7784810126582279, |
|
"grad_norm": 0.48345306515693665, |
|
"learning_rate": 0.0002770236305345076, |
|
"loss": 1.3253, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.7795358649789029, |
|
"grad_norm": 0.5052648186683655, |
|
"learning_rate": 0.00027320528047354093, |
|
"loss": 1.3355, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.7805907172995781, |
|
"grad_norm": 0.5024939179420471, |
|
"learning_rate": 0.00026943956056964773, |
|
"loss": 1.3426, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.7816455696202531, |
|
"grad_norm": 0.4708436131477356, |
|
"learning_rate": 0.0002657257453960364, |
|
"loss": 1.3234, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.7827004219409283, |
|
"grad_norm": 0.49298837780952454, |
|
"learning_rate": 0.0002620631195248222, |
|
"loss": 1.3288, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.7837552742616034, |
|
"grad_norm": 0.48005419969558716, |
|
"learning_rate": 0.00025845097738920735, |
|
"loss": 1.3146, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.7848101265822784, |
|
"grad_norm": 0.48186084628105164, |
|
"learning_rate": 0.0002548886231475606, |
|
"loss": 1.3179, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.7858649789029536, |
|
"grad_norm": 0.48465681076049805, |
|
"learning_rate": 0.0002513753705493713, |
|
"loss": 1.3211, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.7869198312236287, |
|
"grad_norm": 0.46673837304115295, |
|
"learning_rate": 0.0002479105428030497, |
|
"loss": 1.3332, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.7879746835443038, |
|
"grad_norm": 0.526908814907074, |
|
"learning_rate": 0.00024449347244555043, |
|
"loss": 1.3238, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.7890295358649789, |
|
"grad_norm": 0.49608245491981506, |
|
"learning_rate": 0.00024112350121379254, |
|
"loss": 1.3281, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.790084388185654, |
|
"grad_norm": 0.466810941696167, |
|
"learning_rate": 0.000237799979917852, |
|
"loss": 1.3379, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.7911392405063291, |
|
"grad_norm": 0.502129852771759, |
|
"learning_rate": 0.00023452226831590227, |
|
"loss": 1.3303, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.7921940928270043, |
|
"grad_norm": 0.5259256362915039, |
|
"learning_rate": 0.00023128973499087779, |
|
"loss": 1.3365, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 0.7932489451476793, |
|
"grad_norm": 0.500847339630127, |
|
"learning_rate": 0.00022810175722883858, |
|
"loss": 1.3225, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 0.7943037974683544, |
|
"grad_norm": 0.4649074077606201, |
|
"learning_rate": 0.0002249577208990106, |
|
"loss": 1.323, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 0.7953586497890295, |
|
"grad_norm": 0.5383433103561401, |
|
"learning_rate": 0.00022185702033547996, |
|
"loss": 1.321, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 0.7964135021097046, |
|
"grad_norm": 0.4654619097709656, |
|
"learning_rate": 0.00021879905822051756, |
|
"loss": 1.3161, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 0.7974683544303798, |
|
"grad_norm": 0.6051566004753113, |
|
"learning_rate": 0.00021578324546951222, |
|
"loss": 1.3206, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.7985232067510548, |
|
"grad_norm": 0.5371909737586975, |
|
"learning_rate": 0.00021280900111748948, |
|
"loss": 1.3315, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 0.79957805907173, |
|
"grad_norm": 0.5033254027366638, |
|
"learning_rate": 0.00020987575220719483, |
|
"loss": 1.3283, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 0.8006329113924051, |
|
"grad_norm": 0.48168230056762695, |
|
"learning_rate": 0.00020698293367871933, |
|
"loss": 1.3181, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 0.8016877637130801, |
|
"grad_norm": 0.4620802700519562, |
|
"learning_rate": 0.00020412998826064692, |
|
"loss": 1.3242, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 0.8027426160337553, |
|
"grad_norm": 0.47703951597213745, |
|
"learning_rate": 0.00020131636636270178, |
|
"loss": 1.3345, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 0.8037974683544303, |
|
"grad_norm": 0.4817696213722229, |
|
"learning_rate": 0.00019854152596987523, |
|
"loss": 1.3087, |
|
"step": 7620 |
|
}, |
|
    {
      "epoch": 0.8048523206751055,
      "grad_norm": 0.4722531735897064,
      "learning_rate": 0.00019580493253801255,
      "loss": 1.3298,
      "step": 7630
    },
    {
      "epoch": 0.8059071729957806,
      "grad_norm": 0.5582346320152283,
      "learning_rate": 0.00019310605889083838,
      "loss": 1.3213,
      "step": 7640
    },
    {
      "epoch": 0.8069620253164557,
      "grad_norm": 0.5184255838394165,
      "learning_rate": 0.0001904443851184018,
      "loss": 1.3382,
      "step": 7650
    },
    {
      "epoch": 0.8080168776371308,
      "grad_norm": 0.47420576214790344,
      "learning_rate": 0.00018781939847692096,
      "loss": 1.3152,
      "step": 7660
    },
    {
      "epoch": 0.8090717299578059,
      "grad_norm": 0.4663408696651459,
      "learning_rate": 0.00018523059329000844,
      "loss": 1.3216,
      "step": 7670
    },
    {
      "epoch": 0.810126582278481,
      "grad_norm": 0.5505437254905701,
      "learning_rate": 0.0001826774708512579,
      "loss": 1.3372,
      "step": 7680
    },
    {
      "epoch": 0.8111814345991561,
      "grad_norm": 0.48496976494789124,
      "learning_rate": 0.00018015953932817348,
      "loss": 1.3095,
      "step": 7690
    },
    {
      "epoch": 0.8122362869198312,
      "grad_norm": 0.47624102234840393,
      "learning_rate": 0.00017767631366742332,
      "loss": 1.3192,
      "step": 7700
    },
    {
      "epoch": 0.8132911392405063,
      "grad_norm": 0.534982442855835,
      "learning_rate": 0.00017522731550139922,
      "loss": 1.3153,
      "step": 7710
    },
    {
      "epoch": 0.8143459915611815,
      "grad_norm": 0.505545437335968,
      "learning_rate": 0.00017281207305606407,
      "loss": 1.3211,
      "step": 7720
    },
    {
      "epoch": 0.8154008438818565,
      "grad_norm": 0.5112576484680176,
      "learning_rate": 0.00017043012106006926,
      "loss": 1.3274,
      "step": 7730
    },
    {
      "epoch": 0.8164556962025317,
      "grad_norm": 0.4805108308792114,
      "learning_rate": 0.00016808100065512528,
      "loss": 1.3275,
      "step": 7740
    },
    {
      "epoch": 0.8175105485232067,
      "grad_norm": 0.511094868183136,
      "learning_rate": 0.00016576425930760734,
      "loss": 1.3116,
      "step": 7750
    },
    {
      "epoch": 0.8185654008438819,
      "grad_norm": 0.48960891366004944,
      "learning_rate": 0.00016347945072137934,
      "loss": 1.3125,
      "step": 7760
    },
    {
      "epoch": 0.819620253164557,
      "grad_norm": 0.4569883346557617,
      "learning_rate": 0.00016122613475181977,
      "loss": 1.3218,
      "step": 7770
    },
    {
      "epoch": 0.820675105485232,
      "grad_norm": 0.534317672252655,
      "learning_rate": 0.00015900387732103232,
      "loss": 1.3121,
      "step": 7780
    },
    {
      "epoch": 0.8217299578059072,
      "grad_norm": 0.5055877566337585,
      "learning_rate": 0.00015681225033422526,
      "loss": 1.3301,
      "step": 7790
    },
    {
      "epoch": 0.8227848101265823,
      "grad_norm": 0.4627654254436493,
      "learning_rate": 0.00015465083159724345,
      "loss": 1.3202,
      "step": 7800
    },
    {
      "epoch": 0.8238396624472574,
      "grad_norm": 0.4758392572402954,
      "learning_rate": 0.0001525192047352371,
      "loss": 1.3154,
      "step": 7810
    },
    {
      "epoch": 0.8248945147679325,
      "grad_norm": 0.46388524770736694,
      "learning_rate": 0.00015041695911245136,
      "loss": 1.3238,
      "step": 7820
    },
    {
      "epoch": 0.8259493670886076,
      "grad_norm": 0.4874839186668396,
      "learning_rate": 0.00014834368975312172,
      "loss": 1.314,
      "step": 7830
    },
    {
      "epoch": 0.8270042194092827,
      "grad_norm": 0.481004536151886,
      "learning_rate": 0.00014629899726345958,
      "loss": 1.3004,
      "step": 7840
    },
    {
      "epoch": 0.8280590717299579,
      "grad_norm": 0.46791186928749084,
      "learning_rate": 0.00014428248775471316,
      "loss": 1.3086,
      "step": 7850
    },
    {
      "epoch": 0.8291139240506329,
      "grad_norm": 0.4821540117263794,
      "learning_rate": 0.000142293772767289,
      "loss": 1.2983,
      "step": 7860
    },
    {
      "epoch": 0.830168776371308,
      "grad_norm": 0.47279614210128784,
      "learning_rate": 0.00014033246919591922,
      "loss": 1.3116,
      "step": 7870
    },
    {
      "epoch": 0.8312236286919831,
      "grad_norm": 0.5413178205490112,
      "learning_rate": 0.00013839819921586025,
      "loss": 1.3313,
      "step": 7880
    },
    {
      "epoch": 0.8322784810126582,
      "grad_norm": 0.47640782594680786,
      "learning_rate": 0.00013649059021010894,
      "loss": 1.3278,
      "step": 7890
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.4538135528564453,
      "learning_rate": 0.00013460927469762155,
      "loss": 1.3064,
      "step": 7900
    },
    {
      "epoch": 0.8343881856540084,
      "grad_norm": 0.5609554648399353,
      "learning_rate": 0.00013275389026252255,
      "loss": 1.3165,
      "step": 7910
    },
    {
      "epoch": 0.8354430379746836,
      "grad_norm": 0.5607855916023254,
      "learning_rate": 0.0001309240794842889,
      "loss": 1.3146,
      "step": 7920
    },
    {
      "epoch": 0.8364978902953587,
      "grad_norm": 0.47213301062583923,
      "learning_rate": 0.00012911948986889664,
      "loss": 1.3234,
      "step": 7930
    },
    {
      "epoch": 0.8375527426160337,
      "grad_norm": 0.4935596287250519,
      "learning_rate": 0.00012733977378091664,
      "loss": 1.3181,
      "step": 7940
    },
    {
      "epoch": 0.8386075949367089,
      "grad_norm": 0.47754672169685364,
      "learning_rate": 0.00012558458837654633,
      "loss": 1.322,
      "step": 7950
    },
    {
      "epoch": 0.8396624472573839,
      "grad_norm": 0.468411922454834,
      "learning_rate": 0.00012385359553756422,
      "loss": 1.306,
      "step": 7960
    },
    {
      "epoch": 0.8407172995780591,
      "grad_norm": 0.4971744418144226,
      "learning_rate": 0.0001221464618061951,
      "loss": 1.3064,
      "step": 7970
    },
    {
      "epoch": 0.8417721518987342,
      "grad_norm": 0.49825602769851685,
      "learning_rate": 0.0001204628583208727,
      "loss": 1.3032,
      "step": 7980
    },
    {
      "epoch": 0.8428270042194093,
      "grad_norm": 0.5823413729667664,
      "learning_rate": 0.00011880246075288824,
      "loss": 1.3174,
      "step": 7990
    },
    {
      "epoch": 0.8438818565400844,
      "grad_norm": 0.5061120986938477,
      "learning_rate": 0.00011716494924391148,
      "loss": 1.3104,
      "step": 8000
    },
    {
      "epoch": 0.8449367088607594,
      "grad_norm": 0.5084331035614014,
      "learning_rate": 0.00011555000834437363,
      "loss": 1.319,
      "step": 8010
    },
    {
      "epoch": 0.8459915611814346,
      "grad_norm": 0.5041303038597107,
      "learning_rate": 0.00011395732695269907,
      "loss": 1.3127,
      "step": 8020
    },
    {
      "epoch": 0.8470464135021097,
      "grad_norm": 0.49560263752937317,
      "learning_rate": 0.00011238659825537507,
      "loss": 1.3102,
      "step": 8030
    },
    {
      "epoch": 0.8481012658227848,
      "grad_norm": 0.5009322166442871,
      "learning_rate": 0.00011083751966784716,
      "loss": 1.3212,
      "step": 8040
    },
    {
      "epoch": 0.8491561181434599,
      "grad_norm": 0.5117639899253845,
      "learning_rate": 0.00010930979277622952,
      "loss": 1.3042,
      "step": 8050
    },
    {
      "epoch": 0.8502109704641351,
      "grad_norm": 0.4982527196407318,
      "learning_rate": 0.00010780312327981853,
      "loss": 1.3132,
      "step": 8060
    },
    {
      "epoch": 0.8512658227848101,
      "grad_norm": 0.4800499677658081,
      "learning_rate": 0.0001063172209343989,
      "loss": 1.3155,
      "step": 8070
    },
    {
      "epoch": 0.8523206751054853,
      "grad_norm": 0.4854012429714203,
      "learning_rate": 0.000104851799496331,
      "loss": 1.3173,
      "step": 8080
    },
    {
      "epoch": 0.8533755274261603,
      "grad_norm": 0.4941917359828949,
      "learning_rate": 0.00010340657666740917,
      "loss": 1.3044,
      "step": 8090
    },
    {
      "epoch": 0.8544303797468354,
      "grad_norm": 0.4994807541370392,
      "learning_rate": 0.00010198127404047976,
      "loss": 1.2993,
      "step": 8100
    },
    {
      "epoch": 0.8554852320675106,
      "grad_norm": 0.5245217680931091,
      "learning_rate": 0.00010057561704580898,
      "loss": 1.3183,
      "step": 8110
    },
    {
      "epoch": 0.8565400843881856,
      "grad_norm": 0.48488539457321167,
      "learning_rate": 9.918933489818986e-05,
      "loss": 1.3063,
      "step": 8120
    },
    {
      "epoch": 0.8575949367088608,
      "grad_norm": 0.5023126006126404,
      "learning_rate": 9.782216054477828e-05,
      "loss": 1.3086,
      "step": 8130
    },
    {
      "epoch": 0.8586497890295358,
      "grad_norm": 0.4720862805843353,
      "learning_rate": 9.647383061364803e-05,
      "loss": 1.3082,
      "step": 8140
    },
    {
      "epoch": 0.859704641350211,
      "grad_norm": 0.4822885990142822,
      "learning_rate": 9.514408536305497e-05,
      "loss": 1.3015,
      "step": 8150
    },
    {
      "epoch": 0.8607594936708861,
      "grad_norm": 0.4595473110675812,
      "learning_rate": 9.383266863140043e-05,
      "loss": 1.3143,
      "step": 8160
    },
    {
      "epoch": 0.8618143459915611,
      "grad_norm": 0.4833104908466339,
      "learning_rate": 9.25393277878844e-05,
      "loss": 1.3058,
      "step": 8170
    },
    {
      "epoch": 0.8628691983122363,
      "grad_norm": 0.5218775272369385,
      "learning_rate": 9.126381368383881e-05,
      "loss": 1.3149,
      "step": 8180
    },
    {
      "epoch": 0.8639240506329114,
      "grad_norm": 0.5130941271781921,
      "learning_rate": 9.000588060473158e-05,
      "loss": 1.3076,
      "step": 8190
    },
    {
      "epoch": 0.8649789029535865,
      "grad_norm": 0.46893396973609924,
      "learning_rate": 8.876528622283232e-05,
      "loss": 1.3165,
      "step": 8200
    },
    {
      "epoch": 0.8660337552742616,
      "grad_norm": 0.4639524221420288,
      "learning_rate": 8.754179155053052e-05,
      "loss": 1.3093,
      "step": 8210
    },
    {
      "epoch": 0.8670886075949367,
      "grad_norm": 0.4881211817264557,
      "learning_rate": 8.63351608942968e-05,
      "loss": 1.3184,
      "step": 8220
    },
    {
      "epoch": 0.8681434599156118,
      "grad_norm": 0.45782536268234253,
      "learning_rate": 8.514516180927926e-05,
      "loss": 1.3116,
      "step": 8230
    },
    {
      "epoch": 0.869198312236287,
      "grad_norm": 0.4648894667625427,
      "learning_rate": 8.397156505452524e-05,
      "loss": 1.3099,
      "step": 8240
    },
    {
      "epoch": 0.870253164556962,
      "grad_norm": 0.46031248569488525,
      "learning_rate": 8.28141445488205e-05,
      "loss": 1.3229,
      "step": 8250
    },
    {
      "epoch": 0.8713080168776371,
      "grad_norm": 0.4976094365119934,
      "learning_rate": 8.167267732713705e-05,
      "loss": 1.3154,
      "step": 8260
    },
    {
      "epoch": 0.8723628691983122,
      "grad_norm": 0.5178634524345398,
      "learning_rate": 8.054694349768114e-05,
      "loss": 1.3099,
      "step": 8270
    },
    {
      "epoch": 0.8734177215189873,
      "grad_norm": 0.4591270685195923,
      "learning_rate": 7.943672619953359e-05,
      "loss": 1.2948,
      "step": 8280
    },
    {
      "epoch": 0.8744725738396625,
      "grad_norm": 0.5108558535575867,
      "learning_rate": 7.834181156087357e-05,
      "loss": 1.295,
      "step": 8290
    },
    {
      "epoch": 0.8755274261603375,
      "grad_norm": 0.4601665437221527,
      "learning_rate": 7.726198865777852e-05,
      "loss": 1.3214,
      "step": 8300
    },
    {
      "epoch": 0.8765822784810127,
      "grad_norm": 0.47098612785339355,
      "learning_rate": 7.61970494735919e-05,
      "loss": 1.3,
      "step": 8310
    },
    {
      "epoch": 0.8776371308016878,
      "grad_norm": 0.46886900067329407,
      "learning_rate": 7.514678885885086e-05,
      "loss": 1.3178,
      "step": 8320
    },
    {
      "epoch": 0.8786919831223629,
      "grad_norm": 0.48209455609321594,
      "learning_rate": 7.411100449176634e-05,
      "loss": 1.3122,
      "step": 8330
    },
    {
      "epoch": 0.879746835443038,
      "grad_norm": 0.47015511989593506,
      "learning_rate": 7.308949683924792e-05,
      "loss": 1.2993,
      "step": 8340
    },
    {
      "epoch": 0.880801687763713,
      "grad_norm": 0.48078078031539917,
      "learning_rate": 7.208206911846581e-05,
      "loss": 1.3016,
      "step": 8350
    },
    {
      "epoch": 0.8818565400843882,
      "grad_norm": 0.48987117409706116,
      "learning_rate": 7.10885272589427e-05,
      "loss": 1.299,
      "step": 8360
    },
    {
      "epoch": 0.8829113924050633,
      "grad_norm": 0.46399369835853577,
      "learning_rate": 7.010867986516811e-05,
      "loss": 1.3071,
      "step": 8370
    },
    {
      "epoch": 0.8839662447257384,
      "grad_norm": 0.4776746332645416,
      "learning_rate": 6.914233817972799e-05,
      "loss": 1.3153,
      "step": 8380
    },
    {
      "epoch": 0.8850210970464135,
      "grad_norm": 0.4886586368083954,
      "learning_rate": 6.818931604694264e-05,
      "loss": 1.3037,
      "step": 8390
    },
    {
      "epoch": 0.8860759493670886,
      "grad_norm": 0.5076460838317871,
      "learning_rate": 6.724942987700563e-05,
      "loss": 1.3067,
      "step": 8400
    },
    {
      "epoch": 0.8871308016877637,
      "grad_norm": 0.48554062843322754,
      "learning_rate": 6.632249861061733e-05,
      "loss": 1.322,
      "step": 8410
    },
    {
      "epoch": 0.8881856540084389,
      "grad_norm": 0.4504407048225403,
      "learning_rate": 6.540834368410549e-05,
      "loss": 1.3099,
      "step": 8420
    },
    {
      "epoch": 0.8892405063291139,
      "grad_norm": 0.46726056933403015,
      "learning_rate": 6.4506788995027e-05,
      "loss": 1.3061,
      "step": 8430
    },
    {
      "epoch": 0.890295358649789,
      "grad_norm": 0.49006912112236023,
      "learning_rate": 6.361766086824344e-05,
      "loss": 1.2991,
      "step": 8440
    },
    {
      "epoch": 0.8913502109704642,
      "grad_norm": 0.5269207954406738,
      "learning_rate": 6.274078802246449e-05,
      "loss": 1.3083,
      "step": 8450
    },
    {
      "epoch": 0.8924050632911392,
      "grad_norm": 0.5159144997596741,
      "learning_rate": 6.187600153725223e-05,
      "loss": 1.3061,
      "step": 8460
    },
    {
      "epoch": 0.8934599156118144,
      "grad_norm": 0.45548757910728455,
      "learning_rate": 6.1023134820480546e-05,
      "loss": 1.3071,
      "step": 8470
    },
    {
      "epoch": 0.8945147679324894,
      "grad_norm": 0.4620956778526306,
      "learning_rate": 6.0182023576242725e-05,
      "loss": 1.299,
      "step": 8480
    },
    {
      "epoch": 0.8955696202531646,
      "grad_norm": 0.48572149872779846,
      "learning_rate": 5.9352505773201664e-05,
      "loss": 1.3064,
      "step": 8490
    },
    {
      "epoch": 0.8966244725738397,
      "grad_norm": 0.46652519702911377,
      "learning_rate": 5.8534421613376175e-05,
      "loss": 1.2968,
      "step": 8500
    },
    {
      "epoch": 0.8976793248945147,
      "grad_norm": 0.47046583890914917,
      "learning_rate": 5.772761350135759e-05,
      "loss": 1.3085,
      "step": 8510
    },
    {
      "epoch": 0.8987341772151899,
      "grad_norm": 0.47184139490127563,
      "learning_rate": 5.6931926013950586e-05,
      "loss": 1.3039,
      "step": 8520
    },
    {
      "epoch": 0.8997890295358649,
      "grad_norm": 0.45916128158569336,
      "learning_rate": 5.61472058702326e-05,
      "loss": 1.3163,
      "step": 8530
    },
    {
      "epoch": 0.9008438818565401,
      "grad_norm": 0.45790746808052063,
      "learning_rate": 5.53733019020258e-05,
      "loss": 1.3125,
      "step": 8540
    },
    {
      "epoch": 0.9018987341772152,
      "grad_norm": 0.4760609269142151,
      "learning_rate": 5.4610065024776125e-05,
      "loss": 1.3038,
      "step": 8550
    },
    {
      "epoch": 0.9029535864978903,
      "grad_norm": 0.46838563680648804,
      "learning_rate": 5.38573482088337e-05,
      "loss": 1.2975,
      "step": 8560
    },
    {
      "epoch": 0.9040084388185654,
      "grad_norm": 0.5150146484375,
      "learning_rate": 5.3115006451129075e-05,
      "loss": 1.3137,
      "step": 8570
    },
    {
      "epoch": 0.9050632911392406,
      "grad_norm": 0.4655349850654602,
      "learning_rate": 5.2382896747239935e-05,
      "loss": 1.306,
      "step": 8580
    },
    {
      "epoch": 0.9061181434599156,
      "grad_norm": 0.5194929838180542,
      "learning_rate": 5.166087806384275e-05,
      "loss": 1.3102,
      "step": 8590
    },
    {
      "epoch": 0.9071729957805907,
      "grad_norm": 0.4914683699607849,
      "learning_rate": 5.0948811311544186e-05,
      "loss": 1.304,
      "step": 8600
    },
    {
      "epoch": 0.9082278481012658,
      "grad_norm": 0.504937469959259,
      "learning_rate": 5.024655931808697e-05,
      "loss": 1.3056,
      "step": 8610
    },
    {
      "epoch": 0.9092827004219409,
      "grad_norm": 0.502181351184845,
      "learning_rate": 4.955398680192509e-05,
      "loss": 1.2998,
      "step": 8620
    },
    {
      "epoch": 0.9103375527426161,
      "grad_norm": 0.479855477809906,
      "learning_rate": 4.887096034616319e-05,
      "loss": 1.3109,
      "step": 8630
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 0.5619160532951355,
      "learning_rate": 4.819734837285529e-05,
      "loss": 1.2922,
      "step": 8640
    },
    {
      "epoch": 0.9124472573839663,
      "grad_norm": 0.48387664556503296,
      "learning_rate": 4.7533021117657475e-05,
      "loss": 1.2974,
      "step": 8650
    },
    {
      "epoch": 0.9135021097046413,
      "grad_norm": 0.5260951519012451,
      "learning_rate": 4.687785060483031e-05,
      "loss": 1.3008,
      "step": 8660
    },
    {
      "epoch": 0.9145569620253164,
      "grad_norm": 0.4827982187271118,
      "learning_rate": 4.623171062258557e-05,
      "loss": 1.3042,
      "step": 8670
    },
    {
      "epoch": 0.9156118143459916,
      "grad_norm": 0.46398162841796875,
      "learning_rate": 4.559447669877288e-05,
      "loss": 1.3161,
      "step": 8680
    },
    {
      "epoch": 0.9166666666666666,
      "grad_norm": 0.4830174148082733,
      "learning_rate": 4.496602607690141e-05,
      "loss": 1.3072,
      "step": 8690
    },
    {
      "epoch": 0.9177215189873418,
      "grad_norm": 0.48006269335746765,
      "learning_rate": 4.434623769249217e-05,
      "loss": 1.3048,
      "step": 8700
    },
    {
      "epoch": 0.9187763713080169,
      "grad_norm": 0.47011861205101013,
      "learning_rate": 4.373499214975615e-05,
      "loss": 1.3045,
      "step": 8710
    },
    {
      "epoch": 0.919831223628692,
      "grad_norm": 0.49005982279777527,
      "learning_rate": 4.313217169859397e-05,
      "loss": 1.297,
      "step": 8720
    },
    {
      "epoch": 0.9208860759493671,
      "grad_norm": 0.4628673195838928,
      "learning_rate": 4.253766021191256e-05,
      "loss": 1.3106,
      "step": 8730
    },
    {
      "epoch": 0.9219409282700421,
      "grad_norm": 0.4530351459980011,
      "learning_rate": 4.19513431632545e-05,
      "loss": 1.2989,
      "step": 8740
    },
    {
      "epoch": 0.9229957805907173,
      "grad_norm": 0.4768437147140503,
      "learning_rate": 4.1373107604735626e-05,
      "loss": 1.297,
      "step": 8750
    },
    {
      "epoch": 0.9240506329113924,
      "grad_norm": 0.5122866034507751,
      "learning_rate": 4.0802842145286876e-05,
      "loss": 1.2865,
      "step": 8760
    },
    {
      "epoch": 0.9251054852320675,
      "grad_norm": 0.4650144577026367,
      "learning_rate": 4.024043692919589e-05,
      "loss": 1.2969,
      "step": 8770
    },
    {
      "epoch": 0.9261603375527426,
      "grad_norm": 0.45484885573387146,
      "learning_rate": 3.968578361494449e-05,
      "loss": 1.3043,
      "step": 8780
    },
    {
      "epoch": 0.9272151898734177,
      "grad_norm": 0.48278769850730896,
      "learning_rate": 3.91387753543378e-05,
      "loss": 1.2988,
      "step": 8790
    },
    {
      "epoch": 0.9282700421940928,
      "grad_norm": 0.46038711071014404,
      "learning_rate": 3.859930677192103e-05,
      "loss": 1.295,
      "step": 8800
    },
    {
      "epoch": 0.929324894514768,
      "grad_norm": 0.45165112614631653,
      "learning_rate": 3.806727394468005e-05,
      "loss": 1.3157,
      "step": 8810
    },
    {
      "epoch": 0.930379746835443,
      "grad_norm": 0.44949862360954285,
      "learning_rate": 3.7542574382021635e-05,
      "loss": 1.3034,
      "step": 8820
    },
    {
      "epoch": 0.9314345991561181,
      "grad_norm": 0.47061365842819214,
      "learning_rate": 3.702510700602975e-05,
      "loss": 1.3181,
      "step": 8830
    },
    {
      "epoch": 0.9324894514767933,
      "grad_norm": 0.4659769833087921,
      "learning_rate": 3.651477213199394e-05,
      "loss": 1.3,
      "step": 8840
    },
    {
      "epoch": 0.9335443037974683,
      "grad_norm": 0.49148568511009216,
      "learning_rate": 3.601147144920609e-05,
      "loss": 1.3026,
      "step": 8850
    },
    {
      "epoch": 0.9345991561181435,
      "grad_norm": 0.5233895182609558,
      "learning_rate": 3.5515108002021946e-05,
      "loss": 1.3015,
      "step": 8860
    },
    {
      "epoch": 0.9356540084388185,
      "grad_norm": 0.46923792362213135,
      "learning_rate": 3.502558617118352e-05,
      "loss": 1.3034,
      "step": 8870
    },
    {
      "epoch": 0.9367088607594937,
      "grad_norm": 0.4924445152282715,
      "learning_rate": 3.454281165539913e-05,
      "loss": 1.306,
      "step": 8880
    },
    {
      "epoch": 0.9377637130801688,
      "grad_norm": 0.4852265417575836,
      "learning_rate": 3.406669145317717e-05,
      "loss": 1.3083,
      "step": 8890
    },
    {
      "epoch": 0.9388185654008439,
      "grad_norm": 0.46191665530204773,
      "learning_rate": 3.359713384491036e-05,
      "loss": 1.2987,
      "step": 8900
    },
    {
      "epoch": 0.939873417721519,
      "grad_norm": 0.5765952467918396,
      "learning_rate": 3.313404837520694e-05,
      "loss": 1.2972,
      "step": 8910
    },
    {
      "epoch": 0.9409282700421941,
      "grad_norm": 0.46600738167762756,
      "learning_rate": 3.267734583546536e-05,
      "loss": 1.3027,
      "step": 8920
    },
    {
      "epoch": 0.9419831223628692,
      "grad_norm": 0.49096518754959106,
      "learning_rate": 3.222693824668916e-05,
      "loss": 1.3,
      "step": 8930
    },
    {
      "epoch": 0.9430379746835443,
      "grad_norm": 0.4755017161369324,
      "learning_rate": 3.178273884253874e-05,
      "loss": 1.3009,
      "step": 8940
    },
    {
      "epoch": 0.9440928270042194,
      "grad_norm": 0.46774840354919434,
      "learning_rate": 3.134466205261674e-05,
      "loss": 1.3027,
      "step": 8950
    },
    {
      "epoch": 0.9451476793248945,
      "grad_norm": 0.47193506360054016,
      "learning_rate": 3.0912623485983774e-05,
      "loss": 1.3033,
      "step": 8960
    },
    {
      "epoch": 0.9462025316455697,
      "grad_norm": 0.5292438864707947,
      "learning_rate": 3.048653991490141e-05,
      "loss": 1.303,
      "step": 8970
    },
    {
      "epoch": 0.9472573839662447,
      "grad_norm": 0.5362447500228882,
      "learning_rate": 3.0066329258799184e-05,
      "loss": 1.3004,
      "step": 8980
    },
    {
      "epoch": 0.9483122362869199,
      "grad_norm": 0.4858575463294983,
      "learning_rate": 2.965191056846266e-05,
      "loss": 1.3037,
      "step": 8990
    },
    {
      "epoch": 0.9493670886075949,
      "grad_norm": 0.45525240898132324,
      "learning_rate": 2.9243204010439396e-05,
      "loss": 1.2989,
      "step": 9000
    },
    {
      "epoch": 0.95042194092827,
      "grad_norm": 0.4982556104660034,
      "learning_rate": 2.8840130851659852e-05,
      "loss": 1.297,
      "step": 9010
    },
    {
      "epoch": 0.9514767932489452,
      "grad_norm": 0.47312718629837036,
      "learning_rate": 2.844261344427029e-05,
      "loss": 1.2975,
      "step": 9020
    },
    {
      "epoch": 0.9525316455696202,
      "grad_norm": 0.5107970237731934,
      "learning_rate": 2.805057521067472e-05,
      "loss": 1.3169,
      "step": 9030
    },
    {
      "epoch": 0.9535864978902954,
      "grad_norm": 0.46951040625572205,
      "learning_rate": 2.766394062878302e-05,
      "loss": 1.2927,
      "step": 9040
    },
    {
      "epoch": 0.9546413502109705,
      "grad_norm": 0.47983628511428833,
      "learning_rate": 2.7282635217462405e-05,
      "loss": 1.304,
      "step": 9050
    },
    {
      "epoch": 0.9556962025316456,
      "grad_norm": 0.4806886315345764,
      "learning_rate": 2.6906585522189378e-05,
      "loss": 1.3048,
      "step": 9060
    },
    {
      "epoch": 0.9567510548523207,
      "grad_norm": 0.4659721553325653,
      "learning_rate": 2.653571910089951e-05,
      "loss": 1.3048,
      "step": 9070
    },
    {
      "epoch": 0.9578059071729957,
      "grad_norm": 0.46359288692474365,
      "learning_rate": 2.6169964510032243e-05,
      "loss": 1.3059,
      "step": 9080
    },
    {
      "epoch": 0.9588607594936709,
      "grad_norm": 0.46917709708213806,
      "learning_rate": 2.580925129076798e-05,
      "loss": 1.3071,
      "step": 9090
    },
    {
      "epoch": 0.959915611814346,
      "grad_norm": 0.46824902296066284,
      "learning_rate": 2.5453509955454954e-05,
      "loss": 1.2872,
      "step": 9100
    },
    {
      "epoch": 0.9609704641350211,
      "grad_norm": 0.4602473974227905,
      "learning_rate": 2.510267197422317e-05,
      "loss": 1.2974,
      "step": 9110
    },
    {
      "epoch": 0.9620253164556962,
      "grad_norm": 0.4930126667022705,
      "learning_rate": 2.4756669761782806e-05,
      "loss": 1.3093,
      "step": 9120
    },
    {
      "epoch": 0.9630801687763713,
      "grad_norm": 0.4724462628364563,
      "learning_rate": 2.4415436664404643e-05,
      "loss": 1.2922,
      "step": 9130
    },
    {
      "epoch": 0.9641350210970464,
      "grad_norm": 0.5241295099258423,
      "learning_rate": 2.4078906947079882e-05,
      "loss": 1.3119,
      "step": 9140
    },
    {
      "epoch": 0.9651898734177216,
      "grad_norm": 0.4791512191295624,
      "learning_rate": 2.3747015780857007e-05,
      "loss": 1.2982,
      "step": 9150
    },
    {
      "epoch": 0.9662447257383966,
      "grad_norm": 0.4800410270690918,
      "learning_rate": 2.3419699230353144e-05,
      "loss": 1.306,
      "step": 9160
    },
    {
      "epoch": 0.9672995780590717,
      "grad_norm": 0.484261155128479,
      "learning_rate": 2.3096894241437583e-05,
      "loss": 1.3059,
      "step": 9170
    },
    {
      "epoch": 0.9683544303797469,
      "grad_norm": 0.47010815143585205,
      "learning_rate": 2.2778538629085057e-05,
      "loss": 1.2997,
      "step": 9180
    },
    {
      "epoch": 0.9694092827004219,
      "grad_norm": 0.47857558727264404,
      "learning_rate": 2.2464571065396428e-05,
      "loss": 1.2977,
      "step": 9190
    },
    {
      "epoch": 0.9704641350210971,
      "grad_norm": 0.48201119899749756,
      "learning_rate": 2.2154931067784525e-05,
      "loss": 1.2991,
      "step": 9200
    },
    {
      "epoch": 0.9715189873417721,
      "grad_norm": 0.48913222551345825,
      "learning_rate": 2.1849558987322783e-05,
      "loss": 1.3011,
      "step": 9210
    },
    {
      "epoch": 0.9725738396624473,
      "grad_norm": 0.514324426651001,
      "learning_rate": 2.1548395997254516e-05,
      "loss": 1.2995,
      "step": 9220
    },
    {
      "epoch": 0.9736286919831224,
      "grad_norm": 0.47292664647102356,
      "learning_rate": 2.1251384081660546e-05,
      "loss": 1.29,
      "step": 9230
    },
    {
      "epoch": 0.9746835443037974,
      "grad_norm": 0.46298399567604065,
      "learning_rate": 2.0958466024283035e-05,
      "loss": 1.283,
      "step": 9240
    },
    {
      "epoch": 0.9757383966244726,
      "grad_norm": 0.4443071484565735,
      "learning_rate": 2.0669585397503362e-05,
      "loss": 1.2966,
      "step": 9250
    },
    {
      "epoch": 0.9767932489451476,
      "grad_norm": 0.4724883437156677,
      "learning_rate": 2.0384686551471954e-05,
      "loss": 1.2923,
      "step": 9260
    },
    {
      "epoch": 0.9778481012658228,
      "grad_norm": 0.4791187345981598,
      "learning_rate": 2.0103714603387898e-05,
      "loss": 1.3048,
      "step": 9270
    },
    {
      "epoch": 0.9789029535864979,
      "grad_norm": 0.45725077390670776,
      "learning_rate": 1.9826615426926342e-05,
      "loss": 1.2913,
      "step": 9280
    },
    {
      "epoch": 0.979957805907173,
      "grad_norm": 0.4532528817653656,
      "learning_rate": 1.9553335641811623e-05,
      "loss": 1.2992,
      "step": 9290
    },
    {
      "epoch": 0.9810126582278481,
      "grad_norm": 0.47902509570121765,
      "learning_rate": 1.9283822603534143e-05,
      "loss": 1.2967,
      "step": 9300
    },
    {
      "epoch": 0.9820675105485233,
      "grad_norm": 0.48579680919647217,
      "learning_rate": 1.90180243932089e-05,
      "loss": 1.2981,
      "step": 9310
    },
    {
      "epoch": 0.9831223628691983,
      "grad_norm": 0.4836713969707489,
      "learning_rate": 1.8755889807573868e-05,
      "loss": 1.2941,
      "step": 9320
    },
    {
      "epoch": 0.9841772151898734,
      "grad_norm": 0.4789362847805023,
      "learning_rate": 1.8497368349126255e-05,
      "loss": 1.3053,
      "step": 9330
    },
    {
      "epoch": 0.9852320675105485,
      "grad_norm": 0.45681995153427124,
      "learning_rate": 1.824241021639465e-05,
      "loss": 1.3029,
      "step": 9340
    },
    {
      "epoch": 0.9862869198312236,
      "grad_norm": 0.4653390645980835,
      "learning_rate": 1.799096629434529e-05,
      "loss": 1.3135,
      "step": 9350
    },
    {
      "epoch": 0.9873417721518988,
      "grad_norm": 0.4777088463306427,
      "learning_rate": 1.7742988144920578e-05,
      "loss": 1.3055,
      "step": 9360
    },
    {
      "epoch": 0.9883966244725738,
      "grad_norm": 0.4822547137737274,
      "learning_rate": 1.7498427997707978e-05,
      "loss": 1.3066,
      "step": 9370
    },
    {
      "epoch": 0.989451476793249,
      "grad_norm": 0.47279828786849976,
      "learning_rate": 1.7257238740737548e-05,
      "loss": 1.2909,
      "step": 9380
    },
    {
      "epoch": 0.990506329113924,
      "grad_norm": 0.48346567153930664,
      "learning_rate": 1.7019373911406307e-05,
      "loss": 1.2946,
      "step": 9390
    },
    {
      "epoch": 0.9915611814345991,
      "grad_norm": 0.46991166472435,
      "learning_rate": 1.67847876875277e-05,
      "loss": 1.3076,
      "step": 9400
    },
    {
      "epoch": 0.9926160337552743,
      "grad_norm": 0.49248459935188293,
      "learning_rate": 1.655343487850443e-05,
      "loss": 1.2832,
      "step": 9410
    },
    {
      "epoch": 0.9936708860759493,
      "grad_norm": 0.45897576212882996,
      "learning_rate": 1.6325270916622947e-05,
      "loss": 1.3021,
      "step": 9420
    },
    {
      "epoch": 0.9947257383966245,
      "grad_norm": 0.4570271372795105,
      "learning_rate": 1.610025184846797e-05,
      "loss": 1.3033,
      "step": 9430
    },
    {
      "epoch": 0.9957805907172996,
      "grad_norm": 0.4789440631866455,
      "learning_rate": 1.587833432645528e-05,
      "loss": 1.2982,
      "step": 9440
    },
    {
      "epoch": 0.9968354430379747,
      "grad_norm": 0.4682486653327942,
      "learning_rate": 1.5659475600481297e-05,
      "loss": 1.3047,
      "step": 9450
    },
    {
      "epoch": 0.9978902953586498,
      "grad_norm": 0.45759284496307373,
      "learning_rate": 1.544363350968769e-05,
      "loss": 1.292,
      "step": 9460
    },
    {
      "epoch": 0.9989451476793249,
      "grad_norm": 0.46701884269714355,
      "learning_rate": 1.523076647433954e-05,
      "loss": 1.3033,
      "step": 9470
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.3098622560501099,
      "learning_rate": 1.5020833487815421e-05,
      "loss": 1.2948,
      "step": 9480
    }
  ],
  "logging_steps": 10,
  "max_steps": 9480,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.832308198648013e+16,
  "train_batch_size": 1024,
  "trial_name": null,
  "trial_params": null
}