{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.899951667472209,
  "eval_steps": 500,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "grad_norm": 75.8236083984375, "learning_rate": 4.0000000000000003e-07, "loss": 3.7148, "step": 10 },
    { "epoch": 0.01, "grad_norm": 52.23726272583008, "learning_rate": 9e-07, "loss": 3.2721, "step": 20 },
    { "epoch": 0.01, "grad_norm": 35.391998291015625, "learning_rate": 1.4000000000000001e-06, "loss": 2.4262, "step": 30 },
    { "epoch": 0.02, "grad_norm": 15.225934028625488, "learning_rate": 1.9e-06, "loss": 1.8828, "step": 40 },
    { "epoch": 0.02, "grad_norm": 13.488654136657715, "learning_rate": 2.4000000000000003e-06, "loss": 1.5398, "step": 50 },
    { "epoch": 0.03, "grad_norm": 8.773133277893066, "learning_rate": 2.8500000000000002e-06, "loss": 1.2772, "step": 60 },
    { "epoch": 0.03, "grad_norm": 6.736795902252197, "learning_rate": 3.3500000000000005e-06, "loss": 1.1159, "step": 70 },
    { "epoch": 0.04, "grad_norm": 2.9463019371032715, "learning_rate": 3.85e-06, "loss": 0.9883, "step": 80 },
    { "epoch": 0.04, "grad_norm": 1.3851969242095947, "learning_rate": 4.35e-06, "loss": 0.902, "step": 90 },
    { "epoch": 0.05, "grad_norm": 1.4642181396484375, "learning_rate": 4.85e-06, "loss": 0.8701, "step": 100 },
    { "epoch": 0.05, "grad_norm": 0.6514687538146973, "learning_rate": 5.3500000000000004e-06, "loss": 0.8548, "step": 110 },
    { "epoch": 0.06, "grad_norm": 0.4448617994785309, "learning_rate": 5.850000000000001e-06, "loss": 0.8038, "step": 120 },
    { "epoch": 0.06, "grad_norm": 0.5425733327865601, "learning_rate": 6.35e-06, "loss": 0.825, "step": 130 },
    { "epoch": 0.07, "grad_norm": 0.47352057695388794, "learning_rate": 6.8500000000000005e-06, "loss": 0.812, "step": 140 },
    { "epoch": 0.07, "grad_norm": 0.40401744842529297, "learning_rate": 7.35e-06, "loss": 0.7912, "step": 150 },
    { "epoch": 0.08, "grad_norm": 0.3112718164920807, "learning_rate": 7.850000000000001e-06, "loss": 0.8057, "step": 160 },
    { "epoch": 0.08, "grad_norm": 0.45351892709732056, "learning_rate": 8.350000000000001e-06, "loss": 0.7912, "step": 170 },
    { "epoch": 0.09, "grad_norm": 0.38080641627311707, "learning_rate": 8.85e-06, "loss": 0.7956, "step": 180 },
    { "epoch": 0.09, "grad_norm": 0.22949431836605072, "learning_rate": 9.35e-06, "loss": 0.7785, "step": 190 },
    { "epoch": 0.1, "grad_norm": 0.8331744074821472, "learning_rate": 9.85e-06, "loss": 0.7769, "step": 200 },
    { "epoch": 0.1, "grad_norm": 0.30213871598243713, "learning_rate": 1.035e-05, "loss": 0.7797, "step": 210 },
    { "epoch": 0.11, "grad_norm": 0.32015591859817505, "learning_rate": 1.0850000000000001e-05, "loss": 0.7941, "step": 220 },
    { "epoch": 0.11, "grad_norm": 0.22447015345096588, "learning_rate": 1.1350000000000001e-05, "loss": 0.8014, "step": 230 },
    { "epoch": 0.12, "grad_norm": 0.2546006143093109, "learning_rate": 1.185e-05, "loss": 0.771, "step": 240 },
    { "epoch": 0.12, "grad_norm": 0.23833389580249786, "learning_rate": 1.235e-05, "loss": 0.7956, "step": 250 },
    { "epoch": 0.13, "grad_norm": 0.1831669956445694, "learning_rate": 1.285e-05, "loss": 0.7833, "step": 260 },
    { "epoch": 0.13, "grad_norm": 0.19860319793224335, "learning_rate": 1.3350000000000001e-05, "loss": 0.7956, "step": 270 },
    { "epoch": 0.14, "grad_norm": 0.21220380067825317, "learning_rate": 1.3850000000000001e-05, "loss": 0.7942, "step": 280 },
    { "epoch": 0.14, "grad_norm": 0.26025038957595825, "learning_rate": 1.435e-05, "loss": 0.7877, "step": 290 },
    { "epoch": 0.14, "grad_norm": 0.2029470056295395, "learning_rate": 1.485e-05, "loss": 0.7637, "step": 300 },
    { "epoch": 0.15, "grad_norm": 0.21639183163642883, "learning_rate": 1.535e-05, "loss": 0.7885, "step": 310 },
    { "epoch": 0.15, "grad_norm": 0.23670387268066406, "learning_rate": 1.5850000000000002e-05, "loss": 0.7558, "step": 320 },
    { "epoch": 0.16, "grad_norm": 0.2855335772037506, "learning_rate": 1.635e-05, "loss": 0.7854, "step": 330 },
    { "epoch": 0.16, "grad_norm": 0.21515759825706482, "learning_rate": 1.6850000000000003e-05, "loss": 0.7577, "step": 340 },
    { "epoch": 0.17, "grad_norm": 0.18981383740901947, "learning_rate": 1.7349999999999998e-05, "loss": 0.7802, "step": 350 },
    { "epoch": 0.17, "grad_norm": 0.2674611508846283, "learning_rate": 1.785e-05, "loss": 0.7809, "step": 360 },
    { "epoch": 0.18, "grad_norm": 0.24821774661540985, "learning_rate": 1.8350000000000002e-05, "loss": 0.7965, "step": 370 },
    { "epoch": 0.18, "grad_norm": 0.3644929528236389, "learning_rate": 1.885e-05, "loss": 0.7957, "step": 380 },
    { "epoch": 0.19, "grad_norm": 0.3568972647190094, "learning_rate": 1.9350000000000003e-05, "loss": 0.7882, "step": 390 },
    { "epoch": 0.19, "grad_norm": 0.3236319422721863, "learning_rate": 1.985e-05, "loss": 0.7841, "step": 400 },
    { "epoch": 0.2, "grad_norm": 0.24553723633289337, "learning_rate": 2.035e-05, "loss": 0.7824, "step": 410 },
    { "epoch": 0.2, "grad_norm": 0.1855400949716568, "learning_rate": 2.085e-05, "loss": 0.7645, "step": 420 },
    { "epoch": 0.21, "grad_norm": 0.26212435960769653, "learning_rate": 2.135e-05, "loss": 0.7824, "step": 430 },
    { "epoch": 0.21, "grad_norm": 0.3167509138584137, "learning_rate": 2.1850000000000003e-05, "loss": 0.7908, "step": 440 },
    { "epoch": 0.22, "grad_norm": 0.21845023334026337, "learning_rate": 2.235e-05, "loss": 0.7888, "step": 450 },
    { "epoch": 0.22, "grad_norm": 0.30318892002105713, "learning_rate": 2.2850000000000003e-05, "loss": 0.7682, "step": 460 },
    { "epoch": 0.23, "grad_norm": 0.28828638792037964, "learning_rate": 2.3350000000000002e-05, "loss": 0.7588, "step": 470 },
    { "epoch": 0.23, "grad_norm": 0.2422240674495697, "learning_rate": 2.385e-05, "loss": 0.7787, "step": 480 },
    { "epoch": 0.24, "grad_norm": 0.3579326272010803, "learning_rate": 2.435e-05, "loss": 0.7932, "step": 490 },
    { "epoch": 0.24, "grad_norm": 0.3549540042877197, "learning_rate": 2.485e-05, "loss": 0.7634, "step": 500 },
    { "epoch": 0.24, "eval_loss": 0.7744565606117249, "eval_runtime": 43.9626, "eval_samples_per_second": 45.493, "eval_steps_per_second": 0.364, "step": 500 },
    { "epoch": 0.25, "grad_norm": 0.2931281328201294, "learning_rate": 2.5350000000000003e-05, "loss": 0.7763, "step": 510 },
    { "epoch": 0.25, "grad_norm": 0.21228083968162537, "learning_rate": 2.585e-05, "loss": 0.7748, "step": 520 },
    { "epoch": 0.26, "grad_norm": 0.26342302560806274, "learning_rate": 2.6350000000000004e-05, "loss": 0.789, "step": 530 },
    { "epoch": 0.26, "grad_norm": 0.2840302288532257, "learning_rate": 2.6850000000000002e-05, "loss": 0.768, "step": 540 },
    { "epoch": 0.27, "grad_norm": 0.273771196603775, "learning_rate": 2.7350000000000004e-05, "loss": 0.7707, "step": 550 },
    { "epoch": 0.27, "grad_norm": 1.6022837162017822, "learning_rate": 2.7850000000000003e-05, "loss": 0.7705, "step": 560 },
    { "epoch": 0.28, "grad_norm": 0.3082919716835022, "learning_rate": 2.8349999999999998e-05, "loss": 0.7849, "step": 570 },
    { "epoch": 0.28, "grad_norm": 0.25341132283210754, "learning_rate": 2.885e-05, "loss": 0.7658, "step": 580 },
    { "epoch": 0.29, "grad_norm": 0.2250708043575287, "learning_rate": 2.935e-05, "loss": 0.756, "step": 590 },
    { "epoch": 0.29, "grad_norm": 0.23522531986236572, "learning_rate": 2.985e-05, "loss": 0.7692, "step": 600 },
    { "epoch": 0.29, "grad_norm": 0.2448827624320984, "learning_rate": 3.035e-05, "loss": 0.7635, "step": 610 },
    { "epoch": 0.3, "grad_norm": 0.19717586040496826, "learning_rate": 3.0850000000000004e-05, "loss": 0.7548, "step": 620 },
    { "epoch": 0.3, "grad_norm": 0.24299098551273346, "learning_rate": 3.135e-05, "loss": 0.7673, "step": 630 },
    { "epoch": 0.31, "grad_norm": 0.2555953562259674, "learning_rate": 3.185e-05, "loss": 0.7528, "step": 640 },
    { "epoch": 0.31, "grad_norm": 0.2102127969264984, "learning_rate": 3.235e-05, "loss": 0.7678, "step": 650 },
    { "epoch": 0.32, "grad_norm": 0.21767646074295044, "learning_rate": 3.2850000000000006e-05, "loss": 0.7564, "step": 660 },
    { "epoch": 0.32, "grad_norm": 0.3582068085670471, "learning_rate": 3.3350000000000004e-05, "loss": 0.7645, "step": 670 },
    { "epoch": 0.33, "grad_norm": 0.27804431319236755, "learning_rate": 3.385e-05, "loss": 0.7645, "step": 680 },
    { "epoch": 0.33, "grad_norm": 0.2464047521352768, "learning_rate": 3.435e-05, "loss": 0.7325, "step": 690 },
    { "epoch": 0.34, "grad_norm": 0.22323842346668243, "learning_rate": 3.485e-05, "loss": 0.7579, "step": 700 },
    { "epoch": 0.34, "grad_norm": 0.28329703211784363, "learning_rate": 3.535e-05, "loss": 0.7399, "step": 710 },
    { "epoch": 0.35, "grad_norm": 0.2471865713596344, "learning_rate": 3.585e-05, "loss": 0.765, "step": 720 },
    { "epoch": 0.35, "grad_norm": 0.314908504486084, "learning_rate": 3.635e-05, "loss": 0.7529, "step": 730 },
    { "epoch": 0.36, "grad_norm": 0.21466995775699615, "learning_rate": 3.685e-05, "loss": 0.7365, "step": 740 },
    { "epoch": 0.36, "grad_norm": 0.2545580565929413, "learning_rate": 3.735e-05, "loss": 0.7458, "step": 750 },
    { "epoch": 0.37, "grad_norm": 0.2651059329509735, "learning_rate": 3.7850000000000005e-05, "loss": 0.7578, "step": 760 },
    { "epoch": 0.37, "grad_norm": 0.2408379763364792, "learning_rate": 3.8350000000000004e-05, "loss": 0.7283, "step": 770 },
    { "epoch": 0.38, "grad_norm": 0.2575875520706177, "learning_rate": 3.885e-05, "loss": 0.7367, "step": 780 },
    { "epoch": 0.38, "grad_norm": 0.28182050585746765, "learning_rate": 3.935e-05, "loss": 0.7784, "step": 790 },
    { "epoch": 0.39, "grad_norm": 0.20579397678375244, "learning_rate": 3.9850000000000006e-05, "loss": 0.7533, "step": 800 },
    { "epoch": 0.39, "grad_norm": 0.1906532198190689, "learning_rate": 4.0350000000000005e-05, "loss": 0.7544, "step": 810 },
    { "epoch": 0.4, "grad_norm": 0.18311142921447754, "learning_rate": 4.085e-05, "loss": 0.75, "step": 820 },
    { "epoch": 0.4, "grad_norm": 0.1910839080810547, "learning_rate": 4.135e-05, "loss": 0.7538, "step": 830 },
    { "epoch": 0.41, "grad_norm": 0.2178005427122116, "learning_rate": 4.185e-05, "loss": 0.7456, "step": 840 },
    { "epoch": 0.41, "grad_norm": 0.27642378211021423, "learning_rate": 4.235e-05, "loss": 0.744, "step": 850 },
    { "epoch": 0.42, "grad_norm": 0.26685312390327454, "learning_rate": 4.285e-05, "loss": 0.7611, "step": 860 },
    { "epoch": 0.42, "grad_norm": 0.2021845579147339, "learning_rate": 4.335e-05, "loss": 0.758, "step": 870 },
    { "epoch": 0.43, "grad_norm": 0.20516808331012726, "learning_rate": 4.385e-05, "loss": 0.7404, "step": 880 },
    { "epoch": 0.43, "grad_norm": 0.20619668066501617, "learning_rate": 4.435e-05, "loss": 0.7379, "step": 890 },
    { "epoch": 0.43, "grad_norm": 0.22439967095851898, "learning_rate": 4.4850000000000006e-05, "loss": 0.7495, "step": 900 },
    { "epoch": 0.44, "grad_norm": 0.2192295491695404, "learning_rate": 4.5350000000000005e-05, "loss": 0.7377, "step": 910 },
    { "epoch": 0.44, "grad_norm": 0.26503369212150574, "learning_rate": 4.585e-05, "loss": 0.7481, "step": 920 },
    { "epoch": 0.45, "grad_norm": 0.2249353975057602, "learning_rate": 4.635e-05, "loss": 0.742, "step": 930 },
    { "epoch": 0.45, "grad_norm": 0.21413160860538483, "learning_rate": 4.685000000000001e-05, "loss": 0.7485, "step": 940 },
    { "epoch": 0.46, "grad_norm": 0.20971478521823883, "learning_rate": 4.735e-05, "loss": 0.7339, "step": 950 },
    { "epoch": 0.46, "grad_norm": 0.20295798778533936, "learning_rate": 4.785e-05, "loss": 0.722, "step": 960 },
    { "epoch": 0.47, "grad_norm": 0.1957596242427826, "learning_rate": 4.835e-05, "loss": 0.7538, "step": 970 },
    { "epoch": 0.47, "grad_norm": 0.18014560639858246, "learning_rate": 4.885e-05, "loss": 0.7251, "step": 980 },
    { "epoch": 0.48, "grad_norm": 0.19334861636161804, "learning_rate": 4.935e-05, "loss": 0.7444, "step": 990 },
    { "epoch": 0.48, "grad_norm": 0.1753440499305725, "learning_rate": 4.9850000000000006e-05, "loss": 0.7376, "step": 1000 },
    { "epoch": 0.48, "eval_loss": 0.73988938331604, "eval_runtime": 42.9535, "eval_samples_per_second": 46.562, "eval_steps_per_second": 0.372, "step": 1000 },
    { "epoch": 0.49, "grad_norm": 0.24455028772354126, "learning_rate": 4.99999307775404e-05, "loss": 0.7404, "step": 1010 },
    { "epoch": 0.49, "grad_norm": 0.2149570733308792, "learning_rate": 4.999959172968145e-05, "loss": 0.7342, "step": 1020 },
    { "epoch": 0.5, "grad_norm": 0.1931283175945282, "learning_rate": 4.999897014592085e-05, "loss": 0.735, "step": 1030 },
    { "epoch": 0.5, "grad_norm": 0.2331906110048294, "learning_rate": 4.999806603328352e-05, "loss": 0.7145, "step": 1040 },
    { "epoch": 0.51, "grad_norm": 0.21177324652671814, "learning_rate": 4.999687940198738e-05, "loss": 0.7308, "step": 1050 },
    { "epoch": 0.51, "grad_norm": 0.2026621401309967, "learning_rate": 4.9995410265443286e-05, "loss": 0.7445, "step": 1060 },
    { "epoch": 0.52, "grad_norm": 0.19811251759529114, "learning_rate": 4.999365864025487e-05, "loss": 0.738, "step": 1070 },
    { "epoch": 0.52, "grad_norm": 0.1981566846370697, "learning_rate": 4.9991624546218334e-05, "loss": 0.7286, "step": 1080 },
    { "epoch": 0.53, "grad_norm": 0.17122291028499603, "learning_rate": 4.9989308006322235e-05, "loss": 0.737, "step": 1090 },
    { "epoch": 0.53, "grad_norm": 0.2182038575410843, "learning_rate": 4.9986709046747225e-05, "loss": 0.7354, "step": 1100 },
    { "epoch": 0.54, "grad_norm": 0.1823730617761612, "learning_rate": 4.9983827696865764e-05, "loss": 0.725, "step": 1110 },
    { "epoch": 0.54, "grad_norm": 0.2195768803358078, "learning_rate": 4.998066398924177e-05, "loss": 0.7173, "step": 1120 },
    { "epoch": 0.55, "grad_norm": 0.18349726498126984, "learning_rate": 4.997721795963028e-05, "loss": 0.7159, "step": 1130 },
    { "epoch": 0.55, "grad_norm": 0.2440386265516281, "learning_rate": 4.997348964697699e-05, "loss": 0.7171, "step": 1140 },
    { "epoch": 0.56, "grad_norm": 0.20371629297733307, "learning_rate": 4.9969479093417894e-05, "loss": 0.7179, "step": 1150 },
    { "epoch": 0.56, "grad_norm": 0.2982828617095947, "learning_rate": 4.9965186344278746e-05, "loss": 0.7222, "step": 1160 },
    { "epoch": 0.57, "grad_norm": 0.2097097784280777, "learning_rate": 4.9960611448074555e-05, "loss": 0.7174, "step": 1170 },
    { "epoch": 0.57, "grad_norm": 0.2264368236064911, "learning_rate": 4.995575445650907e-05, "loss": 0.7079, "step": 1180 },
    { "epoch": 0.58, "grad_norm": 0.24737605452537537, "learning_rate": 4.995061542447418e-05, "loss": 0.7272, "step": 1190 },
    { "epoch": 0.58, "grad_norm": 0.19530241191387177, "learning_rate": 4.9945194410049254e-05, "loss": 0.7216, "step": 1200 },
    { "epoch": 0.58, "grad_norm": 0.2791181802749634, "learning_rate": 4.993949147450055e-05, "loss": 0.712, "step": 1210 },
    { "epoch": 0.59, "grad_norm": 0.2095661610364914, "learning_rate": 4.993350668228049e-05, "loss": 0.7067, "step": 1220 },
    { "epoch": 0.59, "grad_norm": 0.17129307985305786, "learning_rate": 4.9927240101026915e-05, "loss": 0.7087, "step": 1230 },
    { "epoch": 0.6, "grad_norm": 0.2154540866613388, "learning_rate": 4.992069180156235e-05, "loss": 0.7411, "step": 1240 },
    { "epoch": 0.6, "grad_norm": 0.22642803192138672, "learning_rate": 4.991386185789319e-05, "loss": 0.7266, "step": 1250 },
    { "epoch": 0.61, "grad_norm": 0.19676081836223602, "learning_rate": 4.9906750347208866e-05, "loss": 0.7012, "step": 1260 },
    { "epoch": 0.61, "grad_norm": 0.2095811814069748, "learning_rate": 4.989935734988098e-05, "loss": 0.7162, "step": 1270 },
    { "epoch": 0.62, "grad_norm": 0.21455398201942444, "learning_rate": 4.9891682949462385e-05, "loss": 0.7177, "step": 1280 },
    { "epoch": 0.62, "grad_norm": 0.20205985009670258, "learning_rate": 4.988372723268623e-05, "loss": 0.718, "step": 1290 },
    { "epoch": 0.63, "grad_norm": 0.18780440092086792, "learning_rate": 4.987549028946502e-05, "loss": 0.7103, "step": 1300 },
    { "epoch": 0.63, "grad_norm": 0.20362792909145355, "learning_rate": 4.986697221288956e-05, "loss": 0.7304, "step": 1310 },
    { "epoch": 0.64, "grad_norm": 0.2820493280887604, "learning_rate": 4.985817309922792e-05, "loss": 0.7182, "step": 1320 },
    { "epoch": 0.64, "grad_norm": 0.20787598192691803, "learning_rate": 4.984909304792435e-05, "loss": 0.7142, "step": 1330 },
    { "epoch": 0.65, "grad_norm": 0.21624509990215302, "learning_rate": 4.983973216159813e-05, "loss": 0.7208, "step": 1340 },
    { "epoch": 0.65, "grad_norm": 0.19343602657318115, "learning_rate": 4.983009054604246e-05, "loss": 0.7097, "step": 1350 },
    { "epoch": 0.66, "grad_norm": 0.254402756690979, "learning_rate": 4.9820168310223215e-05, "loss": 0.7193, "step": 1360 },
    { "epoch": 0.66, "grad_norm": 0.1940561830997467, "learning_rate": 4.980996556627774e-05, "loss": 0.6988, "step": 1370 },
    { "epoch": 0.67, "grad_norm": 0.22593462467193604, "learning_rate": 4.9799482429513576e-05, "loss": 0.7054, "step": 1380 },
    { "epoch": 0.67, "grad_norm": 0.22020135819911957, "learning_rate": 4.978871901840716e-05, "loss": 0.7221, "step": 1390 },
    { "epoch": 0.68, "grad_norm": 0.22807644307613373, "learning_rate": 4.977767545460248e-05, "loss": 0.7213, "step": 1400 },
    { "epoch": 0.68, "grad_norm": 0.1925562173128128, "learning_rate": 4.9766351862909703e-05, "loss": 0.7127, "step": 1410 },
    { "epoch": 0.69, "grad_norm": 0.20845136046409607, "learning_rate": 4.9754748371303775e-05, "loss": 0.7207, "step": 1420 },
    { "epoch": 0.69, "grad_norm": 0.18583528697490692, "learning_rate": 4.974286511092294e-05, "loss": 0.7098, "step": 1430 },
    { "epoch": 0.7, "grad_norm": 0.2019319236278534, "learning_rate": 4.973070221606732e-05, "loss": 0.7029, "step": 1440 },
    { "epoch": 0.7, "grad_norm": 0.1966458261013031, "learning_rate": 4.971825982419731e-05, "loss": 0.7241, "step": 1450 },
    { "epoch": 0.71, "grad_norm": 0.20924623310565948, "learning_rate": 4.970553807593214e-05, "loss": 0.7126, "step": 1460 },
    { "epoch": 0.71, "grad_norm": 0.20655803382396698, "learning_rate": 4.969253711504814e-05, "loss": 0.6903, "step": 1470 },
    { "epoch": 0.72, "grad_norm": 0.2179209142923355, "learning_rate": 4.967925708847727e-05, "loss": 0.683, "step": 1480 },
    { "epoch": 0.72, "grad_norm": 0.2198410928249359, "learning_rate": 4.966569814630534e-05, "loss": 0.7087, "step": 1490 },
    { "epoch": 0.72, "grad_norm": 0.17970815300941467, "learning_rate": 4.9651860441770374e-05, "loss": 0.7006, "step": 1500 },
    { "epoch": 0.72, "eval_loss": 0.7137542366981506, "eval_runtime": 43.0052, "eval_samples_per_second": 46.506, "eval_steps_per_second": 0.372, "step": 1500 },
    { "epoch": 0.73, "grad_norm": 0.1931023746728897, "learning_rate": 4.963774413126086e-05, "loss": 0.7245, "step": 1510 },
    { "epoch": 0.73, "grad_norm": 0.21812495589256287, "learning_rate": 4.9623349374313973e-05, "loss": 0.7119, "step": 1520 },
    { "epoch": 0.74, "grad_norm": 0.20784322917461395, "learning_rate": 4.960867633361381e-05, "loss": 0.704, "step": 1530 },
    { "epoch": 0.74, "grad_norm": 0.18697650730609894, "learning_rate": 4.9593725174989507e-05, "loss": 0.7129, "step": 1540 },
    { "epoch": 0.75, "grad_norm": 0.25061744451522827, "learning_rate": 4.957849606741338e-05, "loss": 0.6816, "step": 1550 },
    { "epoch": 0.75, "grad_norm": 0.1888163685798645, "learning_rate": 4.956298918299903e-05, "loss": 0.6905, "step": 1560 },
    { "epoch": 0.76, "grad_norm": 0.1991235315799713, "learning_rate": 4.954720469699938e-05, "loss": 0.7258, "step": 1570 },
    { "epoch": 0.76, "grad_norm": 0.18779252469539642, "learning_rate": 4.953114278780471e-05, "loss": 0.6918, "step": 1580 },
    { "epoch": 0.77, "grad_norm": 0.21268677711486816, "learning_rate": 4.951480363694061e-05, "loss": 0.6995, "step": 1590 },
    { "epoch": 0.77, "grad_norm": 0.20242677628993988, "learning_rate": 4.949818742906597e-05, "loss": 0.7132, "step": 1600 },
    { "epoch": 0.78, "grad_norm": 0.2314455509185791, "learning_rate": 4.9481294351970874e-05, "loss": 0.6866, "step": 1610 },
    { "epoch": 0.78, "grad_norm": 0.19638539850711823, "learning_rate": 4.9464124596574465e-05, "loss": 0.6993, "step": 1620 },
    { "epoch": 0.79, "grad_norm": 0.185603529214859, "learning_rate": 4.944667835692281e-05, "loss": 0.6971, "step": 1630 },
    { "epoch": 0.79, "grad_norm": 0.1594298928976059, "learning_rate": 4.942895583018669e-05, "loss": 0.7258, "step": 1640 },
    { "epoch": 0.8, "grad_norm": 0.21844029426574707, "learning_rate": 4.9410957216659374e-05, "loss": 0.7022, "step": 1650 },
    { "epoch": 0.8, "grad_norm": 0.19146642088890076, "learning_rate": 4.9392682719754364e-05, "loss": 0.6954, "step": 1660 },
    { "epoch": 0.81, "grad_norm": 0.20112347602844238, "learning_rate": 4.9374132546003096e-05, "loss": 0.7017, "step": 1670 },
    { "epoch": 0.81, "grad_norm": 0.22593769431114197, "learning_rate": 4.935530690505259e-05, "loss": 0.7397, "step": 1680 },
    { "epoch": 0.82, "grad_norm": 0.2611504793167114, "learning_rate": 4.933620600966311e-05, "loss": 0.7048, "step": 1690 },
    { "epoch": 0.82, "grad_norm": 0.23995471000671387, "learning_rate": 4.931683007570571e-05, "loss": 0.709, "step": 1700 },
    { "epoch": 0.83, "grad_norm": 0.22278735041618347, "learning_rate": 4.9297179322159856e-05, "loss": 0.7116, "step": 1710 },
    { "epoch": 0.83, "grad_norm": 0.2096502035856247, "learning_rate": 4.927725397111093e-05, "loss": 0.7061, "step": 1720 },
    { "epoch": 0.84, "grad_norm": 0.22335517406463623, "learning_rate": 4.9257054247747644e-05, "loss": 0.6982, "step": 1730 },
    { "epoch": 0.84, "grad_norm": 0.18597687780857086, "learning_rate": 4.923658038035965e-05, "loss": 0.6927, "step": 1740 },
    { "epoch": 0.85, "grad_norm": 0.1971946507692337, "learning_rate": 4.9215832600334816e-05, "loss": 0.6768, "step": 1750 },
    { "epoch": 0.85, "grad_norm": 0.2101408839225769, "learning_rate": 4.9194811142156674e-05, "loss": 0.6933, "step": 1760 },
    { "epoch": 0.86, "grad_norm": 0.17352430522441864, "learning_rate": 4.917351624340178e-05, "loss": 0.6918, "step": 1770 },
    { "epoch": 0.86, "grad_norm": 0.18568973243236542, "learning_rate": 4.915194814473699e-05, "loss": 0.6711, "step": 1780 },
    { "epoch": 0.87, "grad_norm": 0.22991472482681274, "learning_rate": 4.913010708991679e-05, "loss": 0.6938, "step": 1790 },
    { "epoch": 0.87, "grad_norm": 0.201872780919075, "learning_rate": 4.910799332578048e-05, "loss": 0.7051, "step": 1800 },
    { "epoch": 0.87, "grad_norm": 0.2069864720106125, "learning_rate": 4.908560710224943e-05, "loss": 0.6983, "step": 1810 },
    { "epoch": 0.88, "grad_norm": 0.2075955718755722, "learning_rate": 4.906294867232426e-05, "loss": 0.6996, "step": 1820 },
    { "epoch": 0.88, "grad_norm": 0.19524317979812622, "learning_rate": 4.904001829208194e-05, "loss": 0.691, "step": 1830 },
    { "epoch": 0.89, "grad_norm": 0.1967206597328186, "learning_rate": 4.901681622067293e-05, "loss": 0.6964, "step": 1840 },
    { "epoch": 0.89, "grad_norm": 0.2195650041103363, "learning_rate": 4.899334272031825e-05, "loss": 0.6982, "step": 1850 },
    { "epoch": 0.9, "grad_norm": 0.23072399199008942, "learning_rate": 4.8969598056306475e-05, "loss": 0.6838, "step": 1860 },
    { "epoch": 0.9, "grad_norm": 0.1950792670249939, "learning_rate": 4.89455824969908e-05, "loss": 0.6984, "step": 1870 },
    { "epoch": 0.91, "grad_norm": 0.2289438396692276, "learning_rate": 4.892129631378597e-05, "loss": 0.7012, "step": 1880 },
    { "epoch": 0.91, "grad_norm": 0.22316494584083557, "learning_rate": 4.8896739781165215e-05, "loss": 0.6904, "step": 1890 },
    { "epoch": 0.92, "grad_norm": 0.24066634476184845, "learning_rate": 4.8871913176657135e-05, "loss": 0.6974, "step": 1900 },
    { "epoch": 0.92, "grad_norm": 0.18922199308872223, "learning_rate": 4.8846816780842606e-05, "loss": 0.686, "step": 1910 },
    { "epoch": 0.93, "grad_norm": 0.2425553798675537, "learning_rate": 4.882145087735158e-05, "loss": 0.715, "step": 1920 },
    { "epoch": 0.93, "grad_norm": 0.2267230898141861, "learning_rate": 4.879581575285988e-05, "loss": 0.698, "step": 1930 },
    { "epoch": 0.94, "grad_norm": 0.22119645774364471, "learning_rate": 4.876991169708592e-05, "loss": 0.6942, "step": 1940 },
    { "epoch": 0.94, "grad_norm": 0.22854161262512207, "learning_rate": 4.874373900278756e-05, "loss": 0.6912, "step": 1950 },
    { "epoch": 0.95, "grad_norm": 0.1927136778831482, "learning_rate": 4.871729796575863e-05, "loss": 0.6829, "step": 1960 },
    { "epoch": 0.95, "grad_norm": 0.2238394170999527, "learning_rate": 4.869058888482571e-05, "loss": 0.6831, "step": 1970 },
    { "epoch": 0.96, "grad_norm": 0.17643775045871735, "learning_rate": 4.86636120618447e-05, "loss": 0.6875, "step": 1980 },
    { "epoch": 0.96, "grad_norm": 0.2161882072687149, "learning_rate": 4.863636780169742e-05, "loss": 0.6893, "step": 1990 },
    { "epoch": 0.97, "grad_norm": 0.20689886808395386, "learning_rate": 4.860885641228815e-05, "loss": 0.6721, "step": 2000 },
    { "epoch": 0.97, "eval_loss": 0.701475203037262, "eval_runtime": 43.016, "eval_samples_per_second": 46.494, "eval_steps_per_second": 0.372, "step": 2000 },
    { "epoch": 0.97, "grad_norm": 0.18866655230522156, "learning_rate": 4.8581078204540196e-05, "loss": 0.6941, "step": 2010 },
    { "epoch": 0.98, "grad_norm": 0.20633092522621155, "learning_rate": 4.855303349239231e-05, "loss": 0.6943, "step": 2020 },
    { "epoch": 0.98, "grad_norm": 0.2335944026708603, "learning_rate": 4.85247225927952e-05, "loss": 0.6986, "step": 2030 },
    { "epoch": 0.99, "grad_norm": 0.18950659036636353, "learning_rate": 4.8496145825707905e-05, "loss": 0.6894, "step": 2040 },
    { "epoch": 0.99, "grad_norm": 0.22117875516414642, "learning_rate": 4.8467303514094204e-05, "loss": 0.6956, "step": 2050 },
    { "epoch": 1.0, "grad_norm": 0.23478396236896515, "learning_rate": 4.8438195983918967e-05, "loss": 0.6919, "step": 2060 },
    { "epoch": 1.0, "grad_norm": 0.24754443764686584, "learning_rate": 4.8408823564144454e-05, "loss": 0.6904, "step": 2070 },
    { "epoch": 1.01, "grad_norm": 0.19141903519630432, "learning_rate": 4.837918658672661e-05, "loss": 0.6914, "step": 2080 },
    { "epoch": 1.01, "grad_norm": 0.21387813985347748, "learning_rate": 4.834928538661131e-05, "loss": 0.6965, "step": 2090 },
    { "epoch": 1.01, "grad_norm": 0.20495550334453583, "learning_rate": 4.831912030173058e-05, "loss": 0.6949, "step": 2100 },
    { "epoch": 1.02, "grad_norm": 0.35174649953842163, "learning_rate": 4.8288691672998765e-05, "loss": 0.6813, "step": 2110 },
    { "epoch": 1.02, "grad_norm": 0.2461618334054947, "learning_rate": 4.825799984430867e-05, "loss": 0.6687, "step": 2120 },
    { "epoch": 1.03, "grad_norm": 0.20039792358875275, "learning_rate": 4.8227045162527714e-05, "loss": 0.6737, "step": 2130 },
    { "epoch": 1.03, "grad_norm": 0.19774436950683594, "learning_rate": 4.8195827977493955e-05, "loss": 0.6815, "step": 2140 },
    { "epoch": 1.04, "grad_norm": 0.21472251415252686, "learning_rate": 4.8164348642012194e-05, "loss": 0.6957, "step": 2150 },
    { "epoch": 1.04, "grad_norm": 0.1990274041891098, "learning_rate": 4.813260751184992e-05, "loss": 0.6916, "step": 2160 },
    { "epoch": 1.05, "grad_norm": 0.20387563109397888, "learning_rate": 4.810060494573335e-05, "loss": 0.6814, "step": 2170 },
    { "epoch": 1.05, "grad_norm": 0.1792985051870346, "learning_rate": 4.806834130534336e-05, "loss": 0.698, "step": 2180 },
    { "epoch": 1.06, "grad_norm": 0.22255714237689972, "learning_rate": 4.803581695531134e-05, "loss": 0.6765, "step": 2190 },
    { "epoch": 1.06, "grad_norm": 0.19549421966075897, "learning_rate": 4.8003032263215185e-05, "loss": 0.6867, "step": 2200 },
    { "epoch": 1.07, "grad_norm": 0.1929677277803421, "learning_rate": 4.796998759957504e-05, "loss": 0.6935, "step": 2210 },
    { "epoch": 1.07, "grad_norm": 0.22705228626728058, "learning_rate": 4.793668333784915e-05, "loss": 0.6897, "step": 2220 },
    { "epoch": 1.08, "grad_norm": 0.21798522770404816, "learning_rate": 4.790311985442966e-05, "loss": 0.6694, "step": 2230 },
    { "epoch": 1.08, "grad_norm": 0.19095788896083832, "learning_rate": 4.7869297528638315e-05, "loss": 0.6699, "step": 2240 },
    { "epoch": 1.09, "grad_norm": 0.23002374172210693, "learning_rate": 4.7835216742722225e-05, "loss": 0.6807, "step": 2250 },
    { "epoch": 1.09, "grad_norm": 0.2107260823249817, "learning_rate": 4.780087788184947e-05, "loss": 0.6855, "step": 2260 },
    { "epoch": 1.1, "grad_norm": 0.23853424191474915, "learning_rate": 4.776628133410487e-05, "loss": 0.6946, "step": 2270 },
    { "epoch": 1.1, "grad_norm": 0.25372380018234253, "learning_rate": 4.7731427490485455e-05, "loss": 0.6724, "step": 2280 },
    { "epoch": 1.11, "grad_norm": 0.182255819439888, "learning_rate": 4.7696316744896146e-05, "loss": 0.6976, "step": 2290 },
    { "epoch": 1.11, "grad_norm": 0.20137862861156464, "learning_rate": 4.7660949494145276e-05, "loss": 0.6853, "step": 2300 },
    { "epoch": 1.12, "grad_norm": 0.19899927079677582, "learning_rate": 4.7625326137940106e-05, "loss": 0.6696, "step": 2310 },
    { "epoch": 1.12, "grad_norm": 0.24147948622703552, "learning_rate": 4.758944707888228e-05, "loss": 0.6615, "step": 2320 },
    { "epoch": 1.13, "grad_norm": 0.19519510865211487, "learning_rate": 4.7553312722463305e-05, "loss": 0.685, "step": 2330 },
    { "epoch": 1.13, "grad_norm": 0.20128916203975677, "learning_rate": 4.751692347705998e-05, "loss": 0.6829, "step": 2340 },
    { "epoch": 1.14, "grad_norm": 0.16112156212329865, "learning_rate": 4.748027975392976e-05, "loss": 0.6714, "step": 2350 },
    { "epoch": 1.14, "grad_norm": 0.2659178674221039, "learning_rate": 4.744338196720608e-05, "loss": 0.6834, "step": 2360 },
    { "epoch": 1.15, "grad_norm": 0.20477250218391418, "learning_rate": 4.740623053389374e-05, "loss": 0.6664, "step": 2370 },
    { "epoch": 1.15, "grad_norm": 0.18069259822368622, "learning_rate": 4.7368825873864154e-05, "loss": 0.669, "step": 2380 },
    { "epoch": 1.16, "grad_norm": 0.2146090418100357, "learning_rate": 4.733116840985058e-05, "loss": 0.6836, "step": 2390 },
    { "epoch": 1.16, "grad_norm": 0.20656266808509827, "learning_rate": 4.729325856744341e-05, "loss": 0.6818, "step": 2400 },
    { "epoch": 1.16, "grad_norm": 0.1764717549085617, "learning_rate": 4.725509677508528e-05, "loss": 0.7012, "step": 2410 },
    { "epoch": 1.17, "grad_norm": 0.19059453904628754, "learning_rate": 4.721668346406631e-05, "loss": 0.6885, "step": 2420 },
    { "epoch": 1.17, "grad_norm": 0.1867195963859558, "learning_rate": 4.7178019068519165e-05, "loss": 0.6704, "step": 2430 },
    { "epoch": 1.18, "grad_norm": 0.19844096899032593, "learning_rate": 4.713910402541416e-05, "loss": 0.6746, "step": 2440 },
    { "epoch": 1.18, "grad_norm": 0.21426193416118622, "learning_rate": 4.709993877455436e-05, "loss": 0.6627, "step": 2450 },
    { "epoch": 1.19, "grad_norm": 0.19326262176036835, "learning_rate": 4.706052375857058e-05, "loss": 0.6652, "step": 2460 },
    { "epoch": 1.19, "grad_norm": 0.229305237531662, "learning_rate": 4.7020859422916365e-05, "loss": 0.6746, "step": 2470 },
    { "epoch": 1.2, "grad_norm": 0.20060321688652039, "learning_rate": 4.698094621586299e-05, "loss": 0.6658, "step": 2480 },
    { "epoch": 1.2, "grad_norm": 0.21455462276935577, "learning_rate": 4.694078458849438e-05, "loss": 0.6966, "step": 2490 },
    { "epoch": 1.21, "grad_norm": 0.18379837274551392, "learning_rate": 4.690037499470202e-05, "loss": 0.6753, "step": 2500 },
    { "epoch": 1.21, "eval_loss": 0.6940956711769104, "eval_runtime": 42.7889, "eval_samples_per_second": 46.741, "eval_steps_per_second": 0.374, "step": 2500 },
    { "epoch": 1.21, "grad_norm": 0.19967205822467804, "learning_rate": 4.68597178911798e-05, "loss": 0.697, "step": 2510 },
    { "epoch": 1.22, "grad_norm": 0.1688542366027832, "learning_rate": 4.681881373741888e-05, "loss": 0.681, "step": 2520 },
    { "epoch": 1.22, "grad_norm": 0.19124309718608856, "learning_rate": 4.67776629957025e-05, "loss": 0.6855, "step": 2530 },
    { "epoch": 1.23, "grad_norm": 0.22720417380332947, "learning_rate": 4.6736266131100706e-05, "loss": 0.6747, "step": 2540 },
    { "epoch": 1.23, "grad_norm": 0.21285779774188995, "learning_rate": 4.6694623611465185e-05, "loss": 0.687, "step": 2550 },
    { "epoch": 1.24, "grad_norm": 0.1932476907968521, "learning_rate": 4.6652735907423886e-05, "loss": 0.6745, "step": 2560 },
    { "epoch": 1.24, "grad_norm": 0.2050684541463852, "learning_rate": 4.661060349237574e-05, "loss": 0.6865, "step": 2570 },
    { "epoch": 1.25, "grad_norm": 0.20403894782066345, "learning_rate": 4.656822684248533e-05, "loss": 0.6897, "step": 2580 },
    { "epoch": 1.25, "grad_norm": 0.20080254971981049, "learning_rate": 4.652560643667747e-05, "loss": 0.6768, "step": 2590 },
    { "epoch": 1.26, "grad_norm": 0.20147019624710083, "learning_rate": 4.648274275663183e-05, "loss": 0.6911, "step": 2600 },
    { "epoch": 1.26, "grad_norm": 0.17825543880462646, "learning_rate": 4.643963628677743e-05, "loss": 0.6635, "step": 2610 },
    { "epoch": 1.27, "grad_norm": 0.20095126330852509, "learning_rate": 4.6396287514287275e-05, "loss": 0.664, "step": 2620 },
    { "epoch": 1.27, "grad_norm": 0.20084308087825775, "learning_rate": 4.6352696929072727e-05, "loss": 0.6653, "step": 2630 },
    { "epoch": 1.28, "grad_norm": 0.21178288757801056, "learning_rate": 4.630886502377805e-05, "loss": 0.6981, "step": 2640 },
    { "epoch": 1.28, "grad_norm": 0.1963459551334381, "learning_rate": 4.62647922937748e-05, "loss": 0.6666, "step": 2650 },
    { "epoch": 1.29, "grad_norm": 0.21702295541763306, "learning_rate": 4.6220479237156254e-05, "loss": 0.6868, "step": 2660 },
    { "epoch": 1.29, "grad_norm": 0.20558403432369232, "learning_rate": 4.6175926354731785e-05, "loss": 0.6794, "step": 2670 },
    { "epoch": 1.3, "grad_norm": 0.20398521423339844, "learning_rate": 4.613113415002115e-05, "loss": 0.69, "step": 2680 },
    { "epoch": 1.3, "grad_norm": 0.2166917473077774, "learning_rate": 4.6086103129248846e-05, "loss": 0.6604, "step": 2690 },
    { "epoch": 1.3, "grad_norm": 0.2121194452047348, "learning_rate": 4.604083380133841e-05, "loss": 0.6653, "step": 2700 },
    { "epoch": 1.31, "grad_norm": 0.252469927072525, "learning_rate": 4.5995326677906605e-05, "loss": 0.6678, "step": 2710 },
    { "epoch": 1.31, "grad_norm": 0.1864558607339859, "learning_rate": 4.5949582273257656e-05, "loss": 0.6806, "step": 2720 },
    { "epoch": 1.32, "grad_norm": 0.19438904523849487, "learning_rate": 4.59036011043775e-05, "loss": 0.6832, "step": 2730 },
    { "epoch": 1.32, "grad_norm": 0.1954311728477478, "learning_rate": 4.5857383690927844e-05, "loss": 0.6772, "step": 2740 },
    { "epoch": 1.33, "grad_norm": 0.38669708371162415, "learning_rate": 4.5810930555240364e-05, "loss": 0.6663, "step": 2750 },
    { "epoch": 1.33, "grad_norm": 0.5850951075553894, "learning_rate": 4.576424222231078e-05, "loss": 0.6578, "step": 2760 },
    { "epoch": 1.34, "grad_norm": 0.22347845137119293, "learning_rate": 4.571731921979292e-05, "loss": 0.6735, "step": 2770 },
    { "epoch": 1.34, "grad_norm": 0.22810417413711548, "learning_rate": 4.567016207799276e-05, "loss": 0.6628, "step": 2780 },
    { "epoch": 1.35, "grad_norm": 0.18823856115341187, "learning_rate": 4.562277132986241e-05, "loss": 0.6803, "step": 2790 },
    { "epoch": 1.35, "grad_norm": 0.219554141163826, "learning_rate": 4.557514751099415e-05, "loss": 0.6701, "step": 2800 },
    { "epoch": 1.36, "grad_norm": 0.1830976903438568, "learning_rate": 4.55272911596143e-05, "loss": 0.6864, "step": 2810 },
    { "epoch": 1.36, "grad_norm": 0.18995565176010132, "learning_rate": 4.5479202816577195e-05, "loss": 0.6683, "step": 2820 },
    { "epoch": 1.37, "grad_norm": 0.19047056138515472, "learning_rate": 4.543088302535903e-05, "loss": 0.6855, "step": 2830 },
    { "epoch": 1.37, "grad_norm": 0.18707235157489777, "learning_rate": 4.538233233205177e-05, "loss": 0.6786, "step": 2840 },
    { "epoch": 1.38, "grad_norm": 0.19992856681346893, "learning_rate": 4.533355128535693e-05, "loss": 0.6676, "step": 2850 },
    { "epoch": 1.38, "grad_norm": 0.20344853401184082, "learning_rate": 4.5284540436579395e-05, "loss": 0.6938, "step": 2860 },
    { "epoch": 1.39, "grad_norm": 0.1867518275976181, "learning_rate": 4.5235300339621164e-05, "loss": 0.6736, "step": 2870 },
    { "epoch": 1.39, "grad_norm": 0.23014211654663086, "learning_rate": 4.518583155097517e-05, "loss": 0.674, "step": 2880 },
    { "epoch": 1.4, "grad_norm": 0.1905904859304428, "learning_rate": 4.5136134629718855e-05, "loss": 0.6704, "step": 2890 },
    { "epoch": 1.4, "grad_norm": 0.2236865758895874, "learning_rate": 4.5086210137508e-05, "loss": 0.6633, "step": 2900 },
    { "epoch": 1.41, "grad_norm": 0.19701044261455536, "learning_rate": 4.5036058638570264e-05, "loss": 0.6626, "step": 2910 },
    { "epoch": 1.41, "grad_norm": 0.19464708864688873, "learning_rate": 4.4985680699698855e-05, "loss": 0.6595, "step": 2920 },
    { "epoch": 1.42, "grad_norm": 0.19034789502620697, "learning_rate": 4.493507689024614e-05, "loss": 0.6688, "step": 2930 },
    { "epoch": 1.42, "grad_norm": 0.19750766456127167, "learning_rate": 4.488424778211717e-05, "loss": 0.6521, "step": 2940 },
    { "epoch": 1.43, "grad_norm": 0.20544354617595673, "learning_rate": 4.483319394976325e-05, "loss": 0.6947, "step": 2950 },
    { "epoch": 1.43, "grad_norm": 0.22409361600875854, "learning_rate": 4.478191597017541e-05, "loss": 0.674, "step": 2960 },
    { "epoch": 1.44, "grad_norm": 0.19409336149692535, "learning_rate": 4.473041442287793e-05, "loss": 0.6978, "step": 2970 },
    { "epoch": 1.44, "grad_norm": 0.20303764939308167, "learning_rate": 4.4678689889921755e-05, "loss": 0.6771, "step": 2980 },
    { "epoch": 1.45, "grad_norm": 0.193495512008667, "learning_rate": 4.462674295587794e-05, "loss": 0.6693, "step": 2990 },
    { "epoch": 1.45, "grad_norm": 0.18171487748622894, "learning_rate": 4.457457420783103e-05, "loss": 0.6716, "step": 3000 },
    { "epoch": 1.45, "eval_loss": 0.6894034147262573, "eval_runtime": 43.0285, "eval_samples_per_second": 46.481, "eval_steps_per_second": 0.372, "step": 3000 },
    { "epoch": 1.45, "grad_norm": 0.22247907519340515, "learning_rate": 4.452218423537241e-05, "loss": 0.6753, "step": 3010 },
    { "epoch": 1.46, "grad_norm": 0.2469698041677475, "learning_rate": 4.4469573630593686e-05, "loss": 0.6715, "step": 3020 },
    { "epoch": 1.46, "grad_norm": 0.18804265558719635, "learning_rate": 4.4416742988079945e-05, "loss": 0.6546, "step": 3030 },
    { "epoch": 1.47, "grad_norm": 0.19240662455558777, "learning_rate": 4.436369290490307e-05, "loss": 0.6857, "step": 3040 },
    { "epoch": 1.47, "grad_norm": 0.22342869639396667, "learning_rate": 4.431042398061499e-05, "loss": 0.6938, "step": 3050 },
    { "epoch": 1.48, "grad_norm": 0.20553608238697052, "learning_rate": 4.425693681724086e-05, "loss": 0.6687, "step": 3060 },
    { "epoch": 1.48, "grad_norm": 0.19881366193294525, "learning_rate": 4.420323201927231e-05, "loss": 0.6644, "step": 3070 },
    { "epoch": 1.49, "grad_norm": 0.18151846528053284, "learning_rate": 4.41493101936606e-05, "loss": 0.6667, "step": 3080 },
    { "epoch": 1.49, "grad_norm": 0.1906978040933609, "learning_rate": 4.409517194980974e-05, "loss": 0.6578, "step": 3090 },
    { "epoch": 1.5, "grad_norm": 0.18123480677604675, "learning_rate": 4.4040817899569644e-05, "loss": 0.681, "step": 3100 },
    { "epoch": 1.5, "grad_norm": 0.18974031507968903, "learning_rate": 4.3986248657229134e-05, "loss": 0.6645, "step": 3110 },
    { "epoch": 1.51, "grad_norm": 0.2028564214706421, "learning_rate": 4.3931464839509105e-05, "loss": 0.6913, "step": 3120 },
    { "epoch": 1.51, "grad_norm": 0.17814776301383972, "learning_rate": 4.387646706555548e-05, "loss": 0.6949, "step": 3130 },
    { "epoch": 1.52, "grad_norm": 0.21407385170459747, "learning_rate": 4.382125595693224e-05, "loss": 0.6656, "step": 3140 },
    { "epoch": 1.52, "grad_norm": 0.21102961897850037, "learning_rate": 4.376583213761438e-05, "loss": 0.6612, "step": 3150 },
    { "epoch": 1.53, "grad_norm": 0.19947603344917297, "learning_rate": 4.371019623398088e-05, "loss": 0.6894, "step": 3160 },
    { "epoch": 1.53, "grad_norm": 0.20861291885375977, "learning_rate": 4.365434887480763e-05, "loss": 0.6739, "step": 3170 },
    { "epoch": 1.54, "grad_norm": 0.2098263055086136, "learning_rate": 4.359829069126028e-05, "loss": 0.6739, "step": 3180 },
    { "epoch": 1.54, "grad_norm": 0.19079270958900452, "learning_rate": 4.3542022316887166e-05, "loss": 0.6739, "step": 3190 },
    { "epoch": 1.55, "grad_norm": 0.21104255318641663, "learning_rate": 4.34855443876121e-05, "loss": 0.652, "step": 3200 },
    { "epoch": 1.55, "grad_norm": 0.19791875779628754, "learning_rate": 4.342885754172721e-05, "loss": 0.6551, "step": 3210 },
    { "epoch": 1.56, "grad_norm": 0.22481437027454376, "learning_rate": 4.337196241988573e-05, "loss": 0.6649, "step": 3220 },
    { "epoch": 1.56, "grad_norm": 0.19483722746372223, "learning_rate": 4.3314859665094745e-05, "loss": 0.6762, "step": 3230 },
    { "epoch": 1.57, "grad_norm": 0.18499700725078583, "learning_rate": 4.3257549922707926e-05, "loss": 0.6754, "step": 3240 },
    { "epoch": 1.57, "grad_norm": 0.21884921193122864, "learning_rate": 4.320003384041823e-05, "loss": 0.6757, "step": 3250 },
    { "epoch": 1.58, "grad_norm": 0.20222313702106476, "learning_rate": 4.314231206825061e-05, "loss": 0.6527, "step": 3260 },
    { "epoch": 1.58, "grad_norm": 0.18140584230422974, "learning_rate": 4.3084385258554635e-05, "loss": 0.6731, "step": 3270 },
    { "epoch": 1.59, "grad_norm": 0.21039867401123047, "learning_rate": 4.302625406599713e-05, "loss": 0.6722, "step": 3280 },
    { "epoch": 1.59, "grad_norm": 0.18162797391414642, "learning_rate": 4.296791914755478e-05, "loss": 0.6563, "step": 3290 },
    { "epoch": 1.59, "grad_norm": 0.178864985704422, "learning_rate": 4.29093811625067e-05, "loss": 0.6666, "step": 3300 },
    { "epoch": 1.6, "grad_norm": 0.21808697283267975, "learning_rate": 4.285064077242699e-05, "loss": 0.6689, "step": 3310 },
    { "epoch": 1.6, "grad_norm": 0.20632699131965637, "learning_rate": 4.279169864117727e-05, "loss": 0.6664, "step": 3320 },
    { "epoch": 1.61, "grad_norm": 0.20482878386974335, "learning_rate": 4.273255543489912e-05, "loss": 0.6407, "step": 3330 },
    { "epoch": 1.61, "grad_norm": 0.18613804876804352, "learning_rate": 4.267321182200664e-05, "loss": 0.666, "step": 3340 },
    { "epoch": 1.62, "grad_norm": 0.23219124972820282, "learning_rate": 4.2613668473178836e-05, "loss": 0.6802, "step": 3350 },
    { "epoch": 1.62, "grad_norm": 0.17973752319812775, "learning_rate": 4.255392606135202e-05, "loss": 0.6571, "step": 3360 },
    { "epoch": 1.63, "grad_norm": 0.21650458872318268, "learning_rate": 4.2493985261712285e-05, "loss": 0.6727, "step": 3370 },
    { "epoch": 1.63, "grad_norm": 0.19476266205310822, "learning_rate": 4.2433846751687815e-05, "loss": 0.6836, "step": 3380 },
    { "epoch": 1.64, "grad_norm": 0.20223549008369446, "learning_rate": 4.237351121094121e-05, "loss": 0.6666, "step": 3390 },
    { "epoch": 1.64, "grad_norm": 0.2078579217195511, "learning_rate": 4.231297932136189e-05, "loss": 0.673, "step": 3400 },
    { "epoch": 1.65, "grad_norm": 0.193430095911026, "learning_rate": 4.225225176705829e-05, "loss": 0.6718, "step": 3410 },
    { "epoch": 1.65, "grad_norm": 0.18017272651195526, "learning_rate": 4.2191329234350194e-05, "loss": 0.6806, "step": 3420 },
    { "epoch": 1.66, "grad_norm": 0.20289075374603271, "learning_rate": 4.213021241176096e-05, "loss": 0.667, "step": 3430 },
    { "epoch": 1.66, "grad_norm": 0.2014080286026001, "learning_rate": 4.2068901990009726e-05, "loss": 0.6441, "step": 3440 },
    { "epoch": 1.67, "grad_norm": 0.17385190725326538, "learning_rate": 4.200739866200363e-05, "loss": 0.6564, "step": 3450 },
    { "epoch": 1.67, "grad_norm": 0.20427776873111725, "learning_rate": 4.1945703122829924e-05, "loss": 0.6509, "step": 3460 },
    { "epoch": 1.68, "grad_norm": 0.2093549370765686, "learning_rate": 4.1883816069748214e-05, "loss": 0.6741, "step": 3470 },
    { "epoch": 1.68, "grad_norm": 0.21946479380130768, "learning_rate": 4.182173820218249e-05, "loss": 0.6778, "step": 3480 },
    { "epoch": 1.69, "grad_norm": 0.2244766503572464, "learning_rate": 4.175947022171326e-05, "loss": 0.681, "step": 3490 },
    { "epoch": 1.69, "grad_norm": 0.22939437627792358, "learning_rate": 4.169701283206961e-05, "loss": 0.6595, "step": 3500 },
    { "epoch": 1.69, "eval_loss": 0.6864724159240723, "eval_runtime": 43.0148, "eval_samples_per_second": 46.496, "eval_steps_per_second": 0.372, "step": 3500 },
    { "epoch": 1.7, "grad_norm": 0.2838324010372162, "learning_rate": 4.163436673912127e-05, "loss": 0.666, "step": 3510 },
    { "epoch": 1.7, "grad_norm": 0.1862134337425232, "learning_rate": 4.1571532650870614e-05, "loss": 0.6671, "step": 3520 },
    { "epoch": 1.71, "grad_norm": 0.2170250117778778, "learning_rate": 4.1508511277444674e-05, "loss": 0.6566, "step": 3530 },
    { "epoch": 1.71, "grad_norm": 0.17831502854824066, "learning_rate": 4.1445303331087096e-05, "loss": 0.6776, "step": 3540 },
    { "epoch": 1.72, "grad_norm": 0.17921501398086548, "learning_rate": 4.138190952615012e-05, "loss": 0.6876, "step": 3550 },
    { "epoch": 1.72, "grad_norm": 0.19673947989940643, "learning_rate": 4.131833057908648e-05, "loss": 0.6596, "step": 3560 },
    { "epoch": 1.73, "grad_norm": 0.1829831749200821, "learning_rate": 4.1254567208441296e-05, "loss": 0.6712, "step": 3570 },
    { "epoch": 1.73, "grad_norm": 0.3023044764995575, "learning_rate": 4.119062013484402e-05, "loss": 0.6747, "step": 3580 },
    { "epoch": 1.74, "grad_norm": 0.21133002638816833, "learning_rate": 4.11264900810002e-05, "loss": 0.674, "step": 3590 },
    { "epoch": 1.74, "grad_norm": 0.19868247210979462, "learning_rate": 4.106217777168339e-05, "loss": 0.68, "step": 3600 },
    { "epoch": 1.74, "grad_norm": 0.21643079817295074, "learning_rate": 4.0997683933726894e-05, "loss": 0.6555, "step": 3610 },
    { "epoch": 1.75, "grad_norm": 0.1953793615102768, "learning_rate": 4.093300929601562e-05, "loss": 0.6704, "step": 3620 },
    { "epoch": 1.75, "grad_norm": 0.21795640885829926, "learning_rate": 4.086815458947777e-05, "loss": 0.6583, "step": 3630 },
    { "epoch": 1.76, "grad_norm": 0.22441154718399048, "learning_rate": 4.080312054707665e-05, "loss": 0.6772, "step": 3640 },
    { "epoch": 1.76, "grad_norm": 0.20651431381702423, "learning_rate": 4.073790790380232e-05, "loss": 0.6793, "step": 3650 },
    { "epoch": 1.77, "grad_norm": 0.21724598109722137, "learning_rate": 4.0672517396663354e-05, "loss": 0.6742, "step": 3660 },
    { "epoch": 1.77, "grad_norm": 0.27277424931526184, "learning_rate": 4.0606949764678434e-05, "loss": 0.6495, "step": 3670 },
    { "epoch": 1.78, "grad_norm": 0.21489045023918152, "learning_rate": 4.054120574886808e-05, "loss": 0.6516, "step": 3680 },
    { "epoch": 1.78, "grad_norm": 0.19597062468528748, "learning_rate": 4.0475286092246215e-05, "loss": 0.6498, "step": 3690 },
    { "epoch": 1.79, "grad_norm": 0.20840667188167572, "learning_rate": 4.0409191539811774e-05, "loss": 0.6394, "step": 3700 },
    { "epoch": 1.79, "grad_norm": 0.23823177814483643, "learning_rate": 4.0342922838540334e-05, "loss": 0.6798, "step": 3710 },
    { "epoch": 1.8, "grad_norm": 0.20505908131599426, "learning_rate": 4.0276480737375606e-05, "loss": 0.6607, "step": 3720 },
    { "epoch": 1.8, "grad_norm": 0.28554779291152954, "learning_rate": 4.0209865987221014e-05, "loss": 0.6467, "step": 3730 },
    { "epoch": 1.81, "grad_norm": 0.24168336391448975, "learning_rate": 4.014307934093119e-05, "loss": 0.6625, "step": 3740 },
    { "epoch": 1.81, "grad_norm": 0.21519850194454193, "learning_rate": 4.007612155330348e-05, "loss": 0.6431, "step": 3750 },
    { "epoch": 1.82, "grad_norm": 0.1954280287027359, "learning_rate": 4.000899338106939e-05, "loss": 0.6669, "step": 3760 },
    { "epoch": 1.82, "grad_norm": 0.22829805314540863, "learning_rate": 3.9941695582886065e-05, "loss": 0.6507, "step": 3770 },
    { "epoch": 1.83, "grad_norm": 0.30064237117767334, "learning_rate": 3.9874228919327685e-05, "loss": 0.664, "step": 3780 },
    { "epoch": 1.83, "grad_norm": 0.21391743421554565, "learning_rate": 3.980659415287689e-05, "loss": 0.6577, "step": 3790 },
    { "epoch": 1.84, "grad_norm": 0.237900048494339, "learning_rate": 3.9738792047916143e-05, "loss": 0.6678, "step": 3800 },
    { "epoch": 1.84, "grad_norm": 0.2035730481147766, "learning_rate": 3.967082337071911e-05, "loss": 0.6822, "step": 3810 },
    { "epoch": 1.85, "grad_norm": 0.25883620977401733, "learning_rate": 3.960268888944202e-05, "loss": 0.6657, "step": 3820 },
    { "epoch": 1.85, "grad_norm": 0.21861082315444946, "learning_rate": 3.9534389374114905e-05, "loss": 0.6608, "step": 3830 },
    { "epoch": 1.86, "grad_norm": 0.2185363620519638, "learning_rate": 3.946592559663299e-05, "loss": 0.6763, "step": 3840 },
    { "epoch": 1.86, "grad_norm": 0.2056983858346939, "learning_rate": 3.9397298330747905e-05, "loss": 0.6703, "step": 3850 },
    { "epoch": 1.87, "grad_norm": 0.19903923571109772, "learning_rate": 3.932850835205899e-05, "loss": 0.6887, "step": 3860 },
    { "epoch": 1.87, "grad_norm": 0.19114772975444794, "learning_rate": 3.9259556438004476e-05, "loss": 0.6563, "step": 3870 },
    { "epoch": 1.88, "grad_norm": 0.20320531725883484, "learning_rate": 3.9190443367852736e-05, "loss": 0.6471, "step": 3880 },
    { "epoch": 1.88, "grad_norm": 0.23587201535701752, "learning_rate": 3.912116992269348e-05, "loss": 0.6705, "step": 3890 },
    { "epoch": 1.88, "grad_norm": 0.19107288122177124, "learning_rate": 3.9051736885428886e-05, "loss": 0.6696, "step": 3900 },
    { "epoch": 1.89, "grad_norm": 0.20316600799560547, "learning_rate": 3.898214504076482e-05, "loss": 0.6724, "step": 3910 },
    { "epoch": 1.89, "grad_norm": 0.2201535403728485, "learning_rate": 3.89123951752019e-05, "loss": 0.658, "step": 3920 },
    { "epoch": 1.9, "grad_norm": 0.20604528486728668, "learning_rate": 3.884248807702665e-05, "loss": 0.6746, "step": 3930 },
    { "epoch": 1.9, "grad_norm": 0.24204471707344055, "learning_rate": 3.8772424536302564e-05, "loss": 0.6737, "step": 3940 },
    { "epoch": 1.91, "grad_norm": 0.20363375544548035, "learning_rate": 3.870220534486121e-05, "loss": 0.669, "step": 3950 },
    { "epoch": 1.91, "grad_norm": 0.22277967631816864, "learning_rate": 3.8631831296293246e-05, "loss": 0.6676, "step": 3960 },
    { "epoch": 1.92, "grad_norm": 0.18391238152980804,
|
"learning_rate": 3.856130318593947e-05, |
|
"loss": 0.6651, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.185078427195549, |
|
"learning_rate": 3.849062181088183e-05, |
|
"loss": 0.6432, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.2277977466583252, |
|
"learning_rate": 3.841978796993442e-05, |
|
"loss": 0.6518, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.22232377529144287, |
|
"learning_rate": 3.834880246363443e-05, |
|
"loss": 0.6743, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"eval_loss": 0.6847629547119141, |
|
"eval_runtime": 42.7658, |
|
"eval_samples_per_second": 46.766, |
|
"eval_steps_per_second": 0.374, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.20745177567005157, |
|
"learning_rate": 3.8277666094233115e-05, |
|
"loss": 0.6787, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.21468785405158997, |
|
"learning_rate": 3.820637966568675e-05, |
|
"loss": 0.6603, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.18202567100524902, |
|
"learning_rate": 3.81349439836475e-05, |
|
"loss": 0.6487, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.21424496173858643, |
|
"learning_rate": 3.806335985545434e-05, |
|
"loss": 0.6674, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.19061969220638275, |
|
"learning_rate": 3.7991628090123933e-05, |
|
"loss": 0.651, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.20192697644233704, |
|
"learning_rate": 3.7919749498341477e-05, |
|
"loss": 0.661, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.1942390650510788, |
|
"learning_rate": 3.784772489245155e-05, |
|
"loss": 0.6688, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.19686420261859894, |
|
"learning_rate": 3.777555508644893e-05, |
|
"loss": 0.6492, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.2590291202068329, |
|
"learning_rate": 3.770324089596937e-05, |
|
"loss": 0.6597, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.208131343126297, |
|
"learning_rate": 3.763078313828043e-05, |
|
"loss": 0.6695, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.215104341506958, |
|
"learning_rate": 3.755818263227219e-05, |
|
"loss": 0.6521, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.22334003448486328, |
|
"learning_rate": 3.748544019844803e-05, |
|
"loss": 0.6775, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.2614120841026306, |
|
"learning_rate": 3.741255665891534e-05, |
|
"loss": 0.6643, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.22348462045192719, |
|
"learning_rate": 3.733953283737625e-05, |
|
"loss": 0.6598, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.21082797646522522, |
|
"learning_rate": 3.726636955911825e-05, |
|
"loss": 0.6697, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.20834925770759583, |
|
"learning_rate": 3.719306765100499e-05, |
|
"loss": 0.6562, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.223398357629776, |
|
"learning_rate": 3.711962794146682e-05, |
|
"loss": 0.6403, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.1990179866552353, |
|
"learning_rate": 3.704605126049147e-05, |
|
"loss": 0.6662, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.20885860919952393, |
|
"learning_rate": 3.6972338439614676e-05, |
|
"loss": 0.6669, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.21816882491111755, |
|
"learning_rate": 3.6898490311910774e-05, |
|
"loss": 0.6378, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.21286895871162415, |
|
"learning_rate": 3.6824507711983294e-05, |
|
"loss": 0.633, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.20113909244537354, |
|
"learning_rate": 3.6750391475955506e-05, |
|
"loss": 0.6561, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.1805492788553238, |
|
"learning_rate": 3.6676142441461e-05, |
|
"loss": 0.6478, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.198676198720932, |
|
"learning_rate": 3.66017614476342e-05, |
|
"loss": 0.6477, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.18248610198497772, |
|
"learning_rate": 3.6527249335100856e-05, |
|
"loss": 0.6476, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.20229819416999817, |
|
"learning_rate": 3.645260694596862e-05, |
|
"loss": 0.6675, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.24611905217170715, |
|
"learning_rate": 3.637783512381745e-05, |
|
"loss": 0.6546, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.1821327954530716, |
|
"learning_rate": 3.6302934713690114e-05, |
|
"loss": 0.6482, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.19002540409564972, |
|
"learning_rate": 3.622790656208263e-05, |
|
"loss": 0.6468, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.20899777114391327, |
|
"learning_rate": 3.615275151693471e-05, |
|
"loss": 0.6649, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.1918490082025528, |
|
"learning_rate": 3.607747042762016e-05, |
|
"loss": 0.6554, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.2790839970111847, |
|
"learning_rate": 3.600206414493728e-05, |
|
"loss": 0.6607, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.21416689455509186, |
|
"learning_rate": 3.592653352109929e-05, |
|
"loss": 0.6537, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.19238290190696716, |
|
"learning_rate": 3.5850879409724624e-05, |
|
"loss": 0.6566, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.24673143029212952, |
|
"learning_rate": 3.577510266582737e-05, |
|
"loss": 0.6599, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.1863541454076767, |
|
"learning_rate": 3.569920414580754e-05, |
|
"loss": 0.6593, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.2196236401796341, |
|
"learning_rate": 3.56231847074414e-05, |
|
"loss": 0.6483, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.18300732970237732, |
|
"learning_rate": 3.5547045209871806e-05, |
|
"loss": 0.6359, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.18719661235809326, |
|
"learning_rate": 3.5470786513598476e-05, |
|
"loss": 0.6707, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.1748167872428894, |
|
"learning_rate": 3.539440948046827e-05, |
|
"loss": 0.6516, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.21993610262870789, |
|
"learning_rate": 3.531791497366543e-05, |
|
"loss": 0.6539, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.20704199373722076, |
|
"learning_rate": 3.524130385770186e-05, |
|
"loss": 0.6734, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.23367135226726532, |
|
"learning_rate": 3.516457699840733e-05, |
|
"loss": 0.6855, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.1824340671300888, |
|
"learning_rate": 3.50877352629197e-05, |
|
"loss": 0.6575, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.22375932335853577, |
|
"learning_rate": 3.50107795196751e-05, |
|
"loss": 0.6487, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.23603610694408417, |
|
"learning_rate": 3.4933710638398156e-05, |
|
"loss": 0.6505, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.22663302719593048, |
|
"learning_rate": 3.485652949009212e-05, |
|
"loss": 0.6569, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.21095499396324158, |
|
"learning_rate": 3.4779236947029055e-05, |
|
"loss": 0.6657, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.2422657608985901, |
|
"learning_rate": 3.470183388273995e-05, |
|
"loss": 0.645, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.19883093237876892, |
|
"learning_rate": 3.462432117200489e-05, |
|
"loss": 0.6647, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"eval_loss": 0.6818509697914124, |
|
"eval_runtime": 42.9789, |
|
"eval_samples_per_second": 46.534, |
|
"eval_steps_per_second": 0.372, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.23546861112117767, |
|
"learning_rate": 3.454669969084312e-05, |
|
"loss": 0.6594, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.20284776389598846, |
|
"learning_rate": 3.446897031650316e-05, |
|
"loss": 0.65, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.19946908950805664, |
|
"learning_rate": 3.4391133927452925e-05, |
|
"loss": 0.6573, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.2393037974834442, |
|
"learning_rate": 3.431319140336975e-05, |
|
"loss": 0.6502, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.2206243872642517, |
|
"learning_rate": 3.423514362513048e-05, |
|
"loss": 0.6704, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.20536164939403534, |
|
"learning_rate": 3.415699147480149e-05, |
|
"loss": 0.6629, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.22283408045768738, |
|
"learning_rate": 3.407873583562873e-05, |
|
"loss": 0.6789, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.21526776254177094, |
|
"learning_rate": 3.4000377592027754e-05, |
|
"loss": 0.6608, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.2566728889942169, |
|
"learning_rate": 3.3921917629573695e-05, |
|
"loss": 0.6624, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.21391907334327698, |
|
"learning_rate": 3.384335683499129e-05, |
|
"loss": 0.6536, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.21451455354690552, |
|
"learning_rate": 3.376469609614484e-05, |
|
"loss": 0.648, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.19541354477405548, |
|
"learning_rate": 3.368593630202818e-05, |
|
"loss": 0.657, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.2338065356016159, |
|
"learning_rate": 3.360707834275459e-05, |
|
"loss": 0.6686, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.1928481012582779, |
|
"learning_rate": 3.3528123109546844e-05, |
|
"loss": 0.6496, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.23110058903694153, |
|
"learning_rate": 3.344907149472702e-05, |
|
"loss": 0.6556, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.22316649556159973, |
|
"learning_rate": 3.336992439170649e-05, |
|
"loss": 0.663, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.1999579668045044, |
|
"learning_rate": 3.3290682694975775e-05, |
|
"loss": 0.6436, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.276755154132843, |
|
"learning_rate": 3.321134730009446e-05, |
|
"loss": 0.6355, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.24972842633724213, |
|
"learning_rate": 3.313191910368111e-05, |
|
"loss": 0.6454, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.2169172465801239, |
|
"learning_rate": 3.3052399003403046e-05, |
|
"loss": 0.6419, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.20602068305015564, |
|
"learning_rate": 3.297278789796629e-05, |
|
"loss": 0.6459, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.25669389963150024, |
|
"learning_rate": 3.289308668710532e-05, |
|
"loss": 0.6675, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.21008487045764923, |
|
"learning_rate": 3.281329627157302e-05, |
|
"loss": 0.642, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.21287119388580322, |
|
"learning_rate": 3.2733417553130384e-05, |
|
"loss": 0.6348, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.21862877905368805, |
|
"learning_rate": 3.2653451434536394e-05, |
|
"loss": 0.6579, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.19740484654903412, |
|
"learning_rate": 3.2573398819537767e-05, |
|
"loss": 0.6428, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.2695826292037964, |
|
"learning_rate": 3.2493260612858803e-05, |
|
"loss": 0.658, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.19435593485832214, |
|
"learning_rate": 3.2413037720191096e-05, |
|
"loss": 0.6615, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.21996192634105682, |
|
"learning_rate": 3.233273104818337e-05, |
|
"loss": 0.6588, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.26496437191963196, |
|
"learning_rate": 3.225234150443114e-05, |
|
"loss": 0.6548, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.2110251486301422, |
|
"learning_rate": 3.217186999746654e-05, |
|
"loss": 0.6618, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.19939462840557098, |
|
"learning_rate": 3.209131743674803e-05, |
|
"loss": 0.6531, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.19813968241214752, |
|
"learning_rate": 3.201068473265007e-05, |
|
"loss": 0.6484, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.2121964991092682, |
|
"learning_rate": 3.192997279645291e-05, |
|
"loss": 0.6556, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.19998233020305634, |
|
"learning_rate": 3.1849182540332214e-05, |
|
"loss": 0.6554, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.23478324711322784, |
|
"learning_rate": 3.176831487734882e-05, |
|
"loss": 0.637, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.19482719898223877, |
|
"learning_rate": 3.168737072143838e-05, |
|
"loss": 0.6464, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.21184729039669037, |
|
"learning_rate": 3.160635098740103e-05, |
|
"loss": 0.6584, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.27235257625579834, |
|
"learning_rate": 3.152525659089106e-05, |
|
"loss": 0.6597, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.22343479096889496, |
|
"learning_rate": 3.1444088448406584e-05, |
|
"loss": 0.6673, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.19164462387561798, |
|
"learning_rate": 3.136284747727916e-05, |
|
"loss": 0.6617, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.21662816405296326, |
|
"learning_rate": 3.128153459566341e-05, |
|
"loss": 0.6505, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.23168237507343292, |
|
"learning_rate": 3.1200150722526697e-05, |
|
"loss": 0.6479, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.19140580296516418, |
|
"learning_rate": 3.111869677763865e-05, |
|
"loss": 0.6413, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.25583407282829285, |
|
"learning_rate": 3.1037173681560874e-05, |
|
"loss": 0.6443, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.1979406476020813, |
|
"learning_rate": 3.095558235563647e-05, |
|
"loss": 0.6668, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.21006809175014496, |
|
"learning_rate": 3.0873923721979645e-05, |
|
"loss": 0.6505, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.2352473884820938, |
|
"learning_rate": 3.07921987034653e-05, |
|
"loss": 0.6552, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.22581614553928375, |
|
"learning_rate": 3.07104082237186e-05, |
|
"loss": 0.6489, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.21772272884845734, |
|
"learning_rate": 3.06285532071045e-05, |
|
"loss": 0.6721, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"eval_loss": 0.6797133684158325, |
|
"eval_runtime": 43.0653, |
|
"eval_samples_per_second": 46.441, |
|
"eval_steps_per_second": 0.372, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.20226365327835083, |
|
"learning_rate": 3.054663457871736e-05, |
|
"loss": 0.6723, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.2123214453458786, |
|
"learning_rate": 3.046465326437043e-05, |
|
"loss": 0.6479, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.21145904064178467, |
|
"learning_rate": 3.0382610190585435e-05, |
|
"loss": 0.6508, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.2079765647649765, |
|
"learning_rate": 3.030050628458206e-05, |
|
"loss": 0.6494, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.24468085169792175, |
|
"learning_rate": 3.0218342474267513e-05, |
|
"loss": 0.6628, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.21137215197086334, |
|
"learning_rate": 3.0136119688225996e-05, |
|
"loss": 0.6463, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.20148161053657532, |
|
"learning_rate": 3.0053838855708243e-05, |
|
"loss": 0.6635, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.21683721244335175, |
|
"learning_rate": 2.9971500906621027e-05, |
|
"loss": 0.6585, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.1892833113670349, |
|
"learning_rate": 2.988910677151659e-05, |
|
"loss": 0.6621, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.3029578924179077, |
|
"learning_rate": 2.980665738158221e-05, |
|
"loss": 0.6742, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.2016330510377884, |
|
"learning_rate": 2.972415366862959e-05, |
|
"loss": 0.6686, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.20674502849578857, |
|
"learning_rate": 2.964159656508441e-05, |
|
"loss": 0.6577, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.20945307612419128, |
|
"learning_rate": 2.9558987003975736e-05, |
|
"loss": 0.6673, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.2278815060853958, |
|
"learning_rate": 2.9476325918925485e-05, |
|
"loss": 0.6585, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.21968629956245422, |
|
"learning_rate": 2.9393614244137875e-05, |
|
"loss": 0.6547, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.22052909433841705, |
|
"learning_rate": 2.9310852914388875e-05, |
|
"loss": 0.6706, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.24963468313217163, |
|
"learning_rate": 2.9228042865015647e-05, |
|
"loss": 0.635, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.21189726889133453, |
|
"learning_rate": 2.914518503190595e-05, |
|
"loss": 0.6529, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.19712771475315094, |
|
"learning_rate": 2.9062280351487587e-05, |
|
"loss": 0.6589, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.23704534769058228, |
|
"learning_rate": 2.8979329760717788e-05, |
|
"loss": 0.6459, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.3048954904079437, |
|
"learning_rate": 2.8896334197072667e-05, |
|
"loss": 0.6468, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.30929285287857056, |
|
"learning_rate": 2.8813294598536606e-05, |
|
"loss": 0.651, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.2547236382961273, |
|
"learning_rate": 2.8730211903591636e-05, |
|
"loss": 0.6374, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.23137128353118896, |
|
"learning_rate": 2.8647087051206862e-05, |
|
"loss": 0.6486, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.21269945800304413, |
|
"learning_rate": 2.856392098082783e-05, |
|
"loss": 0.6751, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.19768312573432922, |
|
"learning_rate": 2.8480714632365906e-05, |
|
"loss": 0.6511, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.2293408066034317, |
|
"learning_rate": 2.83974689461877e-05, |
|
"loss": 0.6337, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.22362858057022095, |
|
"learning_rate": 2.8314184863104347e-05, |
|
"loss": 0.6449, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.22398819029331207, |
|
"learning_rate": 2.8230863324360977e-05, |
|
"loss": 0.6437, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.22831834852695465, |
|
"learning_rate": 2.8147505271626002e-05, |
|
"loss": 0.6493, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.2500370740890503, |
|
"learning_rate": 2.8064111646980524e-05, |
|
"loss": 0.6451, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.21840785443782806, |
|
"learning_rate": 2.7980683392907632e-05, |
|
"loss": 0.6558, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.21673984825611115, |
|
"learning_rate": 2.7897221452281813e-05, |
|
"loss": 0.6493, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.22077977657318115, |
|
"learning_rate": 2.7813726768358263e-05, |
|
"loss": 0.6443, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.22384098172187805, |
|
"learning_rate": 2.7730200284762215e-05, |
|
"loss": 0.6525, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.2188693881034851, |
|
"learning_rate": 2.7646642945478314e-05, |
|
"loss": 0.649, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.21913747489452362, |
|
"learning_rate": 2.7563055694839884e-05, |
|
"loss": 0.6543, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.20839491486549377, |
|
"learning_rate": 2.7479439477518344e-05, |
|
"loss": 0.6462, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.21764138340950012, |
|
"learning_rate": 2.7395795238512446e-05, |
|
"loss": 0.6527, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.21299828588962555, |
|
"learning_rate": 2.7312123923137667e-05, |
|
"loss": 0.638, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.20629416406154633, |
|
"learning_rate": 2.7228426477015447e-05, |
|
"loss": 0.6592, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.23245516419410706, |
|
"learning_rate": 2.714470384606258e-05, |
|
"loss": 0.6424, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.2076595425605774, |
|
"learning_rate": 2.706095697648048e-05, |
|
"loss": 0.6335, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.19132575392723083, |
|
"learning_rate": 2.6977186814744503e-05, |
|
"loss": 0.649, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.21425828337669373, |
|
"learning_rate": 2.6893394307593228e-05, |
|
"loss": 0.6485, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.24599085748195648, |
|
"learning_rate": 2.680958040201778e-05, |
|
"loss": 0.6719, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.2181704044342041, |
|
"learning_rate": 2.6725746045251126e-05, |
|
"loss": 0.6403, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.2156282663345337, |
|
"learning_rate": 2.6641892184757365e-05, |
|
"loss": 0.6414, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.210015669465065, |
|
"learning_rate": 2.655801976822102e-05, |
|
"loss": 0.6616, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.22476404905319214, |
|
"learning_rate": 2.6474129743536323e-05, |
|
"loss": 0.6642, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"eval_loss": 0.6780422329902649, |
|
"eval_runtime": 42.8508, |
|
"eval_samples_per_second": 46.674, |
|
"eval_steps_per_second": 0.373, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.2253371626138687, |
|
"learning_rate": 2.6390223058796497e-05, |
|
"loss": 0.6704, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.2072548270225525, |
|
"learning_rate": 2.6306300662283073e-05, |
|
"loss": 0.6319, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.1958586722612381, |
|
"learning_rate": 2.6222363502455133e-05, |
|
"loss": 0.6462, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.19193071126937866, |
|
"learning_rate": 2.6138412527938617e-05, |
|
"loss": 0.6408, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.235664963722229, |
|
"learning_rate": 2.605444868751558e-05, |
|
"loss": 0.6585, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 0.27757498621940613, |
|
"learning_rate": 2.5970472930113515e-05, |
|
"loss": 0.6437, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 0.19725674390792847, |
|
"learning_rate": 2.588648620479455e-05, |
|
"loss": 0.6533, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.20764590799808502, |
|
"learning_rate": 2.5802489460744817e-05, |
|
"loss": 0.6473, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.2171824425458908, |
|
"learning_rate": 2.571848364726363e-05, |
|
"loss": 0.6344, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.20881757140159607, |
|
"learning_rate": 2.563446971375283e-05, |
|
"loss": 0.6554, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.2502697706222534, |
|
"learning_rate": 2.5550448609706035e-05, |
|
"loss": 0.6767, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.26100224256515503, |
|
"learning_rate": 2.546642128469787e-05, |
|
"loss": 0.6651, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.22215646505355835, |
|
"learning_rate": 2.5382388688373288e-05, |
|
"loss": 0.6371, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.2387877255678177, |
|
"learning_rate": 2.529835177043682e-05, |
|
"loss": 0.6422, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.23950129747390747, |
|
"learning_rate": 2.5214311480641823e-05, |
|
"loss": 0.6602, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.2006569504737854, |
|
"learning_rate": 2.513026876877978e-05, |
|
"loss": 0.6513, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.1996818333864212, |
|
"learning_rate": 2.5046224584669537e-05, |
|
"loss": 0.6509, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.21665816009044647, |
|
"learning_rate": 2.496217987814656e-05, |
|
"loss": 0.6698, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.24070510268211365, |
|
"learning_rate": 2.4878135599052265e-05, |
|
"loss": 0.6668, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.19807234406471252, |
|
"learning_rate": 2.47940926972232e-05, |
|
"loss": 0.6494, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.2191707193851471, |
|
"learning_rate": 2.4710052122480345e-05, |
|
"loss": 0.6356, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.19569790363311768, |
|
"learning_rate": 2.4626014824618415e-05, |
|
"loss": 0.6587, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.2184823751449585, |
|
"learning_rate": 2.4541981753395045e-05, |
|
"loss": 0.6644, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.22695264220237732, |
|
"learning_rate": 2.445795385852015e-05, |
|
"loss": 0.6327, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.20581942796707153, |
|
"learning_rate": 2.4373932089645117e-05, |
|
"loss": 0.6506, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.21353425085544586, |
|
"learning_rate": 2.428991739635208e-05, |
|
"loss": 0.6466, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.252079039812088, |
|
"learning_rate": 2.420591072814326e-05, |
|
"loss": 0.6559, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.19642478227615356, |
|
"learning_rate": 2.4121913034430123e-05, |
|
"loss": 0.6441, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.19868357479572296, |
|
"learning_rate": 2.4037925264522766e-05, |
|
"loss": 0.6424, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.20668402314186096, |
|
"learning_rate": 2.3953948367619085e-05, |
|
"loss": 0.6698, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.21798357367515564, |
|
"learning_rate": 2.38699832927941e-05, |
|
"loss": 0.6346, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.23406192660331726, |
|
"learning_rate": 2.3786030988989257e-05, |
|
"loss": 0.6535, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.20458374917507172, |
|
"learning_rate": 2.3702092405001627e-05, |
|
"loss": 0.6426, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.2875809073448181, |
|
"learning_rate": 2.3618168489473258e-05, |
|
"loss": 0.6344, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 0.21651776134967804, |
|
"learning_rate": 2.3534260190880396e-05, |
|
"loss": 0.6403, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 0.22318010032176971, |
|
"learning_rate": 2.3450368457522787e-05, |
|
"loss": 0.6662, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.20754455029964447, |
|
"learning_rate": 2.3366494237513e-05, |
|
"loss": 0.64, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.20914232730865479, |
|
"learning_rate": 2.3282638478765634e-05, |
|
"loss": 0.6374, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.22443871200084686, |
|
"learning_rate": 2.3198802128986673e-05, |
|
"loss": 0.6576, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.2213885337114334, |
|
"learning_rate": 2.3114986135662728e-05, |
|
"loss": 0.6434, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.2547857463359833, |
|
"learning_rate": 2.3031191446050348e-05, |
|
"loss": 0.6519, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.2463701069355011, |
|
"learning_rate": 2.2947419007165355e-05, |
|
"loss": 0.642, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.24427345395088196, |
|
"learning_rate": 2.286366976577205e-05, |
|
"loss": 0.6661, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.20179685950279236, |
|
"learning_rate": 2.2779944668372596e-05, |
|
"loss": 0.6619, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.2376372367143631, |
|
"learning_rate": 2.2696244661196285e-05, |
|
"loss": 0.6364, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.22518636286258698, |
|
"learning_rate": 2.2612570690188828e-05, |
|
"loss": 0.6619, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.21611256897449493, |
|
"learning_rate": 2.252892370100172e-05, |
|
"loss": 0.665, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.19709035754203796, |
|
"learning_rate": 2.2445304638981483e-05, |
|
"loss": 0.6461, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.21152737736701965, |
|
"learning_rate": 2.236171444915905e-05, |
|
"loss": 0.6564, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.23283518850803375, |
|
"learning_rate": 2.227815407623903e-05, |
|
"loss": 0.6653, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"eval_loss": 0.6763524413108826, |
|
"eval_runtime": 43.1507, |
|
"eval_samples_per_second": 46.349, |
|
"eval_steps_per_second": 0.371, |
|
"step": 6000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 10345, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"total_flos": 1.519096937367942e+19, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|
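Note: the object above appears to be a Hugging Face Transformers trainer_state.json, saved with the checkpoint at global step 6000 of max_steps 10345 (epoch 2.9 of 5 planned epochs). Below is a minimal sketch of how one might load it and plot the train/eval loss curves; the filename, the output path, and the use of matplotlib are assumptions for illustration, not part of the log itself.

import json

import matplotlib.pyplot as plt

# Load the trainer state written alongside a checkpoint
# (path is an assumption; adjust to wherever the file lives).
with open("trainer_state.json") as fh:
    state = json.load(fh)

# log_history mixes two record types: training records, emitted every
# `logging_steps` (10) steps with a "loss" key, and evaluation records,
# emitted every `eval_steps` (500) steps with an "eval_loss" key.
train = [(r["step"], r["loss"]) for r in state["log_history"] if "loss" in r]
evals = [(r["step"], r["eval_loss"]) for r in state["log_history"] if "eval_loss" in r]

plt.plot(*zip(*train), label="train loss")
plt.plot(*zip(*evals), marker="o", label="eval loss")
plt.xlabel("global step")
plt.ylabel("loss")
plt.legend()
plt.savefig("loss_curve.png")  # assumed output path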