{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.6367949393779653,
  "global_step": 2500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 3.599516044570672e-07,
      "loss": 2.3879,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6830783438602115e-07,
      "loss": 2.3525,
      "step": 20
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.316921656139787e-07,
      "loss": 2.2352,
      "step": 30
    },
    {
      "epoch": 0.04,
      "learning_rate": 5.766640643149751e-07,
      "loss": 2.2506,
      "step": 40
    },
    {
      "epoch": 0.05,
      "learning_rate": 6.115469789851804e-07,
      "loss": 2.182,
      "step": 50
    },
    {
      "epoch": 0.06,
      "learning_rate": 6.400483955429327e-07,
      "loss": 2.1697,
      "step": 60
    },
    {
      "epoch": 0.07,
      "learning_rate": 6.641459998837218e-07,
      "loss": 2.1285,
      "step": 70
    },
    {
      "epoch": 0.08,
      "learning_rate": 6.850202942439291e-07,
      "loss": 2.1158,
      "step": 80
    },
    {
      "epoch": 0.09,
      "learning_rate": 7.034327267708902e-07,
      "loss": 2.1371,
      "step": 90
    },
    {
      "epoch": 0.11,
      "learning_rate": 7.199032089141344e-07,
      "loss": 2.1271,
      "step": 100
    },
    {
      "epoch": 0.12,
      "learning_rate": 7.348025723496237e-07,
      "loss": 2.101,
      "step": 110
    },
    {
      "epoch": 0.13,
      "learning_rate": 7.484046254718867e-07,
      "loss": 2.1176,
      "step": 120
    },
    {
      "epoch": 0.14,
      "learning_rate": 7.60917301394197e-07,
      "loss": 2.0376,
      "step": 130
    },
    {
      "epoch": 0.15,
      "learning_rate": 7.725022298126757e-07,
      "loss": 2.1025,
      "step": 140
    },
    {
      "epoch": 0.16,
      "learning_rate": 7.832875401420919e-07,
      "loss": 2.1164,
      "step": 150
    },
    {
      "epoch": 0.17,
      "learning_rate": 7.933765241728831e-07,
      "loss": 2.0634,
      "step": 160
    },
    {
      "epoch": 0.18,
      "learning_rate": 8.028536679096446e-07,
      "loss": 2.0979,
      "step": 170
    },
    {
      "epoch": 0.19,
      "learning_rate": 8.117889566998443e-07,
      "loss": 2.0704,
      "step": 180
    },
    {
      "epoch": 0.2,
      "learning_rate": 8.202410148252902e-07,
      "loss": 2.0474,
      "step": 190
    },
    {
      "epoch": 0.21,
      "learning_rate": 8.282594388430883e-07,
      "loss": 2.0705,
      "step": 200
    },
    {
      "epoch": 0.22,
      "learning_rate": 8.358865610406332e-07,
      "loss": 2.0443,
      "step": 210
    },
    {
      "epoch": 0.23,
      "learning_rate": 8.431588022785777e-07,
      "loss": 2.1009,
      "step": 220
    },
    {
      "epoch": 0.24,
      "learning_rate": 8.501077238654499e-07,
      "loss": 2.0686,
      "step": 230
    },
    {
      "epoch": 0.25,
      "learning_rate": 8.567608554008407e-07,
      "loss": 2.0261,
      "step": 240
    },
    {
      "epoch": 0.26,
      "learning_rate": 8.631423535132934e-07,
      "loss": 2.0785,
      "step": 250
    },
    {
      "epoch": 0.26,
      "eval_webgpt_accuracy": 0.5190179277190983,
      "eval_webgpt_loss": 2.201171875,
      "eval_webgpt_runtime": 429.5981,
      "eval_webgpt_samples_per_second": 9.115,
      "eval_webgpt_steps_per_second": 0.761,
      "step": 250
    },
    {
      "epoch": 0.26,
      "eval_prompt_dialogue_accuracy": 0.5585842097604062,
      "eval_prompt_dialogue_loss": 1.857421875,
      "eval_prompt_dialogue_runtime": 1052.8834,
      "eval_prompt_dialogue_samples_per_second": 9.791,
      "eval_prompt_dialogue_steps_per_second": 0.817,
      "step": 250
    },
    {
      "epoch": 0.27,
      "learning_rate": 8.692735313231511e-07,
      "loss": 2.0753,
      "step": 260
    },
    {
      "epoch": 0.28,
      "learning_rate": 8.751732879278018e-07,
      "loss": 2.0926,
      "step": 270
    },
    {
      "epoch": 0.3,
      "learning_rate": 8.808584597416296e-07,
      "loss": 2.0557,
      "step": 280
    },
    {
      "epoch": 0.31,
      "learning_rate": 8.863441101555992e-07,
      "loss": 1.969,
      "step": 290
    },
    {
      "epoch": 0.32,
      "learning_rate": 8.916437700710459e-07,
      "loss": 2.049,
      "step": 300
    },
    {
      "epoch": 0.33,
      "learning_rate": 8.967696389785231e-07,
      "loss": 2.063,
      "step": 310
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.017327541018372e-07,
      "loss": 2.0055,
      "step": 320
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.065431335065352e-07,
      "loss": 2.0109,
      "step": 330
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.112098978385987e-07,
      "loss": 2.0601,
      "step": 340
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.157413744118348e-07,
      "loss": 2.0343,
      "step": 350
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.201451866287983e-07,
      "loss": 2.0123,
      "step": 360
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.244283311473209e-07,
      "loss": 2.0567,
      "step": 370
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.285972447542441e-07,
      "loss": 2.0171,
      "step": 380
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.326578625511087e-07,
      "loss": 2.0287,
      "step": 390
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.366156687720423e-07,
      "loss": 2.0357,
      "step": 400
    },
    {
      "epoch": 0.43,
      "learning_rate": 9.404757413257927e-07,
      "loss": 2.0556,
      "step": 410
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.442427909695872e-07,
      "loss": 2.0166,
      "step": 420
    },
    {
      "epoch": 0.45,
      "learning_rate": 9.479211958729468e-07,
      "loss": 2.028,
      "step": 430
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.515150322075316e-07,
      "loss": 2.0044,
      "step": 440
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.550281012990034e-07,
      "loss": 2.0301,
      "step": 450
    },
    {
      "epoch": 0.48,
      "learning_rate": 9.584639537944038e-07,
      "loss": 2.0321,
      "step": 460
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.61825911230254e-07,
      "loss": 1.9889,
      "step": 470
    },
    {
      "epoch": 0.51,
      "learning_rate": 9.651170853297947e-07,
      "loss": 2.0272,
      "step": 480
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.683403953103762e-07,
      "loss": 2.0319,
      "step": 490
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.714985834422475e-07,
      "loss": 2.0251,
      "step": 500
    },
    {
      "epoch": 0.53,
      "eval_webgpt_accuracy": 0.522718880616189,
      "eval_webgpt_loss": 2.181640625,
      "eval_webgpt_runtime": 433.6644,
      "eval_webgpt_samples_per_second": 9.03,
      "eval_webgpt_steps_per_second": 0.754,
      "step": 500
    },
    {
      "epoch": 0.53,
      "eval_prompt_dialogue_accuracy": 0.5654158849986294,
      "eval_prompt_dialogue_loss": 1.810546875,
      "eval_prompt_dialogue_runtime": 1051.7495,
      "eval_prompt_dialogue_samples_per_second": 9.802,
      "eval_prompt_dialogue_steps_per_second": 0.818,
      "step": 500
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.745942290665561e-07,
      "loss": 2.0089,
      "step": 510
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.776297612521052e-07,
      "loss": 2.0385,
      "step": 520
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.80607470246476e-07,
      "loss": 1.9865,
      "step": 530
    },
    {
      "epoch": 0.57,
      "learning_rate": 9.835295178567558e-07,
      "loss": 1.9859,
      "step": 540
    },
    {
      "epoch": 0.58,
      "learning_rate": 9.863979468777368e-07,
      "loss": 2.0115,
      "step": 550
    },
    {
      "epoch": 0.59,
      "learning_rate": 9.892146896705837e-07,
      "loss": 2.004,
      "step": 560
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.919815759822018e-07,
      "loss": 2.0179,
      "step": 570
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.94700340084553e-07,
      "loss": 2.0083,
      "step": 580
    },
    {
      "epoch": 0.62,
      "learning_rate": 9.97372627303682e-07,
      "loss": 2.0227,
      "step": 590
    },
    {
      "epoch": 0.63,
      "learning_rate": 9.999999999999997e-07,
      "loss": 1.9993,
      "step": 600
    },
    {
      "epoch": 0.64,
      "learning_rate": 9.959893048128342e-07,
      "loss": 2.0316,
      "step": 610
    },
    {
      "epoch": 0.65,
      "learning_rate": 9.915329768270946e-07,
      "loss": 1.9877,
      "step": 620
    },
    {
      "epoch": 0.66,
      "learning_rate": 9.870766488413547e-07,
      "loss": 1.9862,
      "step": 630
    },
    {
      "epoch": 0.67,
      "learning_rate": 9.82620320855615e-07,
      "loss": 2.0064,
      "step": 640
    },
    {
      "epoch": 0.69,
      "learning_rate": 9.781639928698751e-07,
      "loss": 2.0837,
      "step": 650
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.737076648841355e-07,
      "loss": 1.9865,
      "step": 660
    },
    {
      "epoch": 0.71,
      "learning_rate": 9.692513368983956e-07,
      "loss": 2.0137,
      "step": 670
    },
    {
      "epoch": 0.72,
      "learning_rate": 9.64795008912656e-07,
      "loss": 2.0423,
      "step": 680
    },
    {
      "epoch": 0.73,
      "learning_rate": 9.603386809269163e-07,
      "loss": 1.992,
      "step": 690
    },
    {
      "epoch": 0.74,
      "learning_rate": 9.558823529411764e-07,
      "loss": 1.9865,
      "step": 700
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.514260249554367e-07,
      "loss": 2.0152,
      "step": 710
    },
    {
      "epoch": 0.76,
      "learning_rate": 9.46969696969697e-07,
      "loss": 2.0183,
      "step": 720
    },
    {
      "epoch": 0.77,
      "learning_rate": 9.425133689839572e-07,
      "loss": 2.0077,
      "step": 730
    },
    {
      "epoch": 0.78,
      "learning_rate": 9.380570409982175e-07,
      "loss": 1.9948,
      "step": 740
    },
    {
      "epoch": 0.79,
      "learning_rate": 9.336007130124776e-07,
      "loss": 2.0004,
      "step": 750
    },
    {
      "epoch": 0.79,
      "eval_webgpt_accuracy": 0.5246072651558654,
      "eval_webgpt_loss": 2.171875,
      "eval_webgpt_runtime": 427.6682,
      "eval_webgpt_samples_per_second": 9.157,
      "eval_webgpt_steps_per_second": 0.765,
      "step": 750
    },
    {
      "epoch": 0.79,
      "eval_prompt_dialogue_accuracy": 0.5691780443314355,
      "eval_prompt_dialogue_loss": 1.78515625,
      "eval_prompt_dialogue_runtime": 1055.9887,
      "eval_prompt_dialogue_samples_per_second": 9.762,
      "eval_prompt_dialogue_steps_per_second": 0.814,
      "step": 750
    },
    {
      "epoch": 0.8,
      "learning_rate": 9.29144385026738e-07,
      "loss": 2.0092,
      "step": 760
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.246880570409982e-07,
      "loss": 1.9648,
      "step": 770
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.202317290552585e-07,
      "loss": 2.0095,
      "step": 780
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.157754010695186e-07,
      "loss": 1.9805,
      "step": 790
    },
    {
      "epoch": 0.84,
      "learning_rate": 9.11319073083779e-07,
      "loss": 1.999,
      "step": 800
    },
    {
      "epoch": 0.85,
      "learning_rate": 9.068627450980392e-07,
      "loss": 2.0067,
      "step": 810
    },
    {
      "epoch": 0.86,
      "learning_rate": 9.024064171122994e-07,
      "loss": 1.9908,
      "step": 820
    },
    {
      "epoch": 0.88,
      "learning_rate": 8.979500891265597e-07,
      "loss": 1.9952,
      "step": 830
    },
    {
      "epoch": 0.89,
      "learning_rate": 8.9349376114082e-07,
      "loss": 1.9924,
      "step": 840
    },
    {
      "epoch": 0.9,
      "learning_rate": 8.890374331550802e-07,
      "loss": 2.022,
      "step": 850
    },
    {
      "epoch": 0.91,
      "learning_rate": 8.845811051693403e-07,
      "loss": 2.0003,
      "step": 860
    },
    {
      "epoch": 0.92,
      "learning_rate": 8.801247771836007e-07,
      "loss": 2.0315,
      "step": 870
    },
    {
      "epoch": 0.93,
      "learning_rate": 8.756684491978609e-07,
      "loss": 1.9881,
      "step": 880
    },
    {
      "epoch": 0.94,
      "learning_rate": 8.712121212121211e-07,
      "loss": 1.946,
      "step": 890
    },
    {
      "epoch": 0.95,
      "learning_rate": 8.667557932263814e-07,
      "loss": 2.0094,
      "step": 900
    },
    {
      "epoch": 0.96,
      "learning_rate": 8.622994652406417e-07,
      "loss": 1.9853,
      "step": 910
    },
    {
      "epoch": 0.97,
      "learning_rate": 8.578431372549019e-07,
      "loss": 1.9821,
      "step": 920
    },
    {
      "epoch": 0.98,
      "learning_rate": 8.533868092691621e-07,
      "loss": 1.9795,
      "step": 930
    },
    {
      "epoch": 0.99,
      "learning_rate": 8.489304812834224e-07,
      "loss": 1.9971,
      "step": 940
    },
    {
      "epoch": 1.0,
      "learning_rate": 8.444741532976827e-07,
      "loss": 2.0708,
      "step": 950
    },
    {
      "epoch": 1.01,
      "learning_rate": 8.400178253119428e-07,
      "loss": 1.9623,
      "step": 960
    },
    {
      "epoch": 1.02,
      "learning_rate": 8.355614973262032e-07,
      "loss": 1.9546,
      "step": 970
    },
    {
      "epoch": 1.03,
      "learning_rate": 8.311051693404634e-07,
      "loss": 1.9878,
      "step": 980
    },
    {
      "epoch": 1.04,
      "learning_rate": 8.266488413547237e-07,
      "loss": 1.9843,
      "step": 990
    },
    {
      "epoch": 1.05,
      "learning_rate": 8.221925133689838e-07,
      "loss": 1.9807,
      "step": 1000
    },
    {
      "epoch": 1.05,
      "eval_webgpt_accuracy": 0.5253906990392405,
      "eval_webgpt_loss": 2.16796875,
      "eval_webgpt_runtime": 428.1397,
      "eval_webgpt_samples_per_second": 9.147,
      "eval_webgpt_steps_per_second": 0.764,
      "step": 1000
    },
    {
      "epoch": 1.05,
      "eval_prompt_dialogue_accuracy": 0.5718087356550049,
      "eval_prompt_dialogue_loss": 1.7685546875,
      "eval_prompt_dialogue_runtime": 1050.2103,
      "eval_prompt_dialogue_samples_per_second": 9.816,
      "eval_prompt_dialogue_steps_per_second": 0.819,
      "step": 1000
    },
    {
      "epoch": 1.07,
      "learning_rate": 8.177361853832442e-07,
      "loss": 1.9773,
      "step": 1010
    },
    {
      "epoch": 1.08,
      "learning_rate": 8.132798573975044e-07,
      "loss": 2.0443,
      "step": 1020
    },
    {
      "epoch": 1.09,
      "learning_rate": 8.088235294117646e-07,
      "loss": 1.9528,
      "step": 1030
    },
    {
      "epoch": 1.1,
      "learning_rate": 8.043672014260249e-07,
      "loss": 1.9365,
      "step": 1040
    },
    {
      "epoch": 1.11,
      "learning_rate": 7.999108734402852e-07,
      "loss": 1.9579,
      "step": 1050
    },
    {
      "epoch": 1.12,
      "learning_rate": 7.954545454545454e-07,
      "loss": 1.974,
      "step": 1060
    },
    {
      "epoch": 1.13,
      "learning_rate": 7.909982174688057e-07,
      "loss": 1.9537,
      "step": 1070
    },
    {
      "epoch": 1.14,
      "learning_rate": 7.865418894830659e-07,
      "loss": 1.9373,
      "step": 1080
    },
    {
      "epoch": 1.15,
      "learning_rate": 7.820855614973262e-07,
      "loss": 1.9555,
      "step": 1090
    },
    {
      "epoch": 1.16,
      "learning_rate": 7.776292335115863e-07,
      "loss": 1.973,
      "step": 1100
    },
    {
      "epoch": 1.17,
      "learning_rate": 7.731729055258467e-07,
      "loss": 1.9401,
      "step": 1110
    },
    {
      "epoch": 1.18,
      "learning_rate": 7.687165775401069e-07,
      "loss": 1.9508,
      "step": 1120
    },
    {
      "epoch": 1.19,
      "learning_rate": 7.642602495543672e-07,
      "loss": 1.934,
      "step": 1130
    },
    {
      "epoch": 1.2,
      "learning_rate": 7.598039215686273e-07,
      "loss": 1.9325,
      "step": 1140
    },
    {
      "epoch": 1.21,
      "learning_rate": 7.553475935828877e-07,
      "loss": 1.9914,
      "step": 1150
    },
    {
      "epoch": 1.22,
      "learning_rate": 7.508912655971479e-07,
      "loss": 1.978,
      "step": 1160
    },
    {
      "epoch": 1.23,
      "learning_rate": 7.464349376114081e-07,
      "loss": 1.9442,
      "step": 1170
    },
    {
      "epoch": 1.24,
      "learning_rate": 7.419786096256684e-07,
      "loss": 1.9598,
      "step": 1180
    },
    {
      "epoch": 1.26,
      "learning_rate": 7.375222816399287e-07,
      "loss": 1.9529,
      "step": 1190
    },
    {
      "epoch": 1.27,
      "learning_rate": 7.330659536541889e-07,
      "loss": 1.9675,
      "step": 1200
    },
    {
      "epoch": 1.28,
      "learning_rate": 7.286096256684492e-07,
      "loss": 1.9433,
      "step": 1210
    },
    {
      "epoch": 1.29,
      "learning_rate": 7.241532976827094e-07,
      "loss": 1.9002,
      "step": 1220
    },
    {
      "epoch": 1.3,
      "learning_rate": 7.196969696969697e-07,
      "loss": 1.9349,
      "step": 1230
    },
    {
      "epoch": 1.31,
      "learning_rate": 7.152406417112298e-07,
      "loss": 1.9414,
      "step": 1240
    },
    {
      "epoch": 1.32,
      "learning_rate": 7.107843137254902e-07,
      "loss": 1.9555,
      "step": 1250
    },
    {
      "epoch": 1.32,
      "eval_webgpt_accuracy": 0.526057600642472,
      "eval_webgpt_loss": 2.1640625,
      "eval_webgpt_runtime": 428.0509,
      "eval_webgpt_samples_per_second": 9.148,
      "eval_webgpt_steps_per_second": 0.764,
      "step": 1250
    },
    {
      "epoch": 1.32,
      "eval_prompt_dialogue_accuracy": 0.5739159961148795,
      "eval_prompt_dialogue_loss": 1.755859375,
      "eval_prompt_dialogue_runtime": 1048.4647,
      "eval_prompt_dialogue_samples_per_second": 9.832,
      "eval_prompt_dialogue_steps_per_second": 0.82,
      "step": 1250
    },
    {
      "epoch": 1.33,
      "learning_rate": 7.063279857397504e-07,
      "loss": 1.983,
      "step": 1260
    },
    {
      "epoch": 1.34,
      "learning_rate": 7.018716577540107e-07,
      "loss": 1.983,
      "step": 1270
    },
    {
      "epoch": 1.35,
      "learning_rate": 6.974153297682709e-07,
      "loss": 1.9449,
      "step": 1280
    },
    {
      "epoch": 1.36,
      "learning_rate": 6.929590017825312e-07,
      "loss": 1.9886,
      "step": 1290
    },
    {
      "epoch": 1.37,
      "learning_rate": 6.885026737967914e-07,
      "loss": 1.9334,
      "step": 1300
    },
    {
      "epoch": 1.38,
      "learning_rate": 6.840463458110517e-07,
      "loss": 1.9732,
      "step": 1310
    },
    {
      "epoch": 1.39,
      "learning_rate": 6.795900178253119e-07,
      "loss": 1.9457,
      "step": 1320
    },
    {
      "epoch": 1.4,
      "learning_rate": 6.751336898395722e-07,
      "loss": 1.9691,
      "step": 1330
    },
    {
      "epoch": 1.41,
      "learning_rate": 6.706773618538324e-07,
      "loss": 2.0199,
      "step": 1340
    },
    {
      "epoch": 1.42,
      "learning_rate": 6.662210338680927e-07,
      "loss": 1.9492,
      "step": 1350
    },
    {
      "epoch": 1.43,
      "learning_rate": 6.617647058823529e-07,
      "loss": 1.9553,
      "step": 1360
    },
    {
      "epoch": 1.44,
      "learning_rate": 6.573083778966132e-07,
      "loss": 1.9766,
      "step": 1370
    },
    {
      "epoch": 1.46,
      "learning_rate": 6.528520499108734e-07,
      "loss": 1.9496,
      "step": 1380
    },
    {
      "epoch": 1.47,
      "learning_rate": 6.483957219251337e-07,
      "loss": 1.9677,
      "step": 1390
    },
    {
      "epoch": 1.48,
      "learning_rate": 6.439393939393939e-07,
      "loss": 1.9043,
      "step": 1400
    },
    {
      "epoch": 1.49,
      "learning_rate": 6.394830659536542e-07,
      "loss": 1.9571,
      "step": 1410
    },
    {
      "epoch": 1.5,
      "learning_rate": 6.350267379679144e-07,
      "loss": 1.9631,
      "step": 1420
    },
    {
      "epoch": 1.51,
      "learning_rate": 6.305704099821747e-07,
      "loss": 1.9587,
      "step": 1430
    },
    {
      "epoch": 1.52,
      "learning_rate": 6.261140819964349e-07,
      "loss": 1.9621,
      "step": 1440
    },
    {
      "epoch": 1.53,
      "learning_rate": 6.216577540106952e-07,
      "loss": 1.932,
      "step": 1450
    },
    {
      "epoch": 1.54,
      "learning_rate": 6.172014260249554e-07,
      "loss": 1.9682,
      "step": 1460
    },
    {
      "epoch": 1.55,
      "learning_rate": 6.127450980392157e-07,
      "loss": 1.9636,
      "step": 1470
    },
    {
      "epoch": 1.56,
      "learning_rate": 6.082887700534759e-07,
      "loss": 1.9538,
      "step": 1480
    },
    {
      "epoch": 1.57,
      "learning_rate": 6.038324420677362e-07,
      "loss": 1.9676,
      "step": 1490
    },
    {
      "epoch": 1.58,
      "learning_rate": 5.998217468805704e-07,
      "loss": 1.9459,
      "step": 1500
    },
    {
      "epoch": 1.58,
      "eval_webgpt_accuracy": 0.526490033682041,
      "eval_webgpt_loss": 2.162109375,
      "eval_webgpt_runtime": 435.1199,
      "eval_webgpt_samples_per_second": 9.0,
      "eval_webgpt_steps_per_second": 0.752,
      "step": 1500
    },
    {
      "epoch": 1.58,
      "eval_prompt_dialogue_accuracy": 0.5751027402282565,
      "eval_prompt_dialogue_loss": 1.748046875,
      "eval_prompt_dialogue_runtime": 1051.1439,
      "eval_prompt_dialogue_samples_per_second": 9.807,
      "eval_prompt_dialogue_steps_per_second": 0.818,
      "step": 1500
    },
    {
      "epoch": 1.59,
      "learning_rate": 5.953654188948306e-07,
      "loss": 1.9592,
      "step": 1510
    },
    {
      "epoch": 1.6,
      "learning_rate": 5.909090909090909e-07,
      "loss": 1.9291,
      "step": 1520
    },
    {
      "epoch": 1.61,
      "learning_rate": 5.864527629233511e-07,
      "loss": 1.9082,
      "step": 1530
    },
    {
      "epoch": 1.62,
      "learning_rate": 5.819964349376115e-07,
      "loss": 1.9357,
      "step": 1540
    },
    {
      "epoch": 1.63,
      "learning_rate": 5.775401069518716e-07,
      "loss": 1.9502,
      "step": 1550
    },
    {
      "epoch": 1.65,
      "learning_rate": 5.730837789661319e-07,
      "loss": 1.9892,
      "step": 1560
    },
    {
      "epoch": 1.66,
      "learning_rate": 5.686274509803921e-07,
      "loss": 1.983,
      "step": 1570
    },
    {
      "epoch": 1.67,
      "learning_rate": 5.641711229946524e-07,
      "loss": 1.9745,
      "step": 1580
    },
    {
      "epoch": 1.68,
      "learning_rate": 5.597147950089126e-07,
      "loss": 1.9541,
      "step": 1590
    },
    {
      "epoch": 1.69,
      "learning_rate": 5.552584670231729e-07,
      "loss": 1.9482,
      "step": 1600
    },
    {
      "epoch": 1.7,
      "learning_rate": 5.508021390374332e-07,
      "loss": 1.9341,
      "step": 1610
    },
    {
      "epoch": 1.71,
      "learning_rate": 5.463458110516934e-07,
      "loss": 1.971,
      "step": 1620
    },
    {
      "epoch": 1.72,
      "learning_rate": 5.418894830659536e-07,
      "loss": 1.9179,
      "step": 1630
    },
    {
      "epoch": 1.73,
      "learning_rate": 5.374331550802139e-07,
      "loss": 1.9049,
      "step": 1640
    },
    {
      "epoch": 1.74,
      "learning_rate": 5.329768270944741e-07,
      "loss": 1.9407,
      "step": 1650
    },
    {
      "epoch": 1.75,
      "learning_rate": 5.285204991087344e-07,
      "loss": 1.9338,
      "step": 1660
    },
    {
      "epoch": 1.76,
      "learning_rate": 5.240641711229946e-07,
      "loss": 1.9874,
      "step": 1670
    },
    {
      "epoch": 1.77,
      "learning_rate": 5.19607843137255e-07,
      "loss": 1.9699,
      "step": 1680
    },
    {
      "epoch": 1.78,
      "learning_rate": 5.151515151515151e-07,
      "loss": 1.9128,
      "step": 1690
    },
    {
      "epoch": 1.79,
      "learning_rate": 5.106951871657754e-07,
      "loss": 1.9382,
      "step": 1700
    },
    {
      "epoch": 1.8,
      "learning_rate": 5.062388591800356e-07,
      "loss": 1.9514,
      "step": 1710
    },
    {
      "epoch": 1.81,
      "learning_rate": 5.01782531194296e-07,
      "loss": 1.9499,
      "step": 1720
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.973262032085561e-07,
      "loss": 1.9458,
      "step": 1730
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.928698752228163e-07,
      "loss": 1.9095,
      "step": 1740
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.884135472370767e-07,
      "loss": 1.9237,
      "step": 1750
    },
    {
      "epoch": 1.85,
      "eval_webgpt_accuracy": 0.5268101464515921,
      "eval_webgpt_loss": 2.16015625,
      "eval_webgpt_runtime": 427.8942,
      "eval_webgpt_samples_per_second": 9.152,
      "eval_webgpt_steps_per_second": 0.764,
      "step": 1750
    },
    {
      "epoch": 1.85,
      "eval_prompt_dialogue_accuracy": 0.5762533856613786,
      "eval_prompt_dialogue_loss": 1.740234375,
      "eval_prompt_dialogue_runtime": 1049.6135,
      "eval_prompt_dialogue_samples_per_second": 9.822,
      "eval_prompt_dialogue_steps_per_second": 0.819,
      "step": 1750
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.839572192513369e-07,
      "loss": 1.9181,
      "step": 1760
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.795008912655971e-07,
      "loss": 1.9393,
      "step": 1770
    },
    {
      "epoch": 1.88,
      "learning_rate": 4.750445632798574e-07,
      "loss": 1.9278,
      "step": 1780
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.705882352941176e-07,
      "loss": 1.972,
      "step": 1790
    },
    {
      "epoch": 1.9,
      "learning_rate": 4.6613190730837784e-07,
      "loss": 1.9545,
      "step": 1800
    },
    {
      "epoch": 1.91,
      "learning_rate": 4.616755793226381e-07,
      "loss": 1.9324,
      "step": 1810
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.5721925133689835e-07,
      "loss": 1.9856,
      "step": 1820
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.5276292335115863e-07,
      "loss": 1.9013,
      "step": 1830
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.4830659536541886e-07,
      "loss": 1.9001,
      "step": 1840
    },
    {
      "epoch": 1.95,
      "learning_rate": 4.4385026737967914e-07,
      "loss": 1.9326,
      "step": 1850
    },
    {
      "epoch": 1.96,
      "learning_rate": 4.3939393939393937e-07,
      "loss": 1.9358,
      "step": 1860
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.3493761140819965e-07,
      "loss": 2.0019,
      "step": 1870
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.304812834224599e-07,
      "loss": 1.941,
      "step": 1880
    },
    {
      "epoch": 1.99,
      "learning_rate": 4.260249554367201e-07,
      "loss": 1.9325,
      "step": 1890
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.2112299465240644e-07,
      "loss": 2.0644,
      "step": 1900
    },
    {
      "epoch": 2.01,
      "learning_rate": 4.1666666666666667e-07,
      "loss": 1.9158,
      "step": 1910
    },
    {
      "epoch": 2.03,
      "learning_rate": 4.122103386809269e-07,
      "loss": 1.9409,
      "step": 1920
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.0775401069518717e-07,
      "loss": 1.926,
      "step": 1930
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.032976827094474e-07,
      "loss": 1.9212,
      "step": 1940
    },
    {
      "epoch": 2.06,
      "learning_rate": 3.988413547237077e-07,
      "loss": 1.9745,
      "step": 1950
    },
    {
      "epoch": 2.07,
      "learning_rate": 3.943850267379679e-07,
      "loss": 1.9079,
      "step": 1960
    },
    {
      "epoch": 2.08,
      "learning_rate": 3.899286987522282e-07,
      "loss": 1.9336,
      "step": 1970
    },
    {
      "epoch": 2.09,
      "learning_rate": 3.854723707664884e-07,
      "loss": 1.9242,
      "step": 1980
    },
    {
      "epoch": 2.1,
      "learning_rate": 3.8101604278074864e-07,
      "loss": 1.9357,
      "step": 1990
    },
    {
      "epoch": 2.11,
      "learning_rate": 3.765597147950089e-07,
      "loss": 1.9036,
      "step": 2000
    },
    {
      "epoch": 2.11,
      "eval_webgpt_accuracy": 0.5268452465359726,
      "eval_webgpt_loss": 2.158203125,
      "eval_webgpt_runtime": 428.2029,
      "eval_webgpt_samples_per_second": 9.145,
      "eval_webgpt_steps_per_second": 0.764,
      "step": 2000
    },
    {
      "epoch": 2.11,
      "eval_prompt_dialogue_accuracy": 0.5769595660938633,
      "eval_prompt_dialogue_loss": 1.7373046875,
      "eval_prompt_dialogue_runtime": 1052.6454,
      "eval_prompt_dialogue_samples_per_second": 9.793,
      "eval_prompt_dialogue_steps_per_second": 0.817,
      "step": 2000
    },
    {
      "epoch": 2.12,
      "learning_rate": 3.7210338680926915e-07,
      "loss": 1.8764,
      "step": 2010
    },
    {
      "epoch": 2.13,
      "learning_rate": 3.6764705882352943e-07,
      "loss": 1.9615,
      "step": 2020
    },
    {
      "epoch": 2.14,
      "learning_rate": 3.6319073083778966e-07,
      "loss": 1.914,
      "step": 2030
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.5873440285204994e-07,
      "loss": 1.8962,
      "step": 2040
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.542780748663101e-07,
      "loss": 1.8999,
      "step": 2050
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.4982174688057034e-07,
      "loss": 1.9456,
      "step": 2060
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.453654188948306e-07,
      "loss": 1.9347,
      "step": 2070
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.4090909090909085e-07,
      "loss": 1.9102,
      "step": 2080
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.3645276292335113e-07,
      "loss": 1.9355,
      "step": 2090
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.3199643493761136e-07,
      "loss": 1.9204,
      "step": 2100
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.2754010695187164e-07,
      "loss": 1.9396,
      "step": 2110
    },
    {
      "epoch": 2.24,
      "learning_rate": 3.2308377896613187e-07,
      "loss": 1.9057,
      "step": 2120
    },
    {
      "epoch": 2.25,
      "learning_rate": 3.1862745098039215e-07,
      "loss": 1.9241,
      "step": 2130
    },
    {
      "epoch": 2.26,
      "learning_rate": 3.141711229946524e-07,
      "loss": 1.9383,
      "step": 2140
    },
    {
      "epoch": 2.27,
      "learning_rate": 3.097147950089126e-07,
      "loss": 1.9197,
      "step": 2150
    },
    {
      "epoch": 2.28,
      "learning_rate": 3.052584670231729e-07,
      "loss": 1.8983,
      "step": 2160
    },
    {
      "epoch": 2.29,
      "learning_rate": 3.008021390374331e-07,
      "loss": 1.9677,
      "step": 2170
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.963458110516934e-07,
      "loss": 1.9104,
      "step": 2180
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.918894830659536e-07,
      "loss": 1.8994,
      "step": 2190
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.874331550802139e-07,
      "loss": 1.9157,
      "step": 2200
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.829768270944741e-07,
      "loss": 1.918,
      "step": 2210
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.7852049910873435e-07,
      "loss": 1.9053,
      "step": 2220
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.7406417112299463e-07,
      "loss": 1.8939,
      "step": 2230
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.6960784313725486e-07,
      "loss": 1.8959,
      "step": 2240
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.6515151515151514e-07,
      "loss": 1.9326,
      "step": 2250
    },
    {
      "epoch": 2.37,
      "eval_webgpt_accuracy": 0.5270740990861342,
      "eval_webgpt_loss": 2.158203125,
      "eval_webgpt_runtime": 429.6721,
      "eval_webgpt_samples_per_second": 9.114,
      "eval_webgpt_steps_per_second": 0.761,
      "step": 2250
    },
    {
      "epoch": 2.37,
      "eval_prompt_dialogue_accuracy": 0.577335217985265,
      "eval_prompt_dialogue_loss": 1.7333984375,
      "eval_prompt_dialogue_runtime": 1050.2434,
      "eval_prompt_dialogue_samples_per_second": 9.816,
      "eval_prompt_dialogue_steps_per_second": 0.819,
      "step": 2250
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.6069518716577537e-07,
      "loss": 1.8933,
      "step": 2260
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.5623885918003565e-07,
      "loss": 1.9186,
      "step": 2270
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.517825311942959e-07,
      "loss": 1.9149,
      "step": 2280
    },
    {
      "epoch": 2.42,
      "learning_rate": 2.473262032085561e-07,
      "loss": 1.9288,
      "step": 2290
    },
    {
      "epoch": 2.43,
      "learning_rate": 2.428698752228164e-07,
      "loss": 1.9082,
      "step": 2300
    },
    {
      "epoch": 2.44,
      "learning_rate": 2.3841354723707664e-07,
      "loss": 1.9195,
      "step": 2310
    },
    {
      "epoch": 2.45,
      "learning_rate": 2.339572192513369e-07,
      "loss": 1.9262,
      "step": 2320
    },
    {
      "epoch": 2.46,
      "learning_rate": 2.2950089126559712e-07,
      "loss": 1.9096,
      "step": 2330
    },
    {
      "epoch": 2.47,
      "learning_rate": 2.2504456327985737e-07,
      "loss": 1.913,
      "step": 2340
    },
    {
      "epoch": 2.48,
      "learning_rate": 2.2058823529411763e-07,
      "loss": 1.9242,
      "step": 2350
    },
    {
      "epoch": 2.49,
      "learning_rate": 2.1613190730837788e-07,
      "loss": 1.9175,
      "step": 2360
    },
    {
      "epoch": 2.5,
      "learning_rate": 2.1167557932263814e-07,
      "loss": 1.8917,
      "step": 2370
    },
    {
      "epoch": 2.51,
      "learning_rate": 2.072192513368984e-07,
      "loss": 1.9024,
      "step": 2380
    },
    {
      "epoch": 2.52,
      "learning_rate": 2.0276292335115864e-07,
      "loss": 1.9243,
      "step": 2390
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.9830659536541887e-07,
      "loss": 1.9269,
      "step": 2400
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.9385026737967912e-07,
      "loss": 1.961,
      "step": 2410
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.8939393939393938e-07,
      "loss": 1.9414,
      "step": 2420
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.8493761140819963e-07,
      "loss": 1.9306,
      "step": 2430
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.804812834224599e-07,
      "loss": 1.9385,
      "step": 2440
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.7602495543672014e-07,
      "loss": 1.9304,
      "step": 2450
    },
    {
      "epoch": 2.59,
      "learning_rate": 1.715686274509804e-07,
      "loss": 1.9214,
      "step": 2460
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.6711229946524065e-07,
      "loss": 1.9854,
      "step": 2470
    },
    {
      "epoch": 2.62,
      "learning_rate": 1.6265597147950088e-07,
      "loss": 1.91,
      "step": 2480
    },
    {
      "epoch": 2.63,
      "learning_rate": 1.5819964349376113e-07,
      "loss": 1.9317,
      "step": 2490
    },
    {
      "epoch": 2.64,
      "learning_rate": 1.5463458110516933e-07,
      "loss": 1.9087,
      "step": 2500
    },
    {
      "epoch": 2.64,
      "eval_webgpt_accuracy": 0.5270825231063856,
      "eval_webgpt_loss": 2.15625,
      "eval_webgpt_runtime": 426.8235,
      "eval_webgpt_samples_per_second": 9.175,
      "eval_webgpt_steps_per_second": 0.766,
      "step": 2500
    },
    {
      "epoch": 2.64,
      "eval_prompt_dialogue_accuracy": 0.5776905643690232,
      "eval_prompt_dialogue_loss": 1.7314453125,
      "eval_prompt_dialogue_runtime": 1054.4289,
      "eval_prompt_dialogue_samples_per_second": 9.777,
      "eval_prompt_dialogue_steps_per_second": 0.816,
      "step": 2500
    }
  ],
  "max_steps": 2844,
  "num_train_epochs": 3,
  "total_flos": 147144228831232.0,
  "trial_name": null,
  "trial_params": null
}