{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9981331673926571,
  "eval_steps": 101,
  "global_step": 401,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "grad_norm": 53.25, "learning_rate": 5.000000000000001e-07, "loss": 1.9039, "step": 1 },
    { "epoch": 0.0, "eval_loss": 3.1495237350463867, "eval_runtime": 1687.5261, "eval_samples_per_second": 5.968, "eval_steps_per_second": 0.746, "step": 1 },
    { "epoch": 0.0, "grad_norm": 57.75, "learning_rate": 1.0000000000000002e-06, "loss": 2.1549, "step": 2 },
    { "epoch": 0.01, "grad_norm": 45.75, "learning_rate": 1.5e-06, "loss": 1.85, "step": 3 },
    { "epoch": 0.01, "grad_norm": 43.75, "learning_rate": 2.0000000000000003e-06, "loss": 1.8815, "step": 4 },
    { "epoch": 0.01, "grad_norm": 26.875, "learning_rate": 2.5e-06, "loss": 1.7001, "step": 5 },
    { "epoch": 0.01, "grad_norm": 15.8125, "learning_rate": 3e-06, "loss": 1.4792, "step": 6 },
    { "epoch": 0.02, "grad_norm": 12.25, "learning_rate": 3.5e-06, "loss": 1.6061, "step": 7 },
    { "epoch": 0.02, "grad_norm": 10.3125, "learning_rate": 4.000000000000001e-06, "loss": 1.4366, "step": 8 },
    { "epoch": 0.02, "grad_norm": 8.0, "learning_rate": 4.5e-06, "loss": 1.4531, "step": 9 },
    { "epoch": 0.02, "grad_norm": 7.0, "learning_rate": 5e-06, "loss": 1.5216, "step": 10 },
    { "epoch": 0.03, "grad_norm": 6.96875, "learning_rate": 4.9999193036470786e-06, "loss": 1.2768, "step": 11 },
    { "epoch": 0.03, "grad_norm": 7.09375, "learning_rate": 4.999677219797834e-06, "loss": 1.3153, "step": 12 },
    { "epoch": 0.03, "grad_norm": 5.875, "learning_rate": 4.999273764080493e-06, "loss": 1.425, "step": 13 },
    { "epoch": 0.03, "grad_norm": 6.40625, "learning_rate": 4.9987089625409815e-06, "loss": 1.3616, "step": 14 },
    { "epoch": 0.04, "grad_norm": 5.40625, "learning_rate": 4.997982851641236e-06, "loss": 1.3301, "step": 15 },
    { "epoch": 0.04, "grad_norm": 5.5625, "learning_rate": 4.99709547825686e-06, "loss": 1.4655, "step": 16 },
    { "epoch": 0.04, "grad_norm": 5.6875, "learning_rate": 4.996046899674091e-06, "loss": 1.3226, "step": 17 },
    { "epoch": 0.04, "grad_norm": 4.75, "learning_rate": 4.9948371835861e-06, "loss": 1.3406, "step": 18 },
    { "epoch": 0.05, "grad_norm": 4.4375, "learning_rate": 4.993466408088629e-06, "loss": 1.304, "step": 19 },
    { "epoch": 0.05, "grad_norm": 4.75, "learning_rate": 4.991934661674945e-06, "loss": 1.2712, "step": 20 },
    { "epoch": 0.05, "grad_norm": 4.53125, "learning_rate": 4.990242043230129e-06, "loss": 1.3149, "step": 21 },
    { "epoch": 0.05, "grad_norm": 4.46875, "learning_rate": 4.988388662024687e-06, "loss": 1.2938, "step": 22 },
    { "epoch": 0.06, "grad_norm": 4.34375, "learning_rate": 4.9863746377075025e-06, "loss": 1.242, "step": 23 },
    { "epoch": 0.06, "grad_norm": 4.46875, "learning_rate": 4.98420010029811e-06, "loss": 1.2161, "step": 24 },
    { "epoch": 0.06, "grad_norm": 4.15625, "learning_rate": 4.981865190178299e-06, "loss": 1.294, "step": 25 },
    { "epoch": 0.06, "grad_norm": 4.125, "learning_rate": 4.979370058083056e-06, "loss": 1.1972, "step": 26 },
    { "epoch": 0.07, "grad_norm": 3.890625, "learning_rate": 4.976714865090827e-06, "loss": 1.2295, "step": 27 },
    { "epoch": 0.07, "grad_norm": 4.09375, "learning_rate": 4.973899782613127e-06, "loss": 1.1852, "step": 28 },
    { "epoch": 0.07, "grad_norm": 4.28125, "learning_rate": 4.970924992383465e-06, "loss": 1.323, "step": 29 },
    { "epoch": 0.07, "grad_norm": 4.03125, "learning_rate": 4.96779068644562e-06, "loss": 1.3008, "step": 30 },
    { "epoch": 0.08, "grad_norm": 3.875, "learning_rate": 4.964497067141239e-06, "loss": 1.2526, "step": 31 },
    { "epoch": 0.08, "grad_norm": 3.921875, "learning_rate": 4.961044347096773e-06, "loss": 1.3551, "step": 32 },
    { "epoch": 0.08, "grad_norm": 3.84375, "learning_rate": 4.957432749209755e-06, "loss": 1.1906, "step": 33 },
    { "epoch": 0.08, "grad_norm": 3.796875, "learning_rate": 4.953662506634408e-06, "loss": 1.2391, "step": 34 },
    { "epoch": 0.09, "grad_norm": 4.0, "learning_rate": 4.949733862766591e-06, "loss": 1.1611, "step": 35 },
    { "epoch": 0.09, "grad_norm": 4.03125, "learning_rate": 4.94564707122809e-06, "loss": 1.213, "step": 36 },
    { "epoch": 0.09, "grad_norm": 3.84375, "learning_rate": 4.941402395850244e-06, "loss": 1.1394, "step": 37 },
    { "epoch": 0.09, "grad_norm": 3.796875, "learning_rate": 4.93700011065691e-06, "loss": 1.2633, "step": 38 },
    { "epoch": 0.1, "grad_norm": 3.6875, "learning_rate": 4.9324404998467755e-06, "loss": 1.1892, "step": 39 },
    { "epoch": 0.1, "grad_norm": 3.734375, "learning_rate": 4.927723857775012e-06, "loss": 1.2087, "step": 40 },
    { "epoch": 0.1, "grad_norm": 3.84375, "learning_rate": 4.922850488934268e-06, "loss": 1.2898, "step": 41 },
    { "epoch": 0.1, "grad_norm": 3.765625, "learning_rate": 4.917820707935019e-06, "loss": 1.2064, "step": 42 },
    { "epoch": 0.11, "grad_norm": 3.71875, "learning_rate": 4.912634839485251e-06, "loss": 1.1967, "step": 43 },
    { "epoch": 0.11, "grad_norm": 3.625, "learning_rate": 4.907293218369499e-06, "loss": 1.215, "step": 44 },
    { "epoch": 0.11, "grad_norm": 4.0, "learning_rate": 4.901796189427238e-06, "loss": 1.1548, "step": 45 },
    { "epoch": 0.11, "grad_norm": 3.53125, "learning_rate": 4.896144107530618e-06, "loss": 1.1007, "step": 46 },
    { "epoch": 0.12, "grad_norm": 3.53125, "learning_rate": 4.890337337561556e-06, "loss": 1.0993, "step": 47 },
    { "epoch": 0.12, "grad_norm": 4.25, "learning_rate": 4.884376254388179e-06, "loss": 1.0616, "step": 48 },
    { "epoch": 0.12, "grad_norm": 3.984375, "learning_rate": 4.878261242840624e-06, "loss": 1.1689, "step": 49 },
    { "epoch": 0.12, "grad_norm": 3.90625, "learning_rate": 4.8719926976861934e-06, "loss": 1.1147, "step": 50 },
    { "epoch": 0.13, "grad_norm": 3.890625, "learning_rate": 4.865571023603875e-06, "loss": 1.0578, "step": 51 },
    { "epoch": 0.13, "grad_norm": 4.21875, "learning_rate": 4.858996635158211e-06, "loss": 1.1676, "step": 52 },
    { "epoch": 0.13, "grad_norm": 4.03125, "learning_rate": 4.852269956772536e-06, "loss": 1.0989, "step": 53 },
    { "epoch": 0.13, "grad_norm": 3.6875, "learning_rate": 4.845391422701582e-06, "loss": 1.2259, "step": 54 },
    { "epoch": 0.14, "grad_norm": 4.03125, "learning_rate": 4.838361477003439e-06, "loss": 1.1906, "step": 55 },
    { "epoch": 0.14, "grad_norm": 3.828125, "learning_rate": 4.83118057351089e-06, "loss": 1.0726, "step": 56 },
    { "epoch": 0.14, "grad_norm": 3.828125, "learning_rate": 4.823849175802114e-06, "loss": 1.1111, "step": 57 },
    { "epoch": 0.14, "grad_norm": 3.640625, "learning_rate": 4.816367757170754e-06, "loss": 1.3362, "step": 58 },
    { "epoch": 0.15, "grad_norm": 3.671875, "learning_rate": 4.808736800595372e-06, "loss": 1.1101, "step": 59 },
    { "epoch": 0.15, "grad_norm": 3.796875, "learning_rate": 4.800956798708256e-06, "loss": 1.131, "step": 60 },
    { "epoch": 0.15, "grad_norm": 3.890625, "learning_rate": 4.793028253763633e-06, "loss": 1.0771, "step": 61 },
    { "epoch": 0.15, "grad_norm": 4.3125, "learning_rate": 4.784951677605229e-06, "loss": 1.1331, "step": 62 },
    { "epoch": 0.16, "grad_norm": 4.03125, "learning_rate": 4.776727591633235e-06, "loss": 1.1704, "step": 63 },
    { "epoch": 0.16, "grad_norm": 3.875, "learning_rate": 4.76835652677065e-06, "loss": 1.1436, "step": 64 },
    { "epoch": 0.16, "grad_norm": 3.65625, "learning_rate": 4.759839023428994e-06, "loss": 1.1931, "step": 65 },
    { "epoch": 0.16, "grad_norm": 4.15625, "learning_rate": 4.751175631473433e-06, "loss": 1.2202, "step": 66 },
    { "epoch": 0.17, "grad_norm": 3.84375, "learning_rate": 4.742366910187275e-06, "loss": 1.2113, "step": 67 },
    { "epoch": 0.17, "grad_norm": 3.859375, "learning_rate": 4.733413428235864e-06, "loss": 1.1091, "step": 68 },
    { "epoch": 0.17, "grad_norm": 3.859375, "learning_rate": 4.724315763629874e-06, "loss": 1.0952, "step": 69 },
    { "epoch": 0.17, "grad_norm": 3.828125, "learning_rate": 4.715074503687987e-06, "loss": 1.2604, "step": 70 },
    { "epoch": 0.18, "grad_norm": 4.1875, "learning_rate": 4.705690244998981e-06, "loss": 1.177, "step": 71 },
    { "epoch": 0.18, "grad_norm": 3.78125, "learning_rate": 4.696163593383217e-06, "loss": 1.1761, "step": 72 },
    { "epoch": 0.18, "grad_norm": 3.9375, "learning_rate": 4.686495163853528e-06, "loss": 1.2345, "step": 73 },
    { "epoch": 0.18, "grad_norm": 3.859375, "learning_rate": 4.676685580575516e-06, "loss": 1.1289, "step": 74 },
    { "epoch": 0.19, "grad_norm": 3.71875, "learning_rate": 4.6667354768272565e-06, "loss": 1.1536, "step": 75 },
    { "epoch": 0.19, "grad_norm": 3.53125, "learning_rate": 4.656645494958415e-06, "loss": 1.1625, "step": 76 },
    { "epoch": 0.19, "grad_norm": 3.875, "learning_rate": 4.646416286348783e-06, "loss": 1.1441, "step": 77 },
    { "epoch": 0.19, "grad_norm": 3.71875, "learning_rate": 4.636048511366222e-06, "loss": 1.0877, "step": 78 },
    { "epoch": 0.2, "grad_norm": 3.96875, "learning_rate": 4.625542839324036e-06, "loss": 0.998, "step": 79 },
    { "epoch": 0.2, "grad_norm": 3.6875, "learning_rate": 4.61489994843776e-06, "loss": 1.1651, "step": 80 },
    { "epoch": 0.2, "grad_norm": 4.03125, "learning_rate": 4.604120525781376e-06, "loss": 1.1657, "step": 81 },
    { "epoch": 0.2, "grad_norm": 4.15625, "learning_rate": 4.593205267242962e-06, "loss": 1.1362, "step": 82 },
    { "epoch": 0.21, "grad_norm": 3.9375, "learning_rate": 4.582154877479762e-06, "loss": 1.1169, "step": 83 },
    { "epoch": 0.21, "grad_norm": 3.90625, "learning_rate": 4.570970069872695e-06, "loss": 1.1175, "step": 84 },
    { "epoch": 0.21, "grad_norm": 3.78125, "learning_rate": 4.55965156648031e-06, "loss": 1.0583, "step": 85 },
    { "epoch": 0.21, "grad_norm": 3.921875, "learning_rate": 4.548200097992161e-06, "loss": 1.0436, "step": 86 },
    { "epoch": 0.22, "grad_norm": 4.0625, "learning_rate": 4.5366164036816415e-06, "loss": 1.036, "step": 87 },
    { "epoch": 0.22, "grad_norm": 3.859375, "learning_rate": 4.524901231358261e-06, "loss": 1.2137, "step": 88 },
    { "epoch": 0.22, "grad_norm": 4.375, "learning_rate": 4.513055337319361e-06, "loss": 1.1137, "step": 89 },
    { "epoch": 0.22, "grad_norm": 3.984375, "learning_rate": 4.501079486301299e-06, "loss": 1.1229, "step": 90 },
    { "epoch": 0.23, "grad_norm": 3.734375, "learning_rate": 4.488974451430077e-06, "loss": 1.0657, "step": 91 },
    { "epoch": 0.23, "grad_norm": 3.671875, "learning_rate": 4.4767410141714265e-06, "loss": 1.1326, "step": 92 },
    { "epoch": 0.23, "grad_norm": 3.921875, "learning_rate": 4.464379964280365e-06, "loss": 1.1957, "step": 93 },
    { "epoch": 0.23, "grad_norm": 3.75, "learning_rate": 4.451892099750206e-06, "loss": 1.1726, "step": 94 },
    { "epoch": 0.24, "grad_norm": 3.96875, "learning_rate": 4.43927822676105e-06, "loss": 1.0951, "step": 95 },
    { "epoch": 0.24, "grad_norm": 3.921875, "learning_rate": 4.426539159627733e-06, "loss": 1.1512, "step": 96 },
    { "epoch": 0.24, "grad_norm": 4.03125, "learning_rate": 4.413675720747262e-06, "loss": 1.0428, "step": 97 },
    { "epoch": 0.24, "grad_norm": 3.875, "learning_rate": 4.40068874054572e-06, "loss": 1.1005, "step": 98 },
    { "epoch": 0.25, "grad_norm": 3.65625, "learning_rate": 4.387579057424655e-06, "loss": 1.0811, "step": 99 },
    { "epoch": 0.25, "grad_norm": 3.96875, "learning_rate": 4.374347517706963e-06, "loss": 1.1457, "step": 100 },
    { "epoch": 0.25, "grad_norm": 3.796875, "learning_rate": 4.36099497558224e-06, "loss": 0.9933, "step": 101 },
    { "epoch": 0.25, "eval_loss": 1.2402405738830566, "eval_runtime": 1697.4894, "eval_samples_per_second": 5.933, "eval_steps_per_second": 0.742, "step": 101 },
    { "epoch": 0.25, "grad_norm": 3.96875, "learning_rate": 4.3475222930516484e-06, "loss": 1.0559, "step": 102 },
    { "epoch": 0.26, "grad_norm": 4.0625, "learning_rate": 4.3339303398722645e-06, "loss": 1.1506, "step": 103 },
    { "epoch": 0.26, "grad_norm": 3.671875, "learning_rate": 4.320219993500927e-06, "loss": 1.0673, "step": 104 },
    { "epoch": 0.26, "grad_norm": 3.828125, "learning_rate": 4.306392139037598e-06, "loss": 1.0358, "step": 105 },
    { "epoch": 0.26, "grad_norm": 4.03125, "learning_rate": 4.292447669168215e-06, "loss": 1.2526, "step": 106 },
    { "epoch": 0.27, "grad_norm": 3.8125, "learning_rate": 4.278387484107067e-06, "loss": 1.1494, "step": 107 },
    { "epoch": 0.27, "grad_norm": 4.15625, "learning_rate": 4.26421249153868e-06, "loss": 1.1066, "step": 108 },
    { "epoch": 0.27, "grad_norm": 3.734375, "learning_rate": 4.2499236065592154e-06, "loss": 1.129, "step": 109 },
    { "epoch": 0.27, "grad_norm": 3.875, "learning_rate": 4.235521751617398e-06, "loss": 0.9961, "step": 110 },
    { "epoch": 0.28, "grad_norm": 3.953125, "learning_rate": 4.221007856454963e-06, "loss": 1.1145, "step": 111 },
    { "epoch": 0.28, "grad_norm": 3.96875, "learning_rate": 4.206382858046636e-06, "loss": 1.1684, "step": 112 },
    { "epoch": 0.28, "grad_norm": 3.984375, "learning_rate": 4.191647700539642e-06, "loss": 1.1654, "step": 113 },
    { "epoch": 0.28, "grad_norm": 3.640625, "learning_rate": 4.176803335192757e-06, "loss": 1.0714, "step": 114 },
    { "epoch": 0.29, "grad_norm": 3.53125, "learning_rate": 4.1618507203149e-06, "loss": 1.0937, "step": 115 },
    { "epoch": 0.29, "grad_norm": 3.859375, "learning_rate": 4.1467908212032586e-06, "loss": 1.0441, "step": 116 },
    { "epoch": 0.29, "grad_norm": 3.8125, "learning_rate": 4.1316246100809786e-06, "loss": 1.0788, "step": 117 },
    { "epoch": 0.29, "grad_norm": 4.0625, "learning_rate": 4.116353066034403e-06, "loss": 1.0276, "step": 118 },
    { "epoch": 0.3, "grad_norm": 3.78125, "learning_rate": 4.100977174949856e-06, "loss": 1.0827, "step": 119 },
    { "epoch": 0.3, "grad_norm": 3.90625, "learning_rate": 4.085497929450005e-06, "loss": 1.0748, "step": 120 },
    { "epoch": 0.3, "grad_norm": 3.78125, "learning_rate": 4.069916328829779e-06, "loss": 1.0433, "step": 121 },
    { "epoch": 0.3, "grad_norm": 4.0, "learning_rate": 4.0542333789918474e-06, "loss": 1.0983, "step": 122 },
    { "epoch": 0.31, "grad_norm": 4.03125, "learning_rate": 4.038450092381697e-06, "loss": 1.1221, "step": 123 },
    { "epoch": 0.31, "grad_norm": 4.125, "learning_rate": 4.02256748792226e-06, "loss": 1.0123, "step": 124 },
    { "epoch": 0.31, "grad_norm": 3.609375, "learning_rate": 4.006586590948141e-06, "loss": 1.0935, "step": 125 },
    { "epoch": 0.31, "grad_norm": 4.15625, "learning_rate": 3.990508433139421e-06, "loss": 1.0307, "step": 126 },
    { "epoch": 0.32, "grad_norm": 3.9375, "learning_rate": 3.97433405245506e-06, "loss": 1.0694, "step": 127 },
    { "epoch": 0.32, "grad_norm": 3.84375, "learning_rate": 3.958064493065881e-06, "loss": 0.9683, "step": 128 },
    { "epoch": 0.32, "grad_norm": 3.953125, "learning_rate": 3.941700805287169e-06, "loss": 1.1073, "step": 129 },
    { "epoch": 0.32, "grad_norm": 3.84375, "learning_rate": 3.925244045510863e-06, "loss": 0.9876, "step": 130 },
    { "epoch": 0.33, "grad_norm": 3.875, "learning_rate": 3.9086952761373605e-06, "loss": 1.0082, "step": 131 },
    { "epoch": 0.33, "grad_norm": 3.734375, "learning_rate": 3.892055565506929e-06, "loss": 0.9779, "step": 132 },
    { "epoch": 0.33, "grad_norm": 3.6875, "learning_rate": 3.8753259878307355e-06, "loss": 1.0722, "step": 133 },
    { "epoch": 0.33, "grad_norm": 3.703125, "learning_rate": 3.858507623121506e-06, "loss": 1.0229, "step": 134 },
    { "epoch": 0.34, "grad_norm": 3.703125, "learning_rate": 3.841601557123793e-06, "loss": 1.081, "step": 135 },
    { "epoch": 0.34, "grad_norm": 3.8125, "learning_rate": 3.824608881243893e-06, "loss": 1.2165, "step": 136 },
    { "epoch": 0.34, "grad_norm": 3.765625, "learning_rate": 3.807530692479382e-06, "loss": 1.1001, "step": 137 },
    { "epoch": 0.34, "grad_norm": 3.671875, "learning_rate": 3.7903680933482965e-06, "loss": 1.0578, "step": 138 },
    { "epoch": 0.35, "grad_norm": 3.828125, "learning_rate": 3.7731221918179645e-06, "loss": 0.9931, "step": 139 },
    { "epoch": 0.35, "grad_norm": 3.890625, "learning_rate": 3.7557941012334687e-06, "loss": 1.0884, "step": 140 },
    { "epoch": 0.35, "grad_norm": 3.984375, "learning_rate": 3.7383849402457807e-06, "loss": 1.0741, "step": 141 },
    { "epoch": 0.35, "grad_norm": 3.71875, "learning_rate": 3.72089583273954e-06, "loss": 1.1207, "step": 142 },
    { "epoch": 0.36, "grad_norm": 3.890625, "learning_rate": 3.703327907760499e-06, "loss": 1.057, "step": 143 },
    { "epoch": 0.36, "grad_norm": 3.734375, "learning_rate": 3.685682299442638e-06, "loss": 1.0551, "step": 144 },
    { "epoch": 0.36, "grad_norm": 3.78125, "learning_rate": 3.6679601469349456e-06, "loss": 0.9792, "step": 145 },
    { "epoch": 0.36, "grad_norm": 3.703125, "learning_rate": 3.650162594327881e-06, "loss": 0.967, "step": 146 },
    { "epoch": 0.37, "grad_norm": 4.0625, "learning_rate": 3.632290790579512e-06, "loss": 1.1393, "step": 147 },
    { "epoch": 0.37, "grad_norm": 3.953125, "learning_rate": 3.6143458894413463e-06, "loss": 1.0892, "step": 148 },
    { "epoch": 0.37, "grad_norm": 3.75, "learning_rate": 3.596329049383843e-06, "loss": 1.0478, "step": 149 },
    { "epoch": 0.37, "grad_norm": 3.765625, "learning_rate": 3.5782414335216297e-06, "loss": 1.0014, "step": 150 },
    { "epoch": 0.38, "grad_norm": 3.84375, "learning_rate": 3.5600842095384125e-06, "loss": 1.0601, "step": 151 },
    { "epoch": 0.38, "grad_norm": 3.90625, "learning_rate": 3.541858549611596e-06, "loss": 1.1093, "step": 152 },
    { "epoch": 0.38, "grad_norm": 3.84375, "learning_rate": 3.523565630336607e-06, "loss": 0.9811, "step": 153 },
    { "epoch": 0.38, "grad_norm": 3.8125, "learning_rate": 3.505206632650944e-06, "loss": 1.1626, "step": 154 },
    { "epoch": 0.39, "grad_norm": 3.875, "learning_rate": 3.48678274175793e-06, "loss": 1.0518, "step": 155 },
    { "epoch": 0.39, "grad_norm": 3.75, "learning_rate": 3.468295147050207e-06, "loss": 1.0028, "step": 156 },
    { "epoch": 0.39, "grad_norm": 3.953125, "learning_rate": 3.4497450420329486e-06, "loss": 1.0596, "step": 157 },
    { "epoch": 0.39, "grad_norm": 3.8125, "learning_rate": 3.431133624246812e-06, "loss": 1.08, "step": 158 },
    { "epoch": 0.4, "grad_norm": 3.90625, "learning_rate": 3.4124620951906283e-06, "loss": 1.02, "step": 159 },
    { "epoch": 0.4, "grad_norm": 3.828125, "learning_rate": 3.3937316602438352e-06, "loss": 1.0798, "step": 160 },
    { "epoch": 0.4, "grad_norm": 3.953125, "learning_rate": 3.3749435285886638e-06, "loss": 1.1169, "step": 161 },
    { "epoch": 0.4, "grad_norm": 3.75, "learning_rate": 3.356098913132077e-06, "loss": 1.0584, "step": 162 },
    { "epoch": 0.41, "grad_norm": 3.859375, "learning_rate": 3.3371990304274654e-06, "loss": 1.0337, "step": 163 },
    { "epoch": 0.41, "grad_norm": 4.03125, "learning_rate": 3.3182451005961146e-06, "loss": 1.1088, "step": 164 },
    { "epoch": 0.41, "grad_norm": 4.0, "learning_rate": 3.2992383472484318e-06, "loss": 0.9837, "step": 165 },
    { "epoch": 0.41, "grad_norm": 3.609375, "learning_rate": 3.280179997404958e-06, "loss": 1.0732, "step": 166 },
    { "epoch": 0.42, "grad_norm": 3.765625, "learning_rate": 3.2610712814171535e-06, "loss": 1.1187, "step": 167 },
    { "epoch": 0.42, "grad_norm": 4.125, "learning_rate": 3.2419134328879703e-06, "loss": 1.104, "step": 168 },
    { "epoch": 0.42, "grad_norm": 4.09375, "learning_rate": 3.2227076885922137e-06, "loss": 0.9941, "step": 169 },
    { "epoch": 0.42, "grad_norm": 3.890625, "learning_rate": 3.2034552883966972e-06, "loss": 1.1258, "step": 170 },
    { "epoch": 0.43, "grad_norm": 3.515625, "learning_rate": 3.184157475180208e-06, "loss": 0.9484, "step": 171 },
    { "epoch": 0.43, "grad_norm": 3.71875, "learning_rate": 3.1648154947532607e-06, "loss": 1.0749, "step": 172 },
    { "epoch": 0.43, "grad_norm": 3.96875, "learning_rate": 3.1454305957776797e-06, "loss": 0.9447, "step": 173 },
    { "epoch": 0.43, "grad_norm": 3.8125, "learning_rate": 3.126004029685984e-06, "loss": 1.1009, "step": 174 },
    { "epoch": 0.44, "grad_norm": 3.90625, "learning_rate": 3.106537050600601e-06, "loss": 1.0355, "step": 175 },
    { "epoch": 0.44, "grad_norm": 3.9375, "learning_rate": 3.0870309152529006e-06, "loss": 1.0504, "step": 176 },
    { "epoch": 0.44, "grad_norm": 3.734375, "learning_rate": 3.0674868829020692e-06, "loss": 1.0575, "step": 177 },
    { "epoch": 0.44, "grad_norm": 3.71875, "learning_rate": 3.0479062152538137e-06, "loss": 1.0417, "step": 178 },
    { "epoch": 0.45, "grad_norm": 3.78125, "learning_rate": 3.0282901763789064e-06, "loss": 0.962, "step": 179 },
    { "epoch": 0.45, "grad_norm": 3.890625, "learning_rate": 3.0086400326315853e-06, "loss": 1.0601, "step": 180 },
    { "epoch": 0.45, "grad_norm": 3.796875, "learning_rate": 2.988957052567797e-06, "loss": 1.0731, "step": 181 },
    { "epoch": 0.45, "grad_norm": 3.8125, "learning_rate": 2.969242506863306e-06, "loss": 1.0532, "step": 182 },
    { "epoch": 0.46, "grad_norm": 3.890625, "learning_rate": 2.949497668231663e-06, "loss": 0.995, "step": 183 },
    { "epoch": 0.46, "grad_norm": 4.09375, "learning_rate": 2.929723811342042e-06, "loss": 0.9706, "step": 184 },
    { "epoch": 0.46, "grad_norm": 3.5625, "learning_rate": 2.9099222127369496e-06, "loss": 1.0618, "step": 185 },
    { "epoch": 0.46, "grad_norm": 3.890625, "learning_rate": 2.890094150749818e-06, "loss": 1.0437, "step": 186 },
    { "epoch": 0.47, "grad_norm": 3.859375, "learning_rate": 2.870240905422476e-06, "loss": 1.0314, "step": 187 },
    { "epoch": 0.47, "grad_norm": 3.75, "learning_rate": 2.850363758422519e-06, "loss": 0.9562, "step": 188 },
    { "epoch": 0.47, "grad_norm": 3.84375, "learning_rate": 2.830463992960561e-06, "loss": 1.1011, "step": 189 },
    { "epoch": 0.47, "grad_norm": 4.125, "learning_rate": 2.8105428937073997e-06, "loss": 0.9929, "step": 190 },
    { "epoch": 0.48, "grad_norm": 3.78125, "learning_rate": 2.7906017467110806e-06, "loss": 1.0573, "step": 191 },
    { "epoch": 0.48, "grad_norm": 3.671875, "learning_rate": 2.770641839313871e-06, "loss": 1.0478, "step": 192 },
    { "epoch": 0.48, "grad_norm": 3.953125, "learning_rate": 2.750664460069157e-06, "loss": 1.047, "step": 193 },
    { "epoch": 0.48, "grad_norm": 3.8125, "learning_rate": 2.730670898658255e-06, "loss": 1.0945, "step": 194 },
    { "epoch": 0.49, "grad_norm": 3.84375, "learning_rate": 2.710662445807156e-06, "loss": 0.9587, "step": 195 },
    { "epoch": 0.49, "grad_norm": 3.84375, "learning_rate": 2.6906403932031973e-06, "loss": 1.0648, "step": 196 },
    { "epoch": 0.49, "grad_norm": 3.90625, "learning_rate": 2.670606033411678e-06, "loss": 1.1022, "step": 197 },
    { "epoch": 0.49, "grad_norm": 3.8125, "learning_rate": 2.6505606597924122e-06, "loss": 1.1515, "step": 198 },
    { "epoch": 0.5, "grad_norm": 3.53125, "learning_rate": 2.630505566416235e-06, "loss": 1.0288, "step": 199 },
    { "epoch": 0.5, "grad_norm": 4.125, "learning_rate": 2.610442047981462e-06, "loss": 0.9577, "step": 200 },
    { "epoch": 0.5, "grad_norm": 3.765625, "learning_rate": 2.5903713997303033e-06, "loss": 0.9455, "step": 201 },
    { "epoch": 0.5, "grad_norm": 3.75, "learning_rate": 2.5702949173652515e-06, "loss": 0.9439, "step": 202 },
    { "epoch": 0.5, "eval_loss": 1.1682730913162231, "eval_runtime": 1697.6225, "eval_samples_per_second": 5.932, "eval_steps_per_second": 0.742, "step": 202 },
    { "epoch": 0.51, "grad_norm": 3.8125, "learning_rate": 2.550213896965431e-06, "loss": 1.0939, "step": 203 },
    { "epoch": 0.51, "grad_norm": 3.5625, "learning_rate": 2.53012963490293e-06, "loss": 1.0256, "step": 204 },
    { "epoch": 0.51, "grad_norm": 3.8125, "learning_rate": 2.5100434277591077e-06, "loss": 1.0595, "step": 205 },
    { "epoch": 0.51, "grad_norm": 3.890625, "learning_rate": 2.4899565722408927e-06, "loss": 1.1182, "step": 206 },
    { "epoch": 0.52, "grad_norm": 3.6875, "learning_rate": 2.4698703650970706e-06, "loss": 1.0328, "step": 207 },
    { "epoch": 0.52, "grad_norm": 3.65625, "learning_rate": 2.449786103034569e-06, "loss": 0.9872, "step": 208 },
    { "epoch": 0.52, "grad_norm": 3.796875, "learning_rate": 2.4297050826347498e-06, "loss": 1.0855, "step": 209 },
    { "epoch": 0.52, "grad_norm": 3.984375, "learning_rate": 2.409628600269697e-06, "loss": 1.0982, "step": 210 },
    { "epoch": 0.53, "grad_norm": 3.8125, "learning_rate": 2.3895579520185393e-06, "loss": 1.1318, "step": 211 },
    { "epoch": 0.53, "grad_norm": 3.640625, "learning_rate": 2.3694944335837653e-06, "loss": 0.9962, "step": 212 },
    { "epoch": 0.53, "grad_norm": 3.765625, "learning_rate": 2.349439340207588e-06, "loss": 0.968, "step": 213 },
    { "epoch": 0.53, "grad_norm": 3.96875, "learning_rate": 2.3293939665883233e-06, "loss": 0.9732, "step": 214 },
    { "epoch": 0.54, "grad_norm": 3.71875, "learning_rate": 2.3093596067968027e-06, "loss": 0.9373, "step": 215 },
    { "epoch": 0.54, "grad_norm": 3.609375, "learning_rate": 2.2893375541928447e-06, "loss": 1.0306, "step": 216 },
    { "epoch": 0.54, "grad_norm": 3.84375, "learning_rate": 2.269329101341745e-06, "loss": 1.1107, "step": 217 },
    { "epoch": 0.54, "grad_norm": 3.875, "learning_rate": 2.249335539930843e-06, "loss": 1.0235, "step": 218 },
    { "epoch": 0.55, "grad_norm": 3.546875, "learning_rate": 2.2293581606861298e-06, "loss": 1.0638, "step": 219 },
    { "epoch": 0.55, "grad_norm": 3.640625, "learning_rate": 2.20939825328892e-06, "loss": 1.0233, "step": 220 },
    { "epoch": 0.55, "grad_norm": 3.6875, "learning_rate": 2.1894571062926008e-06, "loss": 0.9618, "step": 221 },
    { "epoch": 0.55, "grad_norm": 3.84375, "learning_rate": 2.1695360070394396e-06, "loss": 0.9921, "step": 222 },
    { "epoch": 0.56, "grad_norm": 3.90625, "learning_rate": 2.1496362415774814e-06, "loss": 1.0143, "step": 223 },
    { "epoch": 0.56, "grad_norm": 3.59375, "learning_rate": 2.1297590945775244e-06, "loss": 0.9744, "step": 224 },
    { "epoch": 0.56, "grad_norm": 3.59375, "learning_rate": 2.1099058492501825e-06, "loss": 1.0126, "step": 225 },
    { "epoch": 0.56, "grad_norm": 3.96875, "learning_rate": 2.0900777872630513e-06, "loss": 1.1166, "step": 226 },
    { "epoch": 0.57, "grad_norm": 3.515625, "learning_rate": 2.070276188657959e-06, "loss": 0.9741, "step": 227 },
    { "epoch": 0.57, "grad_norm": 3.546875, "learning_rate": 2.0505023317683374e-06, "loss": 0.9359, "step": 228 },
    { "epoch": 0.57, "grad_norm": 3.734375, "learning_rate": 2.0307574931366954e-06, "loss": 1.0426, "step": 229 },
    { "epoch": 0.57, "grad_norm": 3.765625, "learning_rate": 2.011042947432204e-06, "loss": 1.0191, "step": 230 },
    { "epoch": 0.57, "grad_norm": 3.734375, "learning_rate": 1.991359967368416e-06, "loss": 1.0692, "step": 231 },
    { "epoch": 0.58, "grad_norm": 3.875, "learning_rate": 1.9717098236210944e-06, "loss": 0.9858, "step": 232 },
    { "epoch": 0.58, "grad_norm": 3.90625, "learning_rate": 1.9520937847461867e-06, "loss": 0.9709, "step": 233 },
    { "epoch": 0.58, "grad_norm": 3.90625, "learning_rate": 1.9325131170979316e-06, "loss": 1.1824, "step": 234 },
    { "epoch": 0.58, "grad_norm": 3.859375, "learning_rate": 1.9129690847471e-06, "loss": 0.9967, "step": 235 },
    { "epoch": 0.59, "grad_norm": 4.0625, "learning_rate": 1.8934629493994005e-06, "loss": 0.9896, "step": 236 },
    { "epoch": 0.59, "grad_norm": 4.0625, "learning_rate": 1.8739959703140167e-06, "loss": 1.0194, "step": 237 },
    { "epoch": 0.59, "grad_norm": 3.765625, "learning_rate": 1.8545694042223205e-06, "loss": 0.9915, "step": 238 },
    { "epoch": 0.59, "grad_norm": 4.0625, "learning_rate": 1.8351845052467403e-06, "loss": 1.0117, "step": 239 },
    { "epoch": 0.6, "grad_norm": 3.625, "learning_rate": 1.8158425248197931e-06, "loss": 0.9527, "step": 240 },
    { "epoch": 0.6, "grad_norm": 3.375, "learning_rate": 1.7965447116033036e-06, "loss": 0.9097, "step": 241 },
    { "epoch": 0.6, "grad_norm": 3.765625, "learning_rate": 1.7772923114077871e-06, "loss": 0.9966, "step": 242 },
    { "epoch": 0.6, "grad_norm": 3.96875, "learning_rate": 1.7580865671120295e-06, "loss": 1.0222, "step": 243 },
    { "epoch": 0.61, "grad_norm": 3.6875, "learning_rate": 1.738928718582847e-06, "loss": 0.9598, "step": 244 },
    { "epoch": 0.61, "grad_norm": 3.625, "learning_rate": 1.7198200025950423e-06, "loss": 1.0023, "step": 245 },
    { "epoch": 0.61, "grad_norm": 3.84375, "learning_rate": 1.7007616527515695e-06, "loss": 1.0632, "step": 246 },
    { "epoch": 0.61, "grad_norm": 3.765625, "learning_rate": 1.6817548994038862e-06, "loss": 1.054, "step": 247 },
    { "epoch": 0.62, "grad_norm": 4.03125, "learning_rate": 1.6628009695725348e-06, "loss": 1.0083, "step": 248 },
    { "epoch": 0.62, "grad_norm": 3.84375, "learning_rate": 1.643901086867924e-06, "loss": 1.0685, "step": 249 },
    { "epoch": 0.62, "grad_norm": 3.734375, "learning_rate": 1.6250564714113364e-06, "loss": 0.9871, "step": 250 },
    { "epoch": 0.62, "grad_norm": 3.6875, "learning_rate": 1.606268339756166e-06, "loss": 1.0499, "step": 251 },
    { "epoch": 0.63, "grad_norm": 3.75, "learning_rate": 1.5875379048093723e-06, "loss": 1.0568, "step": 252 },
    { "epoch": 0.63, "grad_norm": 3.921875, "learning_rate": 1.568866375753188e-06, "loss": 1.0213, "step": 253 },
    { "epoch": 0.63, "grad_norm": 3.59375, "learning_rate": 1.5502549579670522e-06, "loss": 1.0704, "step": 254 },
    { "epoch": 0.63, "grad_norm": 3.96875, "learning_rate": 1.5317048529497938e-06, "loss": 1.055, "step": 255 },
    { "epoch": 0.64, "grad_norm": 3.4375, "learning_rate": 1.5132172582420713e-06, "loss": 0.9842, "step": 256 },
    { "epoch": 0.64, "grad_norm": 3.75, "learning_rate": 1.4947933673490566e-06, "loss": 1.0064, "step": 257 },
    { "epoch": 0.64, "grad_norm": 3.84375, "learning_rate": 1.4764343696633933e-06, "loss": 1.0286, "step": 258 },
    { "epoch": 0.64, "grad_norm": 3.921875, "learning_rate": 1.4581414503884051e-06, "loss": 0.9656, "step": 259 },
    { "epoch": 0.65, "grad_norm": 3.734375, "learning_rate": 1.439915790461588e-06, "loss": 1.0944, "step": 260 },
    { "epoch": 0.65, "grad_norm": 3.625, "learning_rate": 1.421758566478371e-06, "loss": 1.0269, "step": 261 },
    { "epoch": 0.65, "grad_norm": 3.875, "learning_rate": 1.4036709506161577e-06, "loss": 0.9061, "step": 262 },
    { "epoch": 0.65, "grad_norm": 3.734375, "learning_rate": 1.3856541105586545e-06, "loss": 0.9597, "step": 263 },
    { "epoch": 0.66, "grad_norm": 3.671875, "learning_rate": 1.3677092094204886e-06, "loss": 1.0282, "step": 264 },
    { "epoch": 0.66, "grad_norm": 3.671875, "learning_rate": 1.3498374056721198e-06, "loss": 1.0582, "step": 265 },
    { "epoch": 0.66, "grad_norm": 3.734375, "learning_rate": 1.332039853065055e-06, "loss": 1.0319, "step": 266 },
    { "epoch": 0.66, "grad_norm": 3.71875, "learning_rate": 1.3143177005573626e-06, "loss": 1.0384, "step": 267 },
    { "epoch": 0.67, "grad_norm": 3.75, "learning_rate": 1.2966720922395015e-06, "loss": 0.9468, "step": 268 },
    { "epoch": 0.67, "grad_norm": 3.59375, "learning_rate": 1.2791041672604609e-06, "loss": 1.0335, "step": 269 },
    { "epoch": 0.67, "grad_norm": 3.8125, "learning_rate": 1.2616150597542197e-06, "loss": 0.9515, "step": 270 },
    { "epoch": 0.67, "grad_norm": 3.609375, "learning_rate": 1.244205898766532e-06, "loss": 1.0266, "step": 271 },
    { "epoch": 0.68, "grad_norm": 3.625, "learning_rate": 1.2268778081820363e-06, "loss": 1.0266, "step": 272 },
    { "epoch": 0.68, "grad_norm": 3.59375, "learning_rate": 1.2096319066517037e-06, "loss": 0.8807, "step": 273 },
    { "epoch": 0.68, "grad_norm": 4.0, "learning_rate": 1.192469307520619e-06, "loss": 1.066, "step": 274 },
    { "epoch": 0.68, "grad_norm": 3.453125, "learning_rate": 1.1753911187561075e-06, "loss": 1.002, "step": 275 },
    { "epoch": 0.69, "grad_norm": 3.71875, "learning_rate": 1.1583984428762076e-06, "loss": 1.006, "step": 276 },
    { "epoch": 0.69, "grad_norm": 3.859375, "learning_rate": 1.1414923768784952e-06, "loss": 1.0167, "step": 277 },
    { "epoch": 0.69, "grad_norm": 3.921875, "learning_rate": 1.1246740121692649e-06, "loss": 1.0791, "step": 278 },
    { "epoch": 0.69, "grad_norm": 3.984375, "learning_rate": 1.1079444344930717e-06, "loss": 1.013, "step": 279 },
    { "epoch": 0.7, "grad_norm": 3.75, "learning_rate": 1.0913047238626394e-06, "loss": 1.0024, "step": 280 },
    { "epoch": 0.7, "grad_norm": 4.0, "learning_rate": 1.0747559544891376e-06, "loss": 1.0406, "step": 281 },
    { "epoch": 0.7, "grad_norm": 3.578125, "learning_rate": 1.0582991947128324e-06, "loss": 1.0518, "step": 282 },
    { "epoch": 0.7, "grad_norm": 3.75, "learning_rate": 1.0419355069341206e-06, "loss": 1.0486, "step": 283 },
    { "epoch": 0.71, "grad_norm": 3.65625, "learning_rate": 1.025665947544941e-06, "loss": 0.9368, "step": 284 },
    { "epoch": 0.71, "grad_norm": 3.515625, "learning_rate": 1.0094915668605786e-06, "loss": 0.958, "step": 285 },
    { "epoch": 0.71, "grad_norm": 3.765625, "learning_rate": 9.934134090518593e-07, "loss": 1.0873, "step": 286 },
    { "epoch": 0.71, "grad_norm": 3.34375, "learning_rate": 9.774325120777406e-07, "loss": 1.0053, "step": 287 },
    { "epoch": 0.72, "grad_norm": 3.859375, "learning_rate": 9.61549907618304e-07, "loss": 1.1148, "step": 288 },
    { "epoch": 0.72, "grad_norm": 3.546875, "learning_rate": 9.457666210081537e-07, "loss": 1.1638, "step": 289 },
    { "epoch": 0.72, "grad_norm": 3.6875, "learning_rate": 9.300836711702224e-07, "loss": 1.017, "step": 290 },
    { "epoch": 0.72, "grad_norm": 3.609375, "learning_rate": 9.145020705499947e-07, "loss": 1.0333, "step": 291 },
    { "epoch": 0.73, "grad_norm": 3.953125, "learning_rate": 8.990228250501446e-07, "loss": 1.099, "step": 292 },
    { "epoch": 0.73, "grad_norm": 3.671875, "learning_rate": 8.836469339655979e-07, "loss": 0.9876, "step": 293 },
    { "epoch": 0.73, "grad_norm": 3.578125, "learning_rate": 8.68375389919022e-07, "loss": 1.0734, "step": 294 },
    { "epoch": 0.73, "grad_norm": 3.8125, "learning_rate": 8.532091787967428e-07, "loss": 1.1269, "step": 295 },
    { "epoch": 0.74, "grad_norm": 3.65625, "learning_rate": 8.38149279685101e-07, "loss": 0.9446, "step": 296 },
    { "epoch": 0.74, "grad_norm": 4.0625, "learning_rate": 8.231966648072431e-07, "loss": 1.0824, "step": 297 },
    { "epoch": 0.74, "grad_norm": 3.765625, "learning_rate": 8.083522994603593e-07, "loss": 1.0151, "step": 298 },
    { "epoch": 0.74, "grad_norm": 3.703125, "learning_rate": 7.936171419533653e-07, "loss": 1.0191, "step": 299 },
    { "epoch": 0.75, "grad_norm": 3.46875, "learning_rate": 7.789921435450373e-07, "loss": 1.0482, "step": 300 },
    { "epoch": 0.75, "grad_norm": 3.8125, "learning_rate": 7.644782483826016e-07, "loss": 1.0616, "step": 301 },
    { "epoch": 0.75, "grad_norm": 3.859375, "learning_rate": 7.500763934407851e-07, "loss": 0.958, "step": 302 },
    { "epoch": 0.75, "grad_norm": 3.65625, "learning_rate": 7.357875084613208e-07, "loss": 0.9762, "step": 303 },
    { "epoch": 0.75, "eval_loss": 1.1502219438552856, "eval_runtime": 1698.3617, "eval_samples_per_second": 5.93, "eval_steps_per_second": 0.741, "step": 303 },
    { "epoch": 0.76, "grad_norm": 3.484375, "learning_rate": 7.216125158929337e-07, "loss": 1.0086, "step": 304 },
    { "epoch": 0.76, "grad_norm": 3.734375, "learning_rate": 7.075523308317863e-07, "loss": 0.9599, "step": 305 },
    { "epoch": 0.76, "grad_norm": 3.53125, "learning_rate": 6.936078609624023e-07, "loss": 0.9755, "step": 306 },
    { "epoch": 0.76, "grad_norm": 3.65625, "learning_rate": 6.797800064990734e-07, "loss": 0.8239, "step": 307 },
    { "epoch": 0.77, "grad_norm": 3.671875, "learning_rate": 6.660696601277369e-07, "loss": 1.0655, "step": 308 },
    { "epoch": 0.77, "grad_norm": 3.609375, "learning_rate": 6.524777069483526e-07, "loss": 1.0598, "step": 309 },
    { "epoch": 0.77, "grad_norm": 3.609375, "learning_rate": 6.390050244177615e-07, "loss": 1.0022, "step": 310 },
    { "epoch": 0.77, "grad_norm": 3.546875, "learning_rate": 6.256524822930379e-07, "loss": 1.0651, "step": 311 },
    { "epoch": 0.78, "grad_norm": 3.984375, "learning_rate": 6.124209425753455e-07, "loss": 0.9499, "step": 312 },
    { "epoch": 0.78, "grad_norm": 3.546875, "learning_rate": 5.993112594542813e-07, "loss": 0.9701, "step": 313 },
    { "epoch": 0.78, "grad_norm": 3.734375, "learning_rate": 5.863242792527385e-07, "loss": 1.0464, "step": 314 },
    { "epoch": 0.78, "grad_norm": 3.6875, "learning_rate": 5.734608403722674e-07, "loss": 1.0288, "step": 315 },
    { "epoch": 0.79, "grad_norm": 4.1875, "learning_rate": 5.607217732389503e-07, "loss": 1.0578, "step": 316 },
    { "epoch": 0.79, "grad_norm": 3.921875, "learning_rate": 5.481079002497946e-07, "loss": 1.0421, "step": 317 },
    { "epoch": 0.79, "grad_norm": 4.25, "learning_rate": 5.356200357196362e-07, "loss": 1.0306, "step": 318 },
    { "epoch": 0.79, "grad_norm": 3.71875, "learning_rate": 5.232589858285734e-07, "loss": 1.0028, "step": 319 },
    { "epoch": 0.8, "grad_norm": 4.25, "learning_rate": 5.110255485699237e-07, "loss": 0.9517, "step": 320 },
    { "epoch": 0.8, "grad_norm": 3.625, "learning_rate": 4.989205136987007e-07, "loss": 0.9468, "step": 321 },
    { "epoch": 0.8, "grad_norm": 3.703125, "learning_rate": 4.869446626806404e-07, "loss": 1.0133, "step": 322 },
    { "epoch": 0.8, "grad_norm": 3.578125, "learning_rate": 4.750987686417405e-07, "loss": 0.9474, "step": 323 },
    { "epoch": 0.81, "grad_norm": 3.625, "learning_rate": 4.633835963183583e-07, "loss": 0.9537, "step": 324 },
    { "epoch": 0.81, "grad_norm": 3.578125, "learning_rate": 4.5179990200784006e-07, "loss": 0.9295, "step": 325 },
    { "epoch": 0.81, "grad_norm": 3.53125, "learning_rate": 4.4034843351969005e-07, "loss": 1.0759, "step": 326 },
    { "epoch": 0.81, "grad_norm": 3.546875, "learning_rate": 4.2902993012730543e-07, "loss": 0.9895, "step": 327 },
    { "epoch": 0.82, "grad_norm": 3.609375, "learning_rate": 4.178451225202396e-07, "loss": 1.0568, "step": 328 },
    { "epoch": 0.82, "grad_norm": 3.578125, "learning_rate": 4.067947327570379e-07, "loss": 1.0846, "step": 329 },
    { "epoch": 0.82, "grad_norm": 3.875, "learning_rate": 3.958794742186245e-07, "loss": 1.0305, "step": 330 },
    { "epoch": 0.82, "grad_norm": 3.625, "learning_rate": 3.851000515622408e-07, "loss": 0.9988, "step": 331 },
    { "epoch": 0.83, "grad_norm": 3.6875, "learning_rate": 3.7445716067596506e-07, "loss": 1.0955, "step": 332 },
    { "epoch": 0.83, "grad_norm": 3.796875, "learning_rate": 3.639514886337786e-07, "loss": 1.0371, "step": 333 },
    { "epoch": 0.83, "grad_norm": 3.796875, "learning_rate": 3.5358371365121766e-07, "loss": 1.0718, "step": 334 },
    { "epoch": 0.83, "grad_norm": 3.6875, "learning_rate": 3.43354505041586e-07, "loss": 0.9978, "step": 335 },
    { "epoch": 0.84, "grad_norm": 3.6875, "learning_rate": 3.332645231727441e-07, "loss": 1.0067, "step": 336 },
    { "epoch": 0.84, "grad_norm": 3.5625, "learning_rate": 3.2331441942448446e-07, "loss": 0.9888, "step": 337 },
    { "epoch": 0.84, "grad_norm": 3.484375, "learning_rate": 3.135048361464721e-07, "loss": 0.9309, "step": 338 },
    { "epoch": 0.84, "grad_norm": 3.859375, "learning_rate": 3.0383640661678344e-07, "loss": 1.0667, "step": 339 },
    { "epoch": 0.85, "grad_norm": 3.71875, "learning_rate": 2.943097550010204e-07, "loss": 1.0255, "step": 340 },
    { "epoch": 0.85, "grad_norm": 3.6875, "learning_rate": 2.8492549631201376e-07, "loss": 1.03, "step": 341 },
    { "epoch": 0.85, "grad_norm": 3.703125, "learning_rate": 2.7568423637012646e-07, "loss": 1.019, "step": 342 },
    { "epoch": 0.85, "grad_norm": 3.703125, "learning_rate": 2.6658657176413527e-07, "loss": 1.017, "step": 343 },
    { "epoch": 0.86, "grad_norm": 3.703125, "learning_rate": 2.5763308981272525e-07, "loss": 1.0516, "step": 344 },
    { "epoch": 0.86, "grad_norm": 3.34375, "learning_rate": 2.488243685265676e-07, "loss": 0.9736, "step": 345 },
    { "epoch": 0.86, "grad_norm": 3.828125, "learning_rate": 2.401609765710064e-07, "loss": 0.9969, "step": 346 },
    { "epoch": 0.86, "grad_norm": 3.609375, "learning_rate": 2.3164347322935127e-07, "loss": 0.9944, "step": 347 },
    { "epoch": 0.87, "grad_norm": 3.53125, "learning_rate": 2.232724083667645e-07, "loss": 1.0454, "step": 348 },
    { "epoch": 0.87, "grad_norm": 3.453125, "learning_rate": 2.1504832239477192e-07, "loss": 0.8774, "step": 349 },
    { "epoch": 0.87, "grad_norm": 3.71875, "learning_rate": 2.0697174623636795e-07, "loss": 1.0629, "step": 350 },
    { "epoch": 0.87, "grad_norm": 3.671875, "learning_rate": 1.9904320129174343e-07, "loss": 0.993, "step": 351 },
    { "epoch": 0.88, "grad_norm": 3.828125, "learning_rate": 1.9126319940462935e-07, "loss": 1.0039, "step": 352 },
    { "epoch": 0.88, "grad_norm": 3.53125, "learning_rate": 1.836322428292464e-07, "loss": 0.9873, "step": 353 },
    { "epoch": 0.88, "grad_norm": 3.71875, "learning_rate": 1.761508241978871e-07, "loss": 1.1026, "step": 354 },
    { "epoch": 0.88, "grad_norm": 3.765625, "learning_rate": 1.6881942648911077e-07, "loss": 1.0244, "step": 355 },
    { "epoch": 0.89, "grad_norm": 3.75, "learning_rate": 1.6163852299656142e-07, "loss": 1.0486, "step": 356 },
    { "epoch": 0.89, "grad_norm": 3.671875, "learning_rate": 1.5460857729841877e-07, "loss": 0.9962, "step": 357 },
    { "epoch": 0.89, "grad_norm": 3.375, "learning_rate": 1.477300432274645e-07, "loss": 1.0021, "step": 358 },
    { "epoch": 0.89, "grad_norm": 3.546875, "learning_rate": 1.4100336484178995e-07, "loss": 0.9262, "step": 359 },
    { "epoch": 0.9, "grad_norm": 3.71875, "learning_rate": 1.344289763961257e-07, "loss": 1.1243, "step": 360 },
    { "epoch": 0.9, "grad_norm": 3.671875, "learning_rate": 1.2800730231380675e-07, "loss": 1.0262, "step": 361 },
    { "epoch": 0.9, "grad_norm": 3.59375, "learning_rate": 1.2173875715937726e-07, "loss": 0.9252, "step": 362 },
    { "epoch": 0.9, "grad_norm": 3.5, "learning_rate": 1.1562374561182143e-07, "loss": 0.9864, "step": 363 },
    { "epoch": 0.91, "grad_norm": 3.90625, "learning_rate": 1.0966266243844392e-07, "loss": 1.0454, "step": 364 },
    { "epoch": 0.91, "grad_norm": 3.75, "learning_rate": 1.0385589246938194e-07, "loss": 0.9809, "step": 365 },
    { "epoch": 0.91, "grad_norm": 3.75, "learning_rate": 9.82038105727623e-08, "loss": 1.0173, "step": 366 },
    { "epoch": 0.91, "grad_norm": 3.609375, "learning_rate": 9.270678163050218e-08, "loss": 1.0639, "step": 367 },
    { "epoch": 0.92, "grad_norm": 3.984375, "learning_rate": 8.736516051474992e-08, "loss": 0.9877, "step": 368 },
    { "epoch": 0.92, "grad_norm": 3.625, "learning_rate": 8.217929206498137e-08, "loss": 0.9758, "step": 369 },
    { "epoch": 0.92, "grad_norm": 3.453125, "learning_rate": 7.71495110657322e-08, "loss": 1.0034, "step": 370 },
    { "epoch": 0.92, "grad_norm": 3.953125, "learning_rate": 7.227614222498875e-08, "loss": 1.1404, "step": 371 },
    { "epoch": 0.93, "grad_norm": 3.609375, "learning_rate": 6.755950015322477e-08, "loss": 1.0372, "step": 372 },
    { "epoch": 0.93, "grad_norm": 3.5625, "learning_rate": 6.299988934309026e-08, "loss": 0.975, "step": 373 },
    { "epoch": 0.93, "grad_norm": 3.640625, "learning_rate": 5.859760414975601e-08, "loss": 0.9614, "step": 374 },
    { "epoch": 0.93, "grad_norm": 3.640625, "learning_rate": 5.435292877190995e-08, "loss": 1.0394, "step": 375 },
    { "epoch": 0.94, "grad_norm": 3.640625, "learning_rate": 5.026613723340956e-08, "loss": 0.9794, "step": 376 },
    { "epoch": 0.94, "grad_norm": 3.890625, "learning_rate": 4.6337493365592736e-08, "loss": 1.015, "step": 377 },
    { "epoch": 0.94, "grad_norm": 3.71875, "learning_rate": 4.256725079024554e-08, "loss": 1.087, "step": 378 },
    { "epoch": 0.94, "grad_norm": 3.625, "learning_rate": 3.8955652903228114e-08, "loss": 0.9599, "step": 379 },
    { "epoch": 0.95, "grad_norm": 3.875, "learning_rate": 3.550293285876222e-08, "loss": 0.964, "step": 380 },
    { "epoch": 0.95, "grad_norm": 3.515625, "learning_rate": 3.220931355438051e-08, "loss": 0.9747, "step": 381 },
    { "epoch": 0.95, "grad_norm": 3.515625, "learning_rate": 2.907500761653581e-08, "loss": 1.0822, "step": 382 },
    { "epoch": 0.95, "grad_norm": 3.609375, "learning_rate": 2.6100217386873794e-08, "loss": 1.0306, "step": 383 },
    { "epoch": 0.96, "grad_norm": 3.546875, "learning_rate": 2.3285134909173113e-08, "loss": 1.0334, "step": 384 },
    { "epoch": 0.96, "grad_norm": 3.71875, "learning_rate": 2.0629941916944785e-08, "loss": 1.0246, "step": 385 },
    { "epoch": 0.96, "grad_norm": 6.8125, "learning_rate": 1.8134809821701016e-08, "loss": 0.9324, "step": 386 },
    { "epoch": 0.96, "grad_norm": 3.8125, "learning_rate": 1.579989970189044e-08, "loss": 1.1022, "step": 387 },
    { "epoch": 0.97, "grad_norm": 3.9375, "learning_rate": 1.3625362292497835e-08, "loss": 1.0338, "step": 388 },
    { "epoch": 0.97, "grad_norm": 4.09375, "learning_rate": 1.1611337975313553e-08, "loss": 0.9934, "step": 389 },
    { "epoch": 0.97, "grad_norm": 3.53125, "learning_rate": 9.757956769871624e-09, "loss": 0.9822, "step": 390 },
    { "epoch": 0.97, "grad_norm": 3.65625, "learning_rate": 8.065338325054795e-09, "loss": 1.0552, "step": 391 },
    { "epoch": 0.98, "grad_norm": 3.53125, "learning_rate": 6.533591911371262e-09, "loss": 0.9611, "step": 392 },
    { "epoch": 0.98, "grad_norm": 3.6875, "learning_rate": 5.162816413900873e-09, "loss": 0.954, "step": 393 },
    { "epoch": 0.98, "grad_norm": 3.75, "learning_rate": 3.953100325909953e-09, "loss": 1.0505, "step": 394 },
    { "epoch": 0.98, "grad_norm": 3.765625, "learning_rate": 2.9045217431397655e-09, "loss": 1.0135, "step": 395 },
    { "epoch": 0.99, "grad_norm": 3.59375, "learning_rate": 2.017148358763876e-09, "loss": 1.0444, "step": 396 },
    { "epoch": 0.99, "grad_norm": 3.65625, "learning_rate": 1.2910374590194286e-09, "loss": 1.0298, "step": 397 },
    { "epoch": 0.99, "grad_norm": 3.625, "learning_rate": 7.262359195070456e-10, "loss": 1.1188, "step": 398 },
    { "epoch": 0.99, "grad_norm": 3.765625, "learning_rate": 3.227802021665838e-10, "loss": 0.9863, "step": 399 },
    { "epoch": 1.0, "grad_norm": 3.6875, "learning_rate": 8.069635292179412e-11, "loss": 1.0449, "step": 400 },
    { "epoch": 1.0, "grad_norm": 3.4375, "learning_rate": 0.0, "loss": 0.9839, "step": 401 }
  ],
  "logging_steps": 1,
  "max_steps": 401,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 4.4848246137752125e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}