{
  "best_metric": 1.704933762550354,
  "best_model_checkpoint": "runs/deepseek-full-hard/checkpoint-50000",
  "epoch": 2.5,
  "eval_steps": 2500,
  "global_step": 100000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "grad_norm": 24.3125, "learning_rate": 2e-05, "loss": 1.9182, "step": 100 },
    { "epoch": 0.01, "grad_norm": 13.9296875, "learning_rate": 4e-05, "loss": 1.9313, "step": 200 },
    { "epoch": 0.01, "grad_norm": 7.0, "learning_rate": 6e-05, "loss": 1.9142, "step": 300 },
    { "epoch": 0.01, "grad_norm": 10.0703125, "learning_rate": 8e-05, "loss": 1.9665, "step": 400 },
    { "epoch": 0.01, "grad_norm": 8.0703125, "learning_rate": 0.0001, "loss": 1.9457, "step": 500 },
    { "epoch": 0.01, "grad_norm": 13.6484375, "learning_rate": 9.989949748743719e-05, "loss": 1.9216, "step": 600 },
    { "epoch": 0.02, "grad_norm": 7.98046875, "learning_rate": 9.979899497487438e-05, "loss": 1.9332, "step": 700 },
    { "epoch": 0.02, "grad_norm": 5.02734375, "learning_rate": 9.969849246231156e-05, "loss": 1.9248, "step": 800 },
    { "epoch": 0.02, "grad_norm": 6.359375, "learning_rate": 9.959798994974875e-05, "loss": 1.9724, "step": 900 },
    { "epoch": 0.03, "grad_norm": 8.6328125, "learning_rate": 9.949748743718594e-05, "loss": 1.9189, "step": 1000 },
    { "epoch": 0.03, "grad_norm": 9.0390625, "learning_rate": 9.939698492462311e-05, "loss": 1.9435, "step": 1100 },
    { "epoch": 0.03, "grad_norm": 7.85546875, "learning_rate": 9.929648241206031e-05, "loss": 1.9003, "step": 1200 },
    { "epoch": 0.03, "grad_norm": 11.4296875, "learning_rate": 9.91959798994975e-05, "loss": 1.8982, "step": 1300 },
    { "epoch": 0.04, "grad_norm": 4.10546875, "learning_rate": 9.909547738693468e-05, "loss": 1.8895, "step": 1400 },
    { "epoch": 0.04, "grad_norm": 8.8671875, "learning_rate": 9.899497487437186e-05, "loss": 1.9317, "step": 1500 },
    { "epoch": 0.04, "grad_norm": 8.546875, "learning_rate": 9.889447236180906e-05, "loss": 1.8948, "step": 1600 },
    { "epoch": 0.04, "grad_norm": 7.5625, "learning_rate": 9.879396984924624e-05, "loss": 1.8883, "step": 1700 },
    { "epoch": 0.04, "grad_norm": 3.732421875, "learning_rate": 9.869346733668342e-05, "loss": 1.8749, "step": 1800 },
    { "epoch": 0.05, "grad_norm": 9.140625, "learning_rate": 9.85929648241206e-05, "loss": 1.8626, "step": 1900 },
    { "epoch": 0.05, "grad_norm": 8.7265625, "learning_rate": 9.84924623115578e-05, "loss": 1.8761, "step": 2000 },
    { "epoch": 0.05, "grad_norm": 5.890625, "learning_rate": 9.839195979899497e-05, "loss": 1.8842, "step": 2100 },
    { "epoch": 0.06, "grad_norm": 7.0234375, "learning_rate": 9.829145728643216e-05, "loss": 1.9056, "step": 2200 },
    { "epoch": 0.06, "grad_norm": 8.0078125, "learning_rate": 9.819095477386935e-05, "loss": 1.9, "step": 2300 },
    { "epoch": 0.06, "grad_norm": 8.734375, "learning_rate": 9.809045226130655e-05, "loss": 1.9253, "step": 2400 },
    { "epoch": 0.06, "grad_norm": 9.1640625, "learning_rate": 9.798994974874372e-05, "loss": 1.9373, "step": 2500 },
    { "epoch": 0.06, "eval_loss": 1.9465688467025757, "eval_runtime": 58.7017, "eval_samples_per_second": 17.035, "eval_steps_per_second": 4.259, "step": 2500 },
    { "epoch": 0.07, "grad_norm": 8.359375, "learning_rate": 9.788944723618091e-05, "loss": 1.9574, "step": 2600 },
    { "epoch": 0.07, "grad_norm": 6.40234375, "learning_rate": 9.77889447236181e-05, "loss": 1.8468, "step": 2700 },
    { "epoch": 0.07, "grad_norm": 7.328125, "learning_rate": 9.768844221105528e-05, "loss": 1.8394, "step": 2800 },
    { "epoch": 0.07, "grad_norm": 9.2578125, "learning_rate": 9.758793969849247e-05, "loss": 1.8358, "step": 2900 },
    { "epoch": 0.07, "grad_norm": 8.53125, "learning_rate": 9.748743718592965e-05, "loss": 1.8669, "step": 3000 },
    { "epoch": 0.08, "grad_norm": 5.25390625, "learning_rate": 9.738693467336684e-05, "loss": 1.8772, "step": 3100 },
    { "epoch": 0.08, "grad_norm": 7.84375, "learning_rate": 9.728643216080403e-05, "loss": 1.8632, "step": 3200 },
    { "epoch": 0.08, "grad_norm": 10.2578125, "learning_rate": 9.718592964824121e-05, "loss": 1.8813, "step": 3300 },
    { "epoch": 0.09, "grad_norm": 5.78515625, "learning_rate": 9.70854271356784e-05, "loss": 1.8944, "step": 3400 },
    { "epoch": 0.09, "grad_norm": 5.18359375, "learning_rate": 9.698492462311559e-05, "loss": 1.8789, "step": 3500 },
    { "epoch": 0.09, "grad_norm": 8.2578125, "learning_rate": 9.688442211055276e-05, "loss": 1.8867, "step": 3600 },
    { "epoch": 0.09, "grad_norm": 12.1171875, "learning_rate": 9.678391959798996e-05, "loss": 1.8426, "step": 3700 },
    { "epoch": 0.1, "grad_norm": 8.875, "learning_rate": 9.668341708542715e-05, "loss": 1.8427, "step": 3800 },
    { "epoch": 0.1, "grad_norm": 7.8203125, "learning_rate": 9.658291457286432e-05, "loss": 1.8416, "step": 3900 },
    { "epoch": 0.1, "grad_norm": 5.94140625, "learning_rate": 9.64824120603015e-05, "loss": 1.8496, "step": 4000 },
    { "epoch": 0.1, "grad_norm": 8.1328125, "learning_rate": 9.63819095477387e-05, "loss": 1.865, "step": 4100 },
    { "epoch": 0.1, "grad_norm": 8.578125, "learning_rate": 9.628140703517589e-05, "loss": 1.8689, "step": 4200 },
    { "epoch": 0.11, "grad_norm": 6.1171875, "learning_rate": 9.618090452261306e-05, "loss": 1.8711, "step": 4300 },
    { "epoch": 0.11, "grad_norm": 7.08203125, "learning_rate": 9.608040201005025e-05, "loss": 1.8723, "step": 4400 },
    { "epoch": 0.11, "grad_norm": 4.77734375, "learning_rate": 9.597989949748745e-05, "loss": 1.8686, "step": 4500 },
    { "epoch": 0.12, "grad_norm": 6.6875, "learning_rate": 9.587939698492462e-05, "loss": 1.8718, "step": 4600 },
    { "epoch": 0.12, "grad_norm": 9.5, "learning_rate": 9.577889447236181e-05, "loss": 1.8171, "step": 4700 },
    { "epoch": 0.12, "grad_norm": 11.125, "learning_rate": 9.5678391959799e-05, "loss": 1.9202, "step": 4800 },
    { "epoch": 0.12, "grad_norm": 8.09375, "learning_rate": 9.55778894472362e-05, "loss": 1.8685, "step": 4900 },
    { "epoch": 0.12, "grad_norm": 7.26171875, "learning_rate": 9.547738693467337e-05, "loss": 1.8742, "step": 5000 },
    { "epoch": 0.12, "eval_loss": 1.8054884672164917, "eval_runtime": 58.5502, "eval_samples_per_second": 17.079, "eval_steps_per_second": 4.27, "step": 5000 },
    { "epoch": 0.13, "grad_norm": 7.7734375, "learning_rate": 9.537688442211056e-05, "loss": 1.8839, "step": 5100 },
    { "epoch": 0.13, "grad_norm": 9.90625, "learning_rate": 9.527638190954774e-05, "loss": 1.8702, "step": 5200 },
    { "epoch": 0.13, "grad_norm": 6.0078125, "learning_rate": 9.517587939698493e-05, "loss": 1.8273, "step": 5300 },
    { "epoch": 0.14, "grad_norm": 10.109375, "learning_rate": 9.507537688442212e-05, "loss": 1.8637, "step": 5400 },
    { "epoch": 0.14, "grad_norm": 6.84765625, "learning_rate": 9.49748743718593e-05, "loss": 1.8654, "step": 5500 },
    { "epoch": 0.14, "grad_norm": 6.609375, "learning_rate": 9.487437185929649e-05, "loss": 1.8233, "step": 5600 },
    { "epoch": 0.14, "grad_norm": 6.83984375, "learning_rate": 9.477386934673366e-05, "loss": 1.8636, "step": 5700 },
    { "epoch": 0.14, "grad_norm": 7.73046875, "learning_rate": 9.467336683417086e-05, "loss": 1.8542, "step": 5800 },
    { "epoch": 0.15, "grad_norm": 9.46875, "learning_rate": 9.457286432160805e-05, "loss": 1.8727, "step": 5900 },
    { "epoch": 0.15, "grad_norm": 8.828125, "learning_rate": 9.447236180904523e-05, "loss": 1.8413, "step": 6000 },
    { "epoch": 0.15, "grad_norm": 9.125, "learning_rate": 9.437185929648241e-05, "loss": 1.8322, "step": 6100 },
    { "epoch": 0.15, "grad_norm": 4.12890625, "learning_rate": 9.427135678391961e-05, "loss": 1.862, "step": 6200 },
    { "epoch": 0.16, "grad_norm": 8.6640625, "learning_rate": 9.41708542713568e-05, "loss": 1.8537, "step": 6300 },
    { "epoch": 0.16, "grad_norm": 5.04296875, "learning_rate": 9.407035175879397e-05, "loss": 1.847, "step": 6400 },
    { "epoch": 0.16, "grad_norm": 7.30078125, "learning_rate": 9.396984924623115e-05, "loss": 1.8379, "step": 6500 },
    { "epoch": 0.17, "grad_norm": 6.3046875, "learning_rate": 9.386934673366835e-05, "loss": 1.8492, "step": 6600 },
    { "epoch": 0.17, "grad_norm": 9.671875, "learning_rate": 9.376884422110554e-05, "loss": 1.845, "step": 6700 },
    { "epoch": 0.17, "grad_norm": 5.0546875, "learning_rate": 9.366834170854271e-05, "loss": 1.8557, "step": 6800 },
    { "epoch": 0.17, "grad_norm": 5.66015625, "learning_rate": 9.35678391959799e-05, "loss": 1.8256, "step": 6900 },
    { "epoch": 0.17, "grad_norm": 7.19921875, "learning_rate": 9.34673366834171e-05, "loss": 1.8081, "step": 7000 },
    { "epoch": 0.18, "grad_norm": 7.00390625, "learning_rate": 9.336683417085427e-05, "loss": 1.8326, "step": 7100 },
    { "epoch": 0.18, "grad_norm": 7.5390625, "learning_rate": 9.326633165829146e-05, "loss": 1.8197, "step": 7200 },
    { "epoch": 0.18, "grad_norm": 6.75, "learning_rate": 9.316582914572864e-05, "loss": 1.81, "step": 7300 },
    { "epoch": 0.18, "grad_norm": 9.28125, "learning_rate": 9.306532663316585e-05, "loss": 1.8062, "step": 7400 },
    { "epoch": 0.19, "grad_norm": 4.9765625, "learning_rate": 9.296482412060302e-05, "loss": 1.8546, "step": 7500 },
    { "epoch": 0.19, "eval_loss": 1.7942283153533936, "eval_runtime": 58.5803, "eval_samples_per_second": 17.071, "eval_steps_per_second": 4.268, "step": 7500 },
    { "epoch": 0.19, "grad_norm": 6.27734375, "learning_rate": 9.28643216080402e-05, "loss": 1.8519, "step": 7600 },
    { "epoch": 0.19, "grad_norm": 9.65625, "learning_rate": 9.276381909547739e-05, "loss": 1.8272, "step": 7700 },
    { "epoch": 0.2, "grad_norm": 6.54296875, "learning_rate": 9.266331658291458e-05, "loss": 1.8251, "step": 7800 },
    { "epoch": 0.2, "grad_norm": 5.80859375, "learning_rate": 9.256281407035176e-05, "loss": 1.7912, "step": 7900 },
    { "epoch": 0.2, "grad_norm": 12.078125, "learning_rate": 9.246231155778895e-05, "loss": 1.8036, "step": 8000 },
    { "epoch": 0.2, "grad_norm": 8.5546875, "learning_rate": 9.236180904522614e-05, "loss": 1.8208, "step": 8100 },
    { "epoch": 0.2, "grad_norm": 8.59375, "learning_rate": 9.226130653266331e-05, "loss": 1.8396, "step": 8200 },
    { "epoch": 0.21, "grad_norm": 9.0078125, "learning_rate": 9.216080402010051e-05, "loss": 1.8225, "step": 8300 },
    { "epoch": 0.21, "grad_norm": 4.7421875, "learning_rate": 9.20603015075377e-05, "loss": 1.7912, "step": 8400 },
    { "epoch": 0.21, "grad_norm": 9.8828125, "learning_rate": 9.195979899497488e-05, "loss": 1.8061, "step": 8500 },
    { "epoch": 0.21, "grad_norm": 4.69140625, "learning_rate": 9.185929648241206e-05, "loss": 1.7964, "step": 8600 },
    { "epoch": 0.22, "grad_norm": 7.5625, "learning_rate": 9.175879396984926e-05, "loss": 1.8634, "step": 8700 },
    { "epoch": 0.22, "grad_norm": 5.3046875, "learning_rate": 9.165829145728644e-05, "loss": 1.8434, "step": 8800 },
    { "epoch": 0.22, "grad_norm": 6.30859375, "learning_rate": 9.155778894472362e-05, "loss": 1.8358, "step": 8900 },
    { "epoch": 0.23, "grad_norm": 6.71484375, "learning_rate": 9.14572864321608e-05, "loss": 1.8229, "step": 9000 },
    { "epoch": 0.23, "grad_norm": 9.875, "learning_rate": 9.1356783919598e-05, "loss": 1.8039, "step": 9100 },
    { "epoch": 0.23, "grad_norm": 6.8828125, "learning_rate": 9.125628140703519e-05, "loss": 1.8105, "step": 9200 },
    { "epoch": 0.23, "grad_norm": 5.0859375, "learning_rate": 9.115577889447236e-05, "loss": 1.8134, "step": 9300 },
    { "epoch": 0.23, "grad_norm": 4.5078125, "learning_rate": 9.105527638190955e-05, "loss": 1.8096, "step": 9400 },
    { "epoch": 0.24, "grad_norm": 6.5, "learning_rate": 9.095477386934675e-05, "loss": 1.7956, "step": 9500 },
    { "epoch": 0.24, "grad_norm": 4.234375, "learning_rate": 9.085427135678392e-05, "loss": 1.8053, "step": 9600 },
    { "epoch": 0.24, "grad_norm": 9.6171875, "learning_rate": 9.075376884422111e-05, "loss": 1.8205, "step": 9700 },
    { "epoch": 0.24, "grad_norm": 5.046875, "learning_rate": 9.06532663316583e-05, "loss": 1.8414, "step": 9800 },
    { "epoch": 0.25, "grad_norm": 6.66796875, "learning_rate": 9.055276381909548e-05, "loss": 1.8026, "step": 9900 },
    { "epoch": 0.25, "grad_norm": 6.7421875, "learning_rate": 9.045226130653267e-05, "loss": 1.8133, "step": 10000 },
    { "epoch": 0.25, "eval_loss": 1.7820364236831665, "eval_runtime": 58.5615, "eval_samples_per_second": 17.076, "eval_steps_per_second": 4.269, "step": 10000 },
    { "epoch": 0.25, "grad_norm": 6.05078125, "learning_rate": 9.035175879396985e-05, "loss": 1.8395, "step": 10100 },
    { "epoch": 0.26, "grad_norm": 6.93359375, "learning_rate": 9.025125628140704e-05, "loss": 1.7843, "step": 10200 },
    { "epoch": 0.26, "grad_norm": 9.09375, "learning_rate": 9.015075376884423e-05, "loss": 1.8097, "step": 10300 },
    { "epoch": 0.26, "grad_norm": 5.5078125, "learning_rate": 9.005025125628141e-05, "loss": 1.824, "step": 10400 },
    { "epoch": 0.26, "grad_norm": 5.0703125, "learning_rate": 8.99497487437186e-05, "loss": 1.7863, "step": 10500 },
    { "epoch": 0.27, "grad_norm": 5.8203125, "learning_rate": 8.984924623115579e-05, "loss": 1.863, "step": 10600 },
    { "epoch": 0.27, "grad_norm": 5.4375, "learning_rate": 8.974874371859296e-05, "loss": 1.8308, "step": 10700 },
    { "epoch": 0.27, "grad_norm": 5.1171875, "learning_rate": 8.964824120603016e-05, "loss": 1.7928, "step": 10800 },
    { "epoch": 0.27, "grad_norm": 4.72265625, "learning_rate": 8.954773869346734e-05, "loss": 1.836, "step": 10900 },
    { "epoch": 0.28, "grad_norm": 5.35546875, "learning_rate": 8.944723618090453e-05, "loss": 1.7759, "step": 11000 },
    { "epoch": 0.28, "grad_norm": 6.04296875, "learning_rate": 8.93467336683417e-05, "loss": 1.8561, "step": 11100 },
    { "epoch": 0.28, "grad_norm": 6.49609375, "learning_rate": 8.92462311557789e-05, "loss": 1.7855, "step": 11200 },
    { "epoch": 0.28, "grad_norm": 4.94140625, "learning_rate": 8.914572864321609e-05, "loss": 1.7924, "step": 11300 },
    { "epoch": 0.28, "grad_norm": 7.515625, "learning_rate": 8.904522613065326e-05, "loss": 1.7844, "step": 11400 },
    { "epoch": 0.29, "grad_norm": 10.3515625, "learning_rate": 8.894472361809045e-05, "loss": 1.7828, "step": 11500 },
    { "epoch": 0.29, "grad_norm": 5.5078125, "learning_rate": 8.884422110552765e-05, "loss": 1.8021, "step": 11600 },
    { "epoch": 0.29, "grad_norm": 5.91015625, "learning_rate": 8.874371859296484e-05, "loss": 1.7883, "step": 11700 },
    { "epoch": 0.29, "grad_norm": 6.11328125, "learning_rate": 8.864321608040201e-05, "loss": 1.8127, "step": 11800 },
    { "epoch": 0.3, "grad_norm": 7.7734375, "learning_rate": 8.85427135678392e-05, "loss": 1.7899, "step": 11900 },
    { "epoch": 0.3, "grad_norm": 5.88671875, "learning_rate": 8.84422110552764e-05, "loss": 1.8298, "step": 12000 },
    { "epoch": 0.3, "grad_norm": 7.35546875, "learning_rate": 8.834170854271357e-05, "loss": 1.8107, "step": 12100 },
    { "epoch": 0.3, "grad_norm": 8.984375, "learning_rate": 8.824120603015076e-05, "loss": 1.7587, "step": 12200 },
    { "epoch": 0.31, "grad_norm": 7.0078125, "learning_rate": 8.814070351758794e-05, "loss": 1.8383, "step": 12300 },
    { "epoch": 0.31, "grad_norm": 4.87890625, "learning_rate": 8.804020100502513e-05, "loss": 1.8341, "step": 12400 },
    { "epoch": 0.31, "grad_norm": 7.375, "learning_rate": 8.793969849246232e-05, "loss": 1.7689, "step": 12500 },
    { "epoch": 0.31, "eval_loss": 1.7876064777374268, "eval_runtime": 58.5853, "eval_samples_per_second": 17.069, "eval_steps_per_second": 4.267, "step": 12500 },
    { "epoch": 0.32, "grad_norm": 6.21484375, "learning_rate": 8.78391959798995e-05, "loss": 1.7756, "step": 12600 },
    { "epoch": 0.32, "grad_norm": 4.9765625, "learning_rate": 8.773869346733669e-05, "loss": 1.8252, "step": 12700 },
    { "epoch": 0.32, "grad_norm": 8.1484375, "learning_rate": 8.763819095477387e-05, "loss": 1.8265, "step": 12800 },
    { "epoch": 0.32, "grad_norm": 6.59375, "learning_rate": 8.753768844221106e-05, "loss": 1.7877, "step": 12900 },
    { "epoch": 0.33, "grad_norm": 5.40234375, "learning_rate": 8.743718592964825e-05, "loss": 1.7729, "step": 13000 },
    { "epoch": 0.33, "grad_norm": 5.43359375, "learning_rate": 8.733668341708543e-05, "loss": 1.8078, "step": 13100 },
    { "epoch": 0.33, "grad_norm": 5.33984375, "learning_rate": 8.723618090452261e-05, "loss": 1.8294, "step": 13200 },
    { "epoch": 0.33, "grad_norm": 7.26171875, "learning_rate": 8.713567839195981e-05, "loss": 1.7883, "step": 13300 },
    { "epoch": 0.34, "grad_norm": 10.5859375, "learning_rate": 8.7035175879397e-05, "loss": 1.8201, "step": 13400 },
    { "epoch": 0.34, "grad_norm": 6.0546875, "learning_rate": 8.693467336683418e-05, "loss": 1.747, "step": 13500 },
    { "epoch": 0.34, "grad_norm": 4.40234375, "learning_rate": 8.683417085427135e-05, "loss": 1.8044, "step": 13600 },
    { "epoch": 0.34, "grad_norm": 5.55078125, "learning_rate": 8.673366834170855e-05, "loss": 1.7687, "step": 13700 },
    { "epoch": 0.34, "grad_norm": 4.03125, "learning_rate": 8.663316582914574e-05, "loss": 1.7941, "step": 13800 },
    { "epoch": 0.35, "grad_norm": 5.046875, "learning_rate": 8.653266331658291e-05, "loss": 1.7781, "step": 13900 },
    { "epoch": 0.35, "grad_norm": 8.3828125, "learning_rate": 8.64321608040201e-05, "loss": 1.7927, "step": 14000 },
    { "epoch": 0.35, "grad_norm": 5.33984375, "learning_rate": 8.63316582914573e-05, "loss": 1.7793, "step": 14100 },
    { "epoch": 0.35, "grad_norm": 6.73828125, "learning_rate": 8.623115577889449e-05, "loss": 1.7785, "step": 14200 },
    { "epoch": 0.36, "grad_norm": 4.859375, "learning_rate": 8.613065326633166e-05, "loss": 1.7857, "step": 14300 },
    { "epoch": 0.36, "grad_norm": 5.09765625, "learning_rate": 8.603015075376884e-05, "loss": 1.836, "step": 14400 },
    { "epoch": 0.36, "grad_norm": 8.71875, "learning_rate": 8.592964824120603e-05, "loss": 1.8173, "step": 14500 },
    { "epoch": 0.36, "grad_norm": 4.703125, "learning_rate": 8.582914572864322e-05, "loss": 1.8225, "step": 14600 },
    { "epoch": 0.37, "grad_norm": 5.14453125, "learning_rate": 8.57286432160804e-05, "loss": 1.7832, "step": 14700 },
    { "epoch": 0.37, "grad_norm": 5.51953125, "learning_rate": 8.562814070351759e-05, "loss": 1.7766, "step": 14800 },
    { "epoch": 0.37, "grad_norm": 5.1484375, "learning_rate": 8.552763819095478e-05, "loss": 1.7659, "step": 14900 },
    { "epoch": 0.38, "grad_norm": 3.9375, "learning_rate": 8.542713567839196e-05, "loss": 1.8042, "step": 15000 },
    { "epoch": 0.38, "eval_loss": 1.7957104444503784, "eval_runtime": 58.7068, "eval_samples_per_second": 17.034, "eval_steps_per_second": 4.258, "step": 15000 },
    { "epoch": 0.38, "grad_norm": 5.37109375, "learning_rate": 8.532663316582915e-05, "loss": 1.7498, "step": 15100 },
    { "epoch": 0.38, "grad_norm": 6.34765625, "learning_rate": 8.522613065326634e-05, "loss": 1.8001, "step": 15200 },
    { "epoch": 0.38, "grad_norm": 7.6953125, "learning_rate": 8.512562814070352e-05, "loss": 1.8107, "step": 15300 },
    { "epoch": 0.39, "grad_norm": 4.55859375, "learning_rate": 8.502512562814071e-05, "loss": 1.8115, "step": 15400 },
    { "epoch": 0.39, "grad_norm": 5.4296875, "learning_rate": 8.49246231155779e-05, "loss": 1.7869, "step": 15500 },
    { "epoch": 0.39, "grad_norm": 5.62890625, "learning_rate": 8.482412060301508e-05, "loss": 1.7731, "step": 15600 },
    { "epoch": 0.39, "grad_norm": 8.6171875, "learning_rate": 8.472361809045226e-05, "loss": 1.8287, "step": 15700 },
    { "epoch": 0.4, "grad_norm": 6.0703125, "learning_rate": 8.462311557788946e-05, "loss": 1.7866, "step": 15800 },
    { "epoch": 0.4, "grad_norm": 5.0625, "learning_rate": 8.452261306532664e-05, "loss": 1.7941, "step": 15900 },
    { "epoch": 0.4, "grad_norm": 6.69140625, "learning_rate": 8.442211055276383e-05, "loss": 1.807, "step": 16000 },
    { "epoch": 0.4, "grad_norm": 5.3125, "learning_rate": 8.4321608040201e-05, "loss": 1.7723, "step": 16100 },
    { "epoch": 0.41, "grad_norm": 4.01171875, "learning_rate": 8.42211055276382e-05, "loss": 1.7878, "step": 16200 },
    { "epoch": 0.41, "grad_norm": 8.21875, "learning_rate": 8.412060301507539e-05, "loss": 1.7906, "step": 16300 },
    { "epoch": 0.41, "grad_norm": 5.55859375, "learning_rate": 8.402010050251256e-05, "loss": 1.8018, "step": 16400 },
    { "epoch": 0.41, "grad_norm": 7.1796875, "learning_rate": 8.391959798994975e-05, "loss": 1.8287, "step": 16500 },
    { "epoch": 0.41, "grad_norm": 5.76953125, "learning_rate": 8.381909547738695e-05, "loss": 1.8516, "step": 16600 },
    { "epoch": 0.42, "grad_norm": 6.8671875, "learning_rate": 8.371859296482413e-05, "loss": 1.7676, "step": 16700 },
    { "epoch": 0.42, "grad_norm": 5.66015625, "learning_rate": 8.36180904522613e-05, "loss": 1.7963, "step": 16800 },
    { "epoch": 0.42, "grad_norm": 6.27734375, "learning_rate": 8.351758793969849e-05, "loss": 1.8459, "step": 16900 },
    { "epoch": 0.42, "grad_norm": 7.44140625, "learning_rate": 8.341708542713568e-05, "loss": 1.7887, "step": 17000 },
    { "epoch": 0.43, "grad_norm": 7.2109375, "learning_rate": 8.331658291457287e-05, "loss": 1.7455, "step": 17100 },
    { "epoch": 0.43, "grad_norm": 6.5625, "learning_rate": 8.321608040201005e-05, "loss": 1.7657, "step": 17200 },
    { "epoch": 0.43, "grad_norm": 8.2578125, "learning_rate": 8.311557788944724e-05, "loss": 1.7667, "step": 17300 },
    { "epoch": 0.43, "grad_norm": 5.5625, "learning_rate": 8.301507537688443e-05, "loss": 1.8137, "step": 17400 },
    { "epoch": 0.44, "grad_norm": 4.91796875, "learning_rate": 8.291457286432161e-05, "loss": 1.7911, "step": 17500 },
    { "epoch": 0.44, "eval_loss": 1.7952567338943481, "eval_runtime": 58.854, "eval_samples_per_second": 16.991, "eval_steps_per_second": 4.248, "step": 17500 },
    { "epoch": 0.44, "grad_norm": 8.5859375, "learning_rate": 8.28140703517588e-05, "loss": 1.7892, "step": 17600 },
    { "epoch": 0.44, "grad_norm": 4.16796875, "learning_rate": 8.271356783919599e-05, "loss": 1.7493, "step": 17700 },
    { "epoch": 0.45, "grad_norm": 6.68359375, "learning_rate": 8.261306532663317e-05, "loss": 1.785, "step": 17800 },
    { "epoch": 0.45, "grad_norm": 4.93359375, "learning_rate": 8.251256281407036e-05, "loss": 1.807, "step": 17900 },
    { "epoch": 0.45, "grad_norm": 11.2109375, "learning_rate": 8.241206030150754e-05, "loss": 1.8133, "step": 18000 },
    { "epoch": 0.45, "grad_norm": 6.25, "learning_rate": 8.231155778894473e-05, "loss": 1.8227, "step": 18100 },
    { "epoch": 0.46, "grad_norm": 8.46875, "learning_rate": 8.22110552763819e-05, "loss": 1.795, "step": 18200 },
    { "epoch": 0.46, "grad_norm": 3.587890625, "learning_rate": 8.21105527638191e-05, "loss": 1.7832, "step": 18300 },
    { "epoch": 0.46, "grad_norm": 5.296875, "learning_rate": 8.201005025125629e-05, "loss": 1.7968, "step": 18400 },
    { "epoch": 0.46, "grad_norm": 4.015625, "learning_rate": 8.190954773869348e-05, "loss": 1.8195, "step": 18500 },
    { "epoch": 0.47, "grad_norm": 4.75, "learning_rate": 8.180904522613065e-05, "loss": 1.782, "step": 18600 },
    { "epoch": 0.47, "grad_norm": 7.046875, "learning_rate": 8.170854271356785e-05, "loss": 1.7568, "step": 18700 },
    { "epoch": 0.47, "grad_norm": 6.26953125, "learning_rate": 8.160804020100504e-05, "loss": 1.8259, "step": 18800 },
    { "epoch": 0.47, "grad_norm": 6.890625, "learning_rate": 8.150753768844221e-05, "loss": 1.7702, "step": 18900 },
    { "epoch": 0.47, "grad_norm": 6.28515625, "learning_rate": 8.14070351758794e-05, "loss": 1.7551, "step": 19000 },
    { "epoch": 0.48, "grad_norm": 5.140625, "learning_rate": 8.130653266331658e-05, "loss": 1.7674, "step": 19100 },
    { "epoch": 0.48, "grad_norm": 5.94921875, "learning_rate": 8.120603015075378e-05, "loss": 1.7653, "step": 19200 },
    { "epoch": 0.48, "grad_norm": 6.109375, "learning_rate": 8.110552763819096e-05, "loss": 1.7855, "step": 19300 },
    { "epoch": 0.48, "grad_norm": 4.9921875, "learning_rate": 8.100502512562814e-05, "loss": 1.7272, "step": 19400 },
    { "epoch": 0.49, "grad_norm": 7.34765625, "learning_rate": 8.090452261306533e-05, "loss": 1.8004, "step": 19500 },
    { "epoch": 0.49, "grad_norm": 7.56640625, "learning_rate": 8.080402010050251e-05, "loss": 1.7634, "step": 19600 },
    { "epoch": 0.49, "grad_norm": 8.84375, "learning_rate": 8.07035175879397e-05, "loss": 1.7221, "step": 19700 },
    { "epoch": 0.49, "grad_norm": 6.00390625, "learning_rate": 8.060301507537689e-05, "loss": 1.8102, "step": 19800 },
    { "epoch": 0.5, "grad_norm": 4.65625, "learning_rate": 8.050251256281407e-05, "loss": 1.7403, "step": 19900 },
    { "epoch": 0.5, "grad_norm": 7.57421875, "learning_rate": 8.040201005025126e-05, "loss": 1.7996, "step": 20000 },
    { "epoch": 0.5, "eval_loss": 1.774701714515686, "eval_runtime": 58.6217, "eval_samples_per_second": 17.059, "eval_steps_per_second": 4.265, "step": 20000 },
    { "epoch": 0.5, "grad_norm": 5.9296875, "learning_rate": 8.030150753768845e-05, "loss": 1.8128, "step": 20100 },
    { "epoch": 0.51, "grad_norm": 3.328125, "learning_rate": 8.020100502512563e-05, "loss": 1.822, "step": 20200 },
    { "epoch": 0.51, "grad_norm": 9.859375, "learning_rate": 8.010050251256282e-05, "loss": 1.8077, "step": 20300 },
    { "epoch": 0.51, "grad_norm": 6.625, "learning_rate": 8e-05, "loss": 1.759, "step": 20400 },
    { "epoch": 0.51, "grad_norm": 5.71875, "learning_rate": 7.989949748743719e-05, "loss": 1.8109, "step": 20500 },
    { "epoch": 0.52, "grad_norm": 5.9921875, "learning_rate": 7.979899497487438e-05, "loss": 1.7575, "step": 20600 },
    { "epoch": 0.52, "grad_norm": 5.34765625, "learning_rate": 7.969849246231155e-05, "loss": 1.7939, "step": 20700 },
    { "epoch": 0.52, "grad_norm": 4.98046875, "learning_rate": 7.959798994974875e-05, "loss": 1.7786, "step": 20800 },
    { "epoch": 0.52, "grad_norm": 3.318359375, "learning_rate": 7.949748743718594e-05, "loss": 1.783, "step": 20900 },
    { "epoch": 0.53, "grad_norm": 4.359375, "learning_rate": 7.939698492462313e-05, "loss": 1.7968, "step": 21000 },
    { "epoch": 0.53, "grad_norm": 5.01171875, "learning_rate": 7.92964824120603e-05, "loss": 1.7869, "step": 21100 },
    { "epoch": 0.53, "grad_norm": 7.66796875, "learning_rate": 7.91959798994975e-05, "loss": 1.8054, "step": 21200 },
    { "epoch": 0.53, "grad_norm": 6.296875, "learning_rate": 7.909547738693468e-05, "loss": 1.7957, "step": 21300 },
    { "epoch": 0.54, "grad_norm": 6.23828125, "learning_rate": 7.899497487437186e-05, "loss": 1.8123, "step": 21400 },
    { "epoch": 0.54, "grad_norm": 4.2265625, "learning_rate": 7.889447236180904e-05, "loss": 1.79, "step": 21500 },
    { "epoch": 0.54, "grad_norm": 4.39453125, "learning_rate": 7.879396984924623e-05, "loss": 1.7505, "step": 21600 },
    { "epoch": 0.54, "grad_norm": 9.0546875, "learning_rate": 7.869346733668343e-05, "loss": 1.7862, "step": 21700 },
    { "epoch": 0.55, "grad_norm": 9.75, "learning_rate": 7.85929648241206e-05, "loss": 1.8352, "step": 21800 },
    { "epoch": 0.55, "grad_norm": 4.9765625, "learning_rate": 7.849246231155779e-05, "loss": 1.7609, "step": 21900 },
    { "epoch": 0.55, "grad_norm": 10.21875, "learning_rate": 7.839195979899498e-05, "loss": 1.8483, "step": 22000 },
    { "epoch": 0.55, "grad_norm": 5.8515625, "learning_rate": 7.829145728643216e-05, "loss": 1.7301, "step": 22100 },
    { "epoch": 0.56, "grad_norm": 5.76953125, "learning_rate": 7.819095477386935e-05, "loss": 1.7806, "step": 22200 },
    { "epoch": 0.56, "grad_norm": 7.39453125, "learning_rate": 7.809045226130654e-05, "loss": 1.7654, "step": 22300 },
    { "epoch": 0.56, "grad_norm": 6.19921875, "learning_rate": 7.798994974874372e-05, "loss": 1.7647, "step": 22400 },
    { "epoch": 0.56, "grad_norm": 6.296875, "learning_rate": 7.788944723618091e-05, "loss": 1.7572, "step": 22500 },
    { "epoch": 0.56, "eval_loss": 1.7940764427185059, "eval_runtime": 58.6091, "eval_samples_per_second": 17.062, "eval_steps_per_second": 4.266, "step": 22500 },
    { "epoch": 0.56, "grad_norm": 9.2109375, "learning_rate": 7.77889447236181e-05, "loss": 1.8022, "step": 22600 },
    { "epoch": 0.57, "grad_norm": 8.140625, "learning_rate": 7.768844221105528e-05, "loss": 1.7874, "step": 22700 },
    { "epoch": 0.57, "grad_norm": 4.5078125, "learning_rate": 7.758793969849247e-05, "loss": 1.7889, "step": 22800 },
    { "epoch": 0.57, "grad_norm": 6.44921875, "learning_rate": 7.748743718592966e-05, "loss": 1.7439, "step": 22900 },
    { "epoch": 0.57, "grad_norm": 7.6328125, "learning_rate": 7.738693467336684e-05, "loss": 1.7804, "step": 23000 },
    { "epoch": 0.58, "grad_norm": 7.578125, "learning_rate": 7.728643216080403e-05, "loss": 1.7768, "step": 23100 },
    { "epoch": 0.58, "grad_norm": 8.234375, "learning_rate": 7.71859296482412e-05, "loss": 1.7694, "step": 23200 },
    { "epoch": 0.58, "grad_norm": 4.140625, "learning_rate": 7.70854271356784e-05, "loss": 1.7481, "step": 23300 },
    { "epoch": 0.58, "grad_norm": 4.75390625, "learning_rate": 7.698492462311559e-05, "loss": 1.7657, "step": 23400 },
    { "epoch": 0.59, "grad_norm": 7.4921875, "learning_rate": 7.688442211055277e-05, "loss": 1.7717, "step": 23500 },
    { "epoch": 0.59, "grad_norm": 6.15234375, "learning_rate": 7.678391959798995e-05, "loss": 1.7387, "step": 23600 },
    { "epoch": 0.59, "grad_norm": 4.33203125, "learning_rate": 7.668341708542713e-05, "loss": 1.7737, "step": 23700 },
    { "epoch": 0.59, "grad_norm": 4.20703125, "learning_rate": 7.658291457286433e-05, "loss": 1.7786, "step": 23800 },
    { "epoch": 0.6, "grad_norm": 3.861328125, "learning_rate": 7.64824120603015e-05, "loss": 1.7953, "step": 23900 },
    { "epoch": 0.6, "grad_norm": 4.7421875, "learning_rate": 7.638190954773869e-05, "loss": 1.7508, "step": 24000 },
    { "epoch": 0.6, "grad_norm": 7.1484375, "learning_rate": 7.628140703517588e-05, "loss": 1.7496, "step": 24100 },
    { "epoch": 0.6, "grad_norm": 4.08984375, "learning_rate": 7.618090452261307e-05, "loss": 1.7959, "step": 24200 },
    { "epoch": 0.61, "grad_norm": 6.78125, "learning_rate": 7.608040201005025e-05, "loss": 1.7997, "step": 24300 },
    { "epoch": 0.61, "grad_norm": 7.9140625, "learning_rate": 7.597989949748744e-05, "loss": 1.7578, "step": 24400 },
    { "epoch": 0.61, "grad_norm": 4.0546875, "learning_rate": 7.587939698492463e-05, "loss": 1.7787, "step": 24500 },
    { "epoch": 0.61, "grad_norm": 7.00390625, "learning_rate": 7.577889447236181e-05, "loss": 1.7563, "step": 24600 },
    { "epoch": 0.62, "grad_norm": 5.078125, "learning_rate": 7.5678391959799e-05, "loss": 1.7971, "step": 24700 },
    { "epoch": 0.62, "grad_norm": 7.171875, "learning_rate": 7.557788944723618e-05, "loss": 1.7662, "step": 24800 },
    { "epoch": 0.62, "grad_norm": 5.6015625, "learning_rate": 7.547738693467337e-05, "loss": 1.7887, "step": 24900 },
    { "epoch": 0.62, "grad_norm": 5.7109375, "learning_rate": 7.537688442211056e-05, "loss": 1.8288, "step": 25000 },
    { "epoch": 0.62, "eval_loss": 1.7818756103515625, "eval_runtime": 58.7925, "eval_samples_per_second": 17.009, "eval_steps_per_second": 4.252, "step": 25000 },
    { "epoch": 0.63, "grad_norm": 4.39453125, "learning_rate": 7.527638190954774e-05, "loss": 1.7837, "step": 25100 },
    { "epoch": 0.63, "grad_norm": 4.3671875, "learning_rate": 7.517587939698493e-05, "loss": 1.767, "step": 25200 },
    { "epoch": 0.63, "grad_norm": 5.31640625, "learning_rate": 7.507537688442212e-05, "loss": 1.7601, "step": 25300 },
    { "epoch": 0.64, "grad_norm": 4.54296875, "learning_rate": 7.49748743718593e-05, "loss": 1.7574, "step": 25400 },
    { "epoch": 0.64, "grad_norm": 4.890625, "learning_rate": 7.487437185929649e-05, "loss": 1.7523, "step": 25500 },
    { "epoch": 0.64, "grad_norm": 7.71484375, "learning_rate": 7.477386934673368e-05, "loss": 1.7567, "step": 25600 },
    { "epoch": 0.64, "grad_norm": 5.18359375, "learning_rate": 7.467336683417085e-05, "loss": 1.7937, "step": 25700 },
    { "epoch": 0.65, "grad_norm": 5.81640625, "learning_rate": 7.457286432160805e-05, "loss": 1.7273, "step": 25800 },
    { "epoch": 0.65, "grad_norm": 3.94921875, "learning_rate": 7.447236180904524e-05, "loss": 1.8141, "step": 25900 },
    { "epoch": 0.65, "grad_norm": 3.439453125, "learning_rate": 7.437185929648241e-05, "loss": 1.7697, "step": 26000 },
    { "epoch": 0.65, "grad_norm": 4.55859375, "learning_rate": 7.42713567839196e-05, "loss": 1.7679, "step": 26100 },
    { "epoch": 0.66, "grad_norm": 6.6640625, "learning_rate": 7.417085427135678e-05, "loss": 1.8057, "step": 26200 },
    { "epoch": 0.66, "grad_norm": 3.77734375, "learning_rate": 7.407035175879398e-05, "loss": 1.7748, "step": 26300 },
    { "epoch": 0.66, "grad_norm": 5.78515625, "learning_rate": 7.396984924623115e-05, "loss": 1.7825, "step": 26400 },
    { "epoch": 0.66, "grad_norm": 4.49609375, "learning_rate": 7.386934673366834e-05, "loss": 1.7735, "step": 26500 },
    { "epoch": 0.67, "grad_norm": 6.53515625, "learning_rate": 7.376884422110553e-05, "loss": 1.7663, "step": 26600 },
    { "epoch": 0.67, "grad_norm": 3.6875, "learning_rate": 7.366834170854271e-05, "loss": 1.7844, "step": 26700 },
    { "epoch": 0.67, "grad_norm": 4.16015625, "learning_rate": 7.35678391959799e-05, "loss": 1.797, "step": 26800 },
    { "epoch": 0.67, "grad_norm": 7.1640625, "learning_rate": 7.346733668341709e-05, "loss": 1.8147, "step": 26900 },
    { "epoch": 0.68, "grad_norm": 3.8671875, "learning_rate": 7.336683417085427e-05, "loss": 1.7814, "step": 27000 },
    { "epoch": 0.68, "grad_norm": 7.89453125, "learning_rate": 7.326633165829146e-05, "loss": 1.7911, "step": 27100 },
    { "epoch": 0.68, "grad_norm": 4.95703125, "learning_rate": 7.316582914572865e-05, "loss": 1.7882, "step": 27200 },
    { "epoch": 0.68, "grad_norm": 5.84765625, "learning_rate": 7.306532663316583e-05, "loss": 1.7886, "step": 27300 },
    { "epoch": 0.69, "grad_norm": 7.3046875, "learning_rate": 7.296482412060302e-05, "loss": 1.7752, "step": 27400 },
    { "epoch": 0.69, "grad_norm": 5.328125, "learning_rate": 7.28643216080402e-05, "loss": 1.7561, "step": 27500 },
    { "epoch": 0.69, "eval_loss": 1.7702152729034424, "eval_runtime": 58.6028, "eval_samples_per_second": 17.064, "eval_steps_per_second": 4.266, "step": 27500 },
    { "epoch": 0.69, "grad_norm": 5.05078125, "learning_rate": 7.276381909547739e-05, "loss": 1.8037, "step": 27600 },
    { "epoch": 0.69, "grad_norm": 5.90625, "learning_rate": 7.266331658291458e-05, "loss": 1.7701, "step": 27700 },
    { "epoch": 0.69, "grad_norm": 6.7265625, "learning_rate": 7.256281407035177e-05, "loss": 1.7891, "step": 27800 },
    { "epoch": 0.7, "grad_norm": 5.3515625, "learning_rate": 7.246231155778895e-05, "loss": 1.7706, "step": 27900 },
    { "epoch": 0.7, "grad_norm": 5.18359375, "learning_rate": 7.236180904522614e-05, "loss": 1.7655, "step": 28000 },
    { "epoch": 0.7, "grad_norm": 5.33984375, "learning_rate": 7.226130653266333e-05, "loss": 1.7846, "step": 28100 },
    { "epoch": 0.7, "grad_norm": 8.046875, "learning_rate": 7.21608040201005e-05, "loss": 1.7997, "step": 28200 },
    { "epoch": 0.71, "grad_norm": 4.5, "learning_rate": 7.206030150753768e-05, "loss": 1.7938, "step": 28300 },
    { "epoch": 0.71, "grad_norm": 3.861328125, "learning_rate": 7.195979899497488e-05, "loss": 1.7676, "step": 28400 },
    { "epoch": 0.71, "grad_norm": 4.6484375, "learning_rate": 7.185929648241206e-05, "loss": 1.7469, "step": 28500 },
    { "epoch": 0.71, "grad_norm": 5.90625, "learning_rate": 7.175879396984924e-05, "loss": 1.7756, "step": 28600 },
    { "epoch": 0.72, "grad_norm": 3.4765625, "learning_rate": 7.165829145728643e-05, "loss": 1.8028, "step": 28700 },
    { "epoch": 0.72, "grad_norm": 4.734375, "learning_rate": 7.155778894472363e-05, "loss": 1.7702, "step": 28800 },
    { "epoch": 0.72, "grad_norm": 5.33203125, "learning_rate": 7.14572864321608e-05, "loss": 1.7662, "step": 28900 },
    { "epoch": 0.72, "grad_norm": 3.783203125, "learning_rate": 7.135678391959799e-05, "loss": 1.7767, "step": 29000 },
    { "epoch": 0.73, "grad_norm": 5.734375, "learning_rate": 7.125628140703518e-05, "loss": 1.7829, "step": 29100 },
    { "epoch": 0.73, "grad_norm": 4.79296875, "learning_rate": 7.115577889447236e-05, "loss": 1.7596, "step": 29200 },
    { "epoch": 0.73, "grad_norm": 5.20703125, "learning_rate": 7.105527638190955e-05, "loss": 1.7136, "step": 29300 },
    { "epoch": 0.73, "grad_norm": 7.37109375, "learning_rate": 7.095477386934674e-05, "loss": 1.7612, "step": 29400 },
    { "epoch": 0.74, "grad_norm": 3.88671875, "learning_rate": 7.085427135678392e-05, "loss": 1.768, "step": 29500 },
    { "epoch": 0.74, "grad_norm": 3.248046875, "learning_rate": 7.075376884422111e-05, "loss": 1.7706, "step": 29600 },
    { "epoch": 0.74, "grad_norm": 4.6171875, "learning_rate": 7.06532663316583e-05, "loss": 1.7715, "step": 29700 },
    { "epoch": 0.74, "grad_norm": 5.38671875, "learning_rate": 7.055276381909548e-05, "loss": 1.7381, "step": 29800 },
    { "epoch": 0.75, "grad_norm": 6.9375, "learning_rate": 7.045226130653267e-05, "loss": 1.7993, "step": 29900 },
    { "epoch": 0.75, "grad_norm": 6.6171875, "learning_rate": 7.035175879396985e-05, "loss": 1.7474, "step": 30000 },
    { "epoch": 0.75, "eval_loss": 1.79494309425354, "eval_runtime": 58.6639, "eval_samples_per_second": 17.046, "eval_steps_per_second": 4.262, "step": 30000 },
    { "epoch": 0.75, "grad_norm": 6.90234375, "learning_rate": 7.025125628140704e-05, "loss": 1.7628, "step": 30100 },
    { "epoch": 0.76, "grad_norm": 4.24609375, "learning_rate": 7.015075376884423e-05, "loss": 1.7704, "step": 30200 },
    { "epoch": 0.76, "grad_norm": 6.03125, "learning_rate": 7.00502512562814e-05, "loss": 1.7551, "step": 30300 },
    { "epoch": 0.76, "grad_norm": 8.1328125, "learning_rate": 6.99497487437186e-05, "loss": 1.7783, "step": 30400 },
    { "epoch": 0.76, "grad_norm": 4.6328125, "learning_rate": 6.984924623115579e-05, "loss": 1.7669, "step": 30500 },
    { "epoch": 0.77, "grad_norm": 3.54296875, "learning_rate": 6.974874371859297e-05, "loss": 1.7497, "step": 30600 },
    { "epoch": 0.77, "grad_norm": 3.130859375, "learning_rate": 6.964824120603015e-05, "loss": 1.7455, "step": 30700 },
    { "epoch": 0.77, "grad_norm": 6.28515625, "learning_rate": 6.954773869346733e-05, "loss": 1.7537, "step": 30800 },
    { "epoch": 0.77, "grad_norm": 6.5390625, "learning_rate": 6.944723618090453e-05, "loss": 1.7651, "step": 30900 },
    { "epoch": 0.78, "grad_norm": 7.15234375, "learning_rate": 6.93467336683417e-05, "loss": 1.7822, "step": 31000 },
    { "epoch": 0.78, "grad_norm": 5.40625, "learning_rate": 6.924623115577889e-05, "loss": 1.7987, "step": 31100 },
    { "epoch": 0.78, "grad_norm": 5.0546875, "learning_rate": 6.914572864321608e-05, "loss": 1.7787, "step": 31200 },
    { "epoch": 0.78, "grad_norm": 6.83203125, "learning_rate": 6.904522613065328e-05, "loss": 1.7629, "step": 31300 },
    { "epoch": 0.79, "grad_norm": 8.703125, "learning_rate": 6.894472361809045e-05, "loss": 1.7253, "step": 31400 },
    { "epoch": 0.79, "grad_norm": 6.3359375, "learning_rate": 6.884422110552764e-05, "loss": 1.7446, "step": 31500 },
    { "epoch": 0.79, "grad_norm": 7.13671875, "learning_rate": 6.874371859296482e-05, "loss": 1.7621, "step": 31600 },
    { "epoch": 0.79, "grad_norm": 6.875, "learning_rate": 6.864321608040201e-05, "loss": 1.7461, "step": 31700 },
    { "epoch": 0.8, "grad_norm": 6.109375, "learning_rate": 6.85427135678392e-05, "loss": 1.795, "step": 31800 },
    { "epoch": 0.8, "grad_norm": 4.515625, "learning_rate": 6.844221105527638e-05, "loss": 1.7707, "step": 31900 },
    { "epoch": 0.8, "grad_norm": 6.69140625, "learning_rate": 6.834170854271357e-05, "loss": 1.7683, "step": 32000 },
    { "epoch": 0.8, "grad_norm": 3.7734375, "learning_rate": 6.824120603015076e-05, "loss": 1.7407, "step": 32100 },
    { "epoch": 0.81, "grad_norm": 7.03515625, "learning_rate": 6.814070351758794e-05, "loss": 1.7581, "step": 32200 },
    { "epoch": 0.81, "grad_norm": 7.68359375, "learning_rate": 6.804020100502513e-05, "loss": 1.7809, "step": 32300 },
    { "epoch": 0.81, "grad_norm": 6.17578125, "learning_rate": 6.793969849246232e-05, "loss": 1.7748, "step": 32400 },
    { "epoch": 0.81, "grad_norm": 7.1484375, "learning_rate": 6.78391959798995e-05, "loss": 1.7814, "step": 32500 },
    { "epoch": 0.81, "eval_loss": 1.7562289237976074, "eval_runtime": 58.6717, "eval_samples_per_second": 17.044, "eval_steps_per_second": 4.261, "step": 32500 },
    { "epoch": 0.81, "grad_norm": 5.6796875, "learning_rate": 6.773869346733669e-05, "loss": 1.7753, "step": 32600 },
    { "epoch": 0.82, "grad_norm": 6.26171875, "learning_rate": 6.763819095477388e-05, "loss": 1.7479, "step": 32700 },
    { "epoch": 0.82, "grad_norm": 4.37109375, "learning_rate": 6.753768844221105e-05, "loss": 1.8101, "step": 32800 },
    { "epoch": 0.82, "grad_norm": 4.2421875, "learning_rate": 6.743718592964824e-05, "loss": 1.7543, "step": 32900 },
    { "epoch": 0.82, "grad_norm": 3.560546875, "learning_rate": 6.733668341708544e-05, "loss": 1.7124, "step": 33000 },
    { "epoch": 0.83, "grad_norm": 6.15625, "learning_rate": 6.723618090452262e-05, "loss": 1.7396, "step": 33100 },
    { "epoch": 0.83, "grad_norm": 6.2890625, "learning_rate": 6.71356783919598e-05, "loss": 1.7732, "step": 33200 },
    { "epoch": 0.83, "grad_norm": 3.71875, "learning_rate": 6.703517587939698e-05, "loss": 1.7662, "step": 33300 },
    { "epoch": 0.83, "grad_norm": 4.69140625, "learning_rate": 6.693467336683418e-05, "loss": 1.7547, "step": 33400 },
    { "epoch": 0.84, "grad_norm": 6.50390625, "learning_rate": 6.683417085427135e-05, "loss": 1.7678, "step": 33500 },
    { "epoch": 0.84, "grad_norm": 3.205078125, "learning_rate": 6.673366834170854e-05, "loss": 1.7886, "step": 33600 },
    { "epoch": 0.84, "grad_norm": 4.8125, "learning_rate": 6.663316582914573e-05, "loss": 1.7663, "step": 33700 },
    { "epoch": 0.84, "grad_norm": 6.640625, "learning_rate": 6.653266331658293e-05, "loss": 1.7383, "step": 33800 },
    { "epoch": 0.85, "grad_norm": 4.8515625, "learning_rate": 6.64321608040201e-05, "loss": 1.7691, "step": 33900 },
    { "epoch": 0.85, "grad_norm": 4.2578125, "learning_rate": 6.633165829145729e-05, "loss": 1.7573, "step": 34000 },
    { "epoch": 0.85, "grad_norm": 2.880859375, "learning_rate": 6.623115577889447e-05, "loss": 1.7703, "step": 34100 },
    { "epoch": 0.85, "grad_norm": 5.46875, "learning_rate": 6.613065326633166e-05, "loss": 1.781, "step": 34200 },
    { "epoch": 0.86, "grad_norm": 4.01953125, "learning_rate": 6.603015075376885e-05, "loss": 1.728, "step": 34300 },
    { "epoch": 0.86, "grad_norm": 6.93359375, "learning_rate": 6.592964824120603e-05, "loss": 1.7715, "step": 34400 },
    { "epoch": 0.86, "grad_norm": 4.55078125, "learning_rate": 6.582914572864322e-05, "loss": 1.7755, "step": 34500 },
    { "epoch": 0.86, "grad_norm": 3.46875, "learning_rate": 6.57286432160804e-05, "loss": 1.7887, "step": 34600 },
    { "epoch": 0.87, "grad_norm": 7.51953125, "learning_rate": 6.562814070351759e-05, "loss": 1.7574, "step": 34700 },
    { "epoch": 0.87, "grad_norm": 5.30859375, "learning_rate": 6.552763819095478e-05, "loss": 1.7129, "step": 34800 },
    { "epoch": 0.87, "grad_norm": 5.5859375, "learning_rate": 6.542713567839197e-05, "loss": 1.7443, "step": 34900 },
    { "epoch": 0.88, "grad_norm": 4.28125, "learning_rate": 6.532663316582915e-05, "loss": 1.7575, "step": 35000 },
    { "epoch": 0.88, "eval_loss": 1.7717657089233398, "eval_runtime": 58.5137, "eval_samples_per_second": 17.09, "eval_steps_per_second": 4.273, "step": 35000 },
    { "epoch": 0.88, "grad_norm": 4.7890625, "learning_rate": 6.522613065326634e-05, "loss": 1.7768, "step": 35100 },
    { "epoch": 0.88, "grad_norm": 6.58984375, "learning_rate": 6.512562814070352e-05, "loss": 1.7784, "step": 35200 },
    { "epoch": 0.88, "grad_norm": 4.98046875, "learning_rate": 6.50251256281407e-05, "loss": 1.7156, "step": 35300 },
    { "epoch": 0.89, "grad_norm": 7.83203125, "learning_rate": 6.492462311557788e-05, "loss": 1.7136, "step": 35400 },
    { "epoch": 0.89, "grad_norm": 4.390625, "learning_rate": 6.482412060301508e-05, "loss": 1.7739, "step": 35500 },
    { "epoch": 0.89, "grad_norm": 4.9375, "learning_rate": 6.472361809045227e-05, "loss": 1.7445, "step": 35600 },
    { "epoch": 0.89, "grad_norm": 5.24609375, "learning_rate": 6.462311557788944e-05, "loss": 1.7398, "step": 35700 },
    { "epoch": 0.9, "grad_norm": 5.81640625, "learning_rate": 6.452261306532663e-05, "loss": 1.7367, "step": 35800 },
    { "epoch": 0.9, "grad_norm": 9.03125, "learning_rate": 6.442211055276383e-05, "loss": 1.7496, "step": 35900 },
    { "epoch": 0.9, "grad_norm": 10.4609375, "learning_rate": 6.4321608040201e-05, "loss": 1.7613, "step": 36000 },
    { "epoch": 0.9, "grad_norm": 6.48046875, "learning_rate": 6.422110552763819e-05, "loss": 1.7279, "step": 36100 },
    { "epoch": 0.91, "grad_norm": 5.78125, "learning_rate": 6.412060301507538e-05, "loss": 1.7665, "step": 36200 },
    { "epoch": 0.91, "grad_norm": 5.59765625, "learning_rate": 6.402010050251258e-05, "loss": 1.7672, "step": 36300 },
    { "epoch": 0.91, "grad_norm": 4.73828125, "learning_rate": 6.391959798994975e-05, "loss": 1.7223, "step": 36400 },
    { "epoch": 0.91, "grad_norm": 4.0625, "learning_rate": 6.381909547738694e-05, "loss": 1.7493, "step": 36500 },
    { "epoch": 0.92, "grad_norm": 6.0546875, "learning_rate": 6.371859296482412e-05, "loss": 1.7542, "step": 36600 },
    { "epoch": 0.92, "grad_norm": 4.54296875, "learning_rate": 6.361809045226131e-05, "loss": 1.7434, "step": 36700 },
    { "epoch": 0.92, "grad_norm": 3.837890625, "learning_rate": 6.35175879396985e-05, "loss": 1.7627, "step": 36800 },
    { "epoch": 0.92, "grad_norm": 4.46484375, "learning_rate": 6.341708542713568e-05, "loss": 1.759, "step": 36900 },
    { "epoch": 0.93, "grad_norm": 4.89453125, "learning_rate": 6.331658291457287e-05, "loss": 1.7065, "step": 37000 },
    { "epoch": 0.93, "grad_norm": 4.9921875, "learning_rate": 6.321608040201005e-05, "loss": 1.7447, "step": 37100 },
    { "epoch": 0.93, "grad_norm": 5.95703125, "learning_rate": 6.311557788944724e-05, "loss": 1.7706, "step": 37200 },
    { "epoch": 0.93, "grad_norm": 4.89453125, "learning_rate": 6.301507537688443e-05, "loss": 1.8006, "step": 37300 },
    { "epoch": 0.94, "grad_norm": 4.66015625, "learning_rate": 6.291457286432161e-05, "loss": 1.7311, "step": 37400 },
    { "epoch": 0.94, "grad_norm": 3.96875, "learning_rate": 6.28140703517588e-05, "loss": 1.756, "step": 37500 },
    { "epoch": 0.94, "eval_loss": 1.722901701927185, "eval_runtime": 58.6285, "eval_samples_per_second": 17.057, "eval_steps_per_second": 4.264, "step": 37500 },
    { "epoch": 0.94, "grad_norm": 3.828125, "learning_rate": 6.271356783919599e-05, "loss": 1.7585, "step": 37600 },
    { "epoch": 0.94, "grad_norm": 4.26953125, "learning_rate": 6.261306532663317e-05, "loss": 1.7415, "step": 37700 },
    { "epoch": 0.94, "grad_norm": 4.6640625, "learning_rate": 6.251256281407035e-05, "loss": 1.8039, "step": 37800 },
    { "epoch": 0.95, "grad_norm": 5.43359375, "learning_rate": 6.241206030150753e-05, "loss": 1.7832, "step": 37900 },
    { "epoch": 0.95, "grad_norm": 4.63671875, "learning_rate": 6.231155778894473e-05, "loss": 1.7409, "step": 38000 },
    { "epoch": 0.95, "grad_norm": 4.0546875, "learning_rate": 6.221105527638192e-05, "loss": 1.7652, "step": 38100 },
    { "epoch": 0.95, "grad_norm": 7.16015625, "learning_rate": 6.211055276381909e-05, "loss": 1.7609, "step": 38200 },
    { "epoch": 0.96, "grad_norm": 6.53125, "learning_rate": 6.201005025125628e-05, "loss": 1.7524, "step": 38300 },
    { "epoch": 0.96, "grad_norm": 5.3359375, "learning_rate": 6.190954773869348e-05, "loss": 1.7825, "step": 38400 },
    { "epoch": 0.96, "grad_norm": 4.17578125, "learning_rate": 6.180904522613065e-05, "loss": 1.773, "step": 38500 },
    { "epoch": 0.96, "grad_norm": 3.9375, "learning_rate": 6.170854271356784e-05, "loss": 1.7343, "step": 38600 },
    { "epoch": 0.97, "grad_norm": 5.09765625, "learning_rate": 6.160804020100502e-05, "loss": 1.7402, "step": 38700 },
    { "epoch": 0.97, "grad_norm": 5.07421875, "learning_rate": 6.150753768844222e-05, "loss": 1.7091, "step": 38800 },
    { "epoch": 0.97, "grad_norm": 4.8671875, "learning_rate": 6.14070351758794e-05, "loss": 1.7008, "step": 38900 },
    { "epoch": 0.97, "grad_norm": 4.67578125, "learning_rate": 6.130653266331658e-05, "loss": 1.7596, "step": 39000 },
    { "epoch": 0.98, "grad_norm": 5.1015625, "learning_rate": 6.120603015075377e-05, "loss": 1.7664, "step": 39100 },
    { "epoch": 0.98, "grad_norm": 5.96875, "learning_rate": 6.110552763819096e-05, "loss": 1.7649, "step": 39200 },
    { "epoch": 0.98, "grad_norm": 5.12890625, "learning_rate": 6.1005025125628143e-05, "loss": 1.763, "step": 39300 },
    { "epoch": 0.98, "grad_norm": 4.3046875, "learning_rate": 6.090452261306533e-05, "loss": 1.734, "step": 39400 },
    { "epoch": 0.99, "grad_norm": 3.650390625, "learning_rate": 6.080402010050251e-05, "loss": 1.6931, "step": 39500 },
    { "epoch": 0.99, "grad_norm": 5.3125, "learning_rate": 6.070351758793971e-05, "loss": 1.7677, "step": 39600 },
    { "epoch": 0.99, "grad_norm": 4.58984375, "learning_rate": 6.060301507537689e-05, "loss": 1.7173, "step": 39700 },
    { "epoch": 0.99, "grad_norm": 1.9599609375, "learning_rate": 6.0502512562814076e-05, "loss": 1.7413, "step": 39800 },
    { "epoch": 1.0, "grad_norm": 6.78125, "learning_rate": 6.0402010050251256e-05, "loss": 1.7572, "step": 39900 },
    { "epoch": 1.0, "grad_norm": 7.78125, "learning_rate": 6.030150753768844e-05, "loss": 1.7379, "step": 40000 },
    { "epoch": 1.0, "eval_loss": 1.736067295074463, "eval_runtime": 58.5709, "eval_samples_per_second": 17.073, "eval_steps_per_second": 4.268, "step": 40000 },
    { "epoch": 1.0, "grad_norm": 5.5546875, "learning_rate": 6.0201005025125635e-05, "loss": 1.7405, "step": 40100 },
    { "epoch": 1.0, "grad_norm": 4.4609375, "learning_rate": 6.0100502512562815e-05, "loss": 1.7797, "step": 40200 },
    { "epoch": 1.01, "grad_norm": 7.5703125, "learning_rate": 6e-05, "loss": 1.7335, "step": 40300 },
    { "epoch": 1.01, "grad_norm": 6.08203125, "learning_rate": 5.989949748743718e-05, "loss": 1.7507, "step": 40400 },
    { "epoch": 1.01, "grad_norm": 4.37890625, "learning_rate": 5.979899497487438e-05, "loss": 1.7076,
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 4.1171875, |
|
"learning_rate": 5.969849246231156e-05, |
|
"loss": 1.7574, |
|
"step": 40600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 3.7890625, |
|
"learning_rate": 5.959798994974875e-05, |
|
"loss": 1.75, |
|
"step": 40700 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 5.9609375, |
|
"learning_rate": 5.949748743718593e-05, |
|
"loss": 1.7911, |
|
"step": 40800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 6.52734375, |
|
"learning_rate": 5.939698492462312e-05, |
|
"loss": 1.7292, |
|
"step": 40900 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 4.48828125, |
|
"learning_rate": 5.929648241206031e-05, |
|
"loss": 1.7336, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 8.0625, |
|
"learning_rate": 5.9195979899497487e-05, |
|
"loss": 1.7322, |
|
"step": 41100 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 7.1875, |
|
"learning_rate": 5.909547738693467e-05, |
|
"loss": 1.7528, |
|
"step": 41200 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 4.54296875, |
|
"learning_rate": 5.8994974874371866e-05, |
|
"loss": 1.7232, |
|
"step": 41300 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 6.11328125, |
|
"learning_rate": 5.889447236180905e-05, |
|
"loss": 1.6972, |
|
"step": 41400 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 6.6640625, |
|
"learning_rate": 5.879396984924623e-05, |
|
"loss": 1.7163, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 3.345703125, |
|
"learning_rate": 5.869346733668342e-05, |
|
"loss": 1.7457, |
|
"step": 41600 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 3.603515625, |
|
"learning_rate": 5.859296482412061e-05, |
|
"loss": 1.7534, |
|
"step": 41700 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 6.234375, |
|
"learning_rate": 5.849246231155779e-05, |
|
"loss": 1.7467, |
|
"step": 41800 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 4.12109375, |
|
"learning_rate": 5.839195979899498e-05, |
|
"loss": 1.7112, |
|
"step": 41900 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 3.712890625, |
|
"learning_rate": 5.829145728643216e-05, |
|
"loss": 1.7927, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 3.259765625, |
|
"learning_rate": 5.819095477386936e-05, |
|
"loss": 1.7359, |
|
"step": 42100 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 7.0703125, |
|
"learning_rate": 5.809045226130654e-05, |
|
"loss": 1.7463, |
|
"step": 42200 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 5.9140625, |
|
"learning_rate": 5.7989949748743724e-05, |
|
"loss": 1.7415, |
|
"step": 42300 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 6.02734375, |
|
"learning_rate": 5.7889447236180904e-05, |
|
"loss": 1.7429, |
|
"step": 42400 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 3.603515625, |
|
"learning_rate": 5.778894472361809e-05, |
|
"loss": 1.7432, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"eval_loss": 1.719661831855774, |
|
"eval_runtime": 58.5951, |
|
"eval_samples_per_second": 17.066, |
|
"eval_steps_per_second": 4.267, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 3.814453125, |
|
"learning_rate": 5.7688442211055284e-05, |
|
"loss": 1.7588, |
|
"step": 42600 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 3.80859375, |
|
"learning_rate": 5.7587939698492464e-05, |
|
"loss": 1.7605, |
|
"step": 42700 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 4.453125, |
|
"learning_rate": 5.748743718592965e-05, |
|
"loss": 1.733, |
|
"step": 42800 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 5.4765625, |
|
"learning_rate": 5.738693467336683e-05, |
|
"loss": 1.702, |
|
"step": 42900 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 4.71484375, |
|
"learning_rate": 5.728643216080403e-05, |
|
"loss": 1.7601, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 3.859375, |
|
"learning_rate": 5.718592964824121e-05, |
|
"loss": 1.7724, |
|
"step": 43100 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 5.125, |
|
"learning_rate": 5.7085427135678396e-05, |
|
"loss": 1.7171, |
|
"step": 43200 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 4.109375, |
|
"learning_rate": 5.6984924623115576e-05, |
|
"loss": 1.7536, |
|
"step": 43300 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 5.15234375, |
|
"learning_rate": 5.688442211055277e-05, |
|
"loss": 1.7804, |
|
"step": 43400 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 4.98828125, |
|
"learning_rate": 5.6783919597989955e-05, |
|
"loss": 1.7804, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 4.6484375, |
|
"learning_rate": 5.6683417085427135e-05, |
|
"loss": 1.7586, |
|
"step": 43600 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 6.4140625, |
|
"learning_rate": 5.658291457286432e-05, |
|
"loss": 1.7365, |
|
"step": 43700 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 3.33984375, |
|
"learning_rate": 5.6482412060301515e-05, |
|
"loss": 1.7381, |
|
"step": 43800 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 4.29296875, |
|
"learning_rate": 5.63819095477387e-05, |
|
"loss": 1.7308, |
|
"step": 43900 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 5.703125, |
|
"learning_rate": 5.628140703517588e-05, |
|
"loss": 1.7421, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 5.48828125, |
|
"learning_rate": 5.618090452261307e-05, |
|
"loss": 1.7377, |
|
"step": 44100 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 6.125, |
|
"learning_rate": 5.608040201005026e-05, |
|
"loss": 1.7009, |
|
"step": 44200 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 4.26953125, |
|
"learning_rate": 5.597989949748744e-05, |
|
"loss": 1.7114, |
|
"step": 44300 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 6.7578125, |
|
"learning_rate": 5.587939698492463e-05, |
|
"loss": 1.7266, |
|
"step": 44400 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 4.55078125, |
|
"learning_rate": 5.577889447236181e-05, |
|
"loss": 1.7497, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 4.0703125, |
|
"learning_rate": 5.567839195979899e-05, |
|
"loss": 1.7285, |
|
"step": 44600 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 5.0390625, |
|
"learning_rate": 5.5577889447236186e-05, |
|
"loss": 1.7311, |
|
"step": 44700 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 4.8046875, |
|
"learning_rate": 5.547738693467337e-05, |
|
"loss": 1.7544, |
|
"step": 44800 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 5.578125, |
|
"learning_rate": 5.537688442211055e-05, |
|
"loss": 1.706, |
|
"step": 44900 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 3.46875, |
|
"learning_rate": 5.527638190954774e-05, |
|
"loss": 1.6826, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"eval_loss": 1.756588101387024, |
|
"eval_runtime": 58.5831, |
|
"eval_samples_per_second": 17.07, |
|
"eval_steps_per_second": 4.267, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 5.4140625, |
|
"learning_rate": 5.517587939698493e-05, |
|
"loss": 1.7379, |
|
"step": 45100 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 3.04296875, |
|
"learning_rate": 5.507537688442211e-05, |
|
"loss": 1.7533, |
|
"step": 45200 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 4.421875, |
|
"learning_rate": 5.49748743718593e-05, |
|
"loss": 1.7273, |
|
"step": 45300 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 3.1875, |
|
"learning_rate": 5.487437185929648e-05, |
|
"loss": 1.7534, |
|
"step": 45400 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 5.34375, |
|
"learning_rate": 5.477386934673368e-05, |
|
"loss": 1.7731, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 5.34375, |
|
"learning_rate": 5.467336683417086e-05, |
|
"loss": 1.7151, |
|
"step": 45600 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 4.171875, |
|
"learning_rate": 5.4572864321608045e-05, |
|
"loss": 1.7643, |
|
"step": 45700 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 5.21875, |
|
"learning_rate": 5.4472361809045224e-05, |
|
"loss": 1.74, |
|
"step": 45800 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 4.8515625, |
|
"learning_rate": 5.437185929648242e-05, |
|
"loss": 1.7517, |
|
"step": 45900 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 4.109375, |
|
"learning_rate": 5.4271356783919604e-05, |
|
"loss": 1.7648, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 4.51171875, |
|
"learning_rate": 5.4170854271356784e-05, |
|
"loss": 1.7025, |
|
"step": 46100 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 5.63671875, |
|
"learning_rate": 5.407035175879397e-05, |
|
"loss": 1.6961, |
|
"step": 46200 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 3.171875, |
|
"learning_rate": 5.3969849246231163e-05, |
|
"loss": 1.74, |
|
"step": 46300 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 3.279296875, |
|
"learning_rate": 5.386934673366835e-05, |
|
"loss": 1.7582, |
|
"step": 46400 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 4.80859375, |
|
"learning_rate": 5.376884422110553e-05, |
|
"loss": 1.7057, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 2.744140625, |
|
"learning_rate": 5.3668341708542716e-05, |
|
"loss": 1.7378, |
|
"step": 46600 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 4.34765625, |
|
"learning_rate": 5.356783919597991e-05, |
|
"loss": 1.7267, |
|
"step": 46700 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 3.021484375, |
|
"learning_rate": 5.346733668341709e-05, |
|
"loss": 1.7465, |
|
"step": 46800 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 6.91796875, |
|
"learning_rate": 5.3366834170854276e-05, |
|
"loss": 1.7399, |
|
"step": 46900 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 7.6484375, |
|
"learning_rate": 5.3266331658291455e-05, |
|
"loss": 1.7236, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 4.51953125, |
|
"learning_rate": 5.316582914572864e-05, |
|
"loss": 1.7199, |
|
"step": 47100 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 2.994140625, |
|
"learning_rate": 5.3065326633165835e-05, |
|
"loss": 1.7073, |
|
"step": 47200 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 3.33984375, |
|
"learning_rate": 5.296482412060302e-05, |
|
"loss": 1.7313, |
|
"step": 47300 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 3.443359375, |
|
"learning_rate": 5.28643216080402e-05, |
|
"loss": 1.702, |
|
"step": 47400 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 3.958984375, |
|
"learning_rate": 5.276381909547739e-05, |
|
"loss": 1.7091, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"eval_loss": 1.7188657522201538, |
|
"eval_runtime": 58.5581, |
|
"eval_samples_per_second": 17.077, |
|
"eval_steps_per_second": 4.269, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 3.6640625, |
|
"learning_rate": 5.266331658291458e-05, |
|
"loss": 1.7334, |
|
"step": 47600 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 4.73046875, |
|
"learning_rate": 5.256281407035176e-05, |
|
"loss": 1.7369, |
|
"step": 47700 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 6.57421875, |
|
"learning_rate": 5.246231155778895e-05, |
|
"loss": 1.6972, |
|
"step": 47800 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 4.62890625, |
|
"learning_rate": 5.236180904522613e-05, |
|
"loss": 1.75, |
|
"step": 47900 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 3.353515625, |
|
"learning_rate": 5.226130653266332e-05, |
|
"loss": 1.7259, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 5.00390625, |
|
"learning_rate": 5.2160804020100507e-05, |
|
"loss": 1.7427, |
|
"step": 48100 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 3.01953125, |
|
"learning_rate": 5.206030150753769e-05, |
|
"loss": 1.6955, |
|
"step": 48200 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 4.51953125, |
|
"learning_rate": 5.195979899497487e-05, |
|
"loss": 1.7535, |
|
"step": 48300 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 4.23046875, |
|
"learning_rate": 5.1859296482412066e-05, |
|
"loss": 1.7482, |
|
"step": 48400 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 5.6875, |
|
"learning_rate": 5.175879396984925e-05, |
|
"loss": 1.6747, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 5.165829145728643e-05, |
|
"loss": 1.6639, |
|
"step": 48600 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 3.064453125, |
|
"learning_rate": 5.155778894472362e-05, |
|
"loss": 1.7246, |
|
"step": 48700 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 5.59375, |
|
"learning_rate": 5.145728643216081e-05, |
|
"loss": 1.7406, |
|
"step": 48800 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 3.166015625, |
|
"learning_rate": 5.135678391959799e-05, |
|
"loss": 1.7114, |
|
"step": 48900 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 5.05859375, |
|
"learning_rate": 5.125628140703518e-05, |
|
"loss": 1.7422, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 4.33984375, |
|
"learning_rate": 5.1155778894472365e-05, |
|
"loss": 1.7329, |
|
"step": 49100 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 6.41796875, |
|
"learning_rate": 5.1055276381909544e-05, |
|
"loss": 1.7284, |
|
"step": 49200 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 4.53125, |
|
"learning_rate": 5.095477386934674e-05, |
|
"loss": 1.6935, |
|
"step": 49300 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 5.109375, |
|
"learning_rate": 5.0854271356783924e-05, |
|
"loss": 1.7171, |
|
"step": 49400 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 4.97265625, |
|
"learning_rate": 5.0753768844221104e-05, |
|
"loss": 1.7192, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 4.36328125, |
|
"learning_rate": 5.065326633165829e-05, |
|
"loss": 1.775, |
|
"step": 49600 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 4.40625, |
|
"learning_rate": 5.0552763819095484e-05, |
|
"loss": 1.7524, |
|
"step": 49700 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 3.193359375, |
|
"learning_rate": 5.045226130653266e-05, |
|
"loss": 1.7293, |
|
"step": 49800 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 3.62890625, |
|
"learning_rate": 5.035175879396985e-05, |
|
"loss": 1.7557, |
|
"step": 49900 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 5.51171875, |
|
"learning_rate": 5.0251256281407036e-05, |
|
"loss": 1.7435, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"eval_loss": 1.704933762550354, |
|
"eval_runtime": 58.601, |
|
"eval_samples_per_second": 17.065, |
|
"eval_steps_per_second": 4.266, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 5.359375, |
|
"learning_rate": 5.015075376884423e-05, |
|
"loss": 1.7273, |
|
"step": 50100 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 4.5390625, |
|
"learning_rate": 5.005025125628141e-05, |
|
"loss": 1.724, |
|
"step": 50200 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 3.623046875, |
|
"learning_rate": 4.9949748743718596e-05, |
|
"loss": 1.7173, |
|
"step": 50300 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 2.8046875, |
|
"learning_rate": 4.984924623115578e-05, |
|
"loss": 1.6995, |
|
"step": 50400 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 6.1796875, |
|
"learning_rate": 4.974874371859297e-05, |
|
"loss": 1.7042, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 5.25390625, |
|
"learning_rate": 4.9648241206030155e-05, |
|
"loss": 1.7254, |
|
"step": 50600 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 3.73828125, |
|
"learning_rate": 4.954773869346734e-05, |
|
"loss": 1.7108, |
|
"step": 50700 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 4.36328125, |
|
"learning_rate": 4.944723618090453e-05, |
|
"loss": 1.7461, |
|
"step": 50800 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 4.19140625, |
|
"learning_rate": 4.934673366834171e-05, |
|
"loss": 1.7104, |
|
"step": 50900 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 5.2890625, |
|
"learning_rate": 4.92462311557789e-05, |
|
"loss": 1.7259, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 5.20703125, |
|
"learning_rate": 4.914572864321608e-05, |
|
"loss": 1.7518, |
|
"step": 51100 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 3.57421875, |
|
"learning_rate": 4.9045226130653274e-05, |
|
"loss": 1.7187, |
|
"step": 51200 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 4.90234375, |
|
"learning_rate": 4.8944723618090454e-05, |
|
"loss": 1.7393, |
|
"step": 51300 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 3.529296875, |
|
"learning_rate": 4.884422110552764e-05, |
|
"loss": 1.7273, |
|
"step": 51400 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 3.962890625, |
|
"learning_rate": 4.874371859296483e-05, |
|
"loss": 1.6996, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 4.08984375, |
|
"learning_rate": 4.864321608040201e-05, |
|
"loss": 1.7031, |
|
"step": 51600 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 4.13671875, |
|
"learning_rate": 4.85427135678392e-05, |
|
"loss": 1.7343, |
|
"step": 51700 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 4.77734375, |
|
"learning_rate": 4.844221105527638e-05, |
|
"loss": 1.7065, |
|
"step": 51800 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 4.59765625, |
|
"learning_rate": 4.834170854271357e-05, |
|
"loss": 1.6781, |
|
"step": 51900 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 4.7890625, |
|
"learning_rate": 4.824120603015075e-05, |
|
"loss": 1.7131, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 6.28515625, |
|
"learning_rate": 4.8140703517587946e-05, |
|
"loss": 1.6985, |
|
"step": 52100 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 4.33203125, |
|
"learning_rate": 4.8040201005025125e-05, |
|
"loss": 1.7307, |
|
"step": 52200 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 4.01171875, |
|
"learning_rate": 4.793969849246231e-05, |
|
"loss": 1.7056, |
|
"step": 52300 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 4.61328125, |
|
"learning_rate": 4.78391959798995e-05, |
|
"loss": 1.7456, |
|
"step": 52400 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 4.2890625, |
|
"learning_rate": 4.7738693467336685e-05, |
|
"loss": 1.7229, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"eval_loss": 1.7370753288269043, |
|
"eval_runtime": 58.6544, |
|
"eval_samples_per_second": 17.049, |
|
"eval_steps_per_second": 4.262, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 4.54296875, |
|
"learning_rate": 4.763819095477387e-05, |
|
"loss": 1.7175, |
|
"step": 52600 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 4.32421875, |
|
"learning_rate": 4.753768844221106e-05, |
|
"loss": 1.7241, |
|
"step": 52700 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 4.32421875, |
|
"learning_rate": 4.7437185929648244e-05, |
|
"loss": 1.6976, |
|
"step": 52800 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 3.71875, |
|
"learning_rate": 4.733668341708543e-05, |
|
"loss": 1.7485, |
|
"step": 52900 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 5.2578125, |
|
"learning_rate": 4.723618090452262e-05, |
|
"loss": 1.7102, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 2.486328125, |
|
"learning_rate": 4.7135678391959804e-05, |
|
"loss": 1.7248, |
|
"step": 53100 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 6.1171875, |
|
"learning_rate": 4.703517587939698e-05, |
|
"loss": 1.7252, |
|
"step": 53200 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 5.68359375, |
|
"learning_rate": 4.6934673366834177e-05, |
|
"loss": 1.7336, |
|
"step": 53300 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 5.1875, |
|
"learning_rate": 4.6834170854271356e-05, |
|
"loss": 1.7501, |
|
"step": 53400 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 3.189453125, |
|
"learning_rate": 4.673366834170855e-05, |
|
"loss": 1.7592, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 4.95703125, |
|
"learning_rate": 4.663316582914573e-05, |
|
"loss": 1.688, |
|
"step": 53600 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 7.1171875, |
|
"learning_rate": 4.653266331658292e-05, |
|
"loss": 1.755, |
|
"step": 53700 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 4.6640625, |
|
"learning_rate": 4.64321608040201e-05, |
|
"loss": 1.7362, |
|
"step": 53800 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 3.55078125, |
|
"learning_rate": 4.633165829145729e-05, |
|
"loss": 1.7053, |
|
"step": 53900 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 6.41015625, |
|
"learning_rate": 4.6231155778894475e-05, |
|
"loss": 1.7132, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 4.18359375, |
|
"learning_rate": 4.6130653266331655e-05, |
|
"loss": 1.7621, |
|
"step": 54100 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 2.72265625, |
|
"learning_rate": 4.603015075376885e-05, |
|
"loss": 1.7436, |
|
"step": 54200 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 5.37109375, |
|
"learning_rate": 4.592964824120603e-05, |
|
"loss": 1.7017, |
|
"step": 54300 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 4.66796875, |
|
"learning_rate": 4.582914572864322e-05, |
|
"loss": 1.7306, |
|
"step": 54400 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 4.34765625, |
|
"learning_rate": 4.57286432160804e-05, |
|
"loss": 1.7237, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 9.7109375, |
|
"learning_rate": 4.5628140703517594e-05, |
|
"loss": 1.7009, |
|
"step": 54600 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 3.259765625, |
|
"learning_rate": 4.5527638190954774e-05, |
|
"loss": 1.7532, |
|
"step": 54700 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 6.15625, |
|
"learning_rate": 4.542713567839196e-05, |
|
"loss": 1.7429, |
|
"step": 54800 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 4.68359375, |
|
"learning_rate": 4.532663316582915e-05, |
|
"loss": 1.7798, |
|
"step": 54900 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 5.5546875, |
|
"learning_rate": 4.522613065326633e-05, |
|
"loss": 1.7245, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"eval_loss": 1.7362700700759888, |
|
"eval_runtime": 58.6321, |
|
"eval_samples_per_second": 17.055, |
|
"eval_steps_per_second": 4.264, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 3.94140625, |
|
"learning_rate": 4.512562814070352e-05, |
|
"loss": 1.7203, |
|
"step": 55100 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 4.69140625, |
|
"learning_rate": 4.5025125628140706e-05, |
|
"loss": 1.7484, |
|
"step": 55200 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 4.10546875, |
|
"learning_rate": 4.492462311557789e-05, |
|
"loss": 1.7313, |
|
"step": 55300 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 3.724609375, |
|
"learning_rate": 4.482412060301508e-05, |
|
"loss": 1.7061, |
|
"step": 55400 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 3.71484375, |
|
"learning_rate": 4.4723618090452266e-05, |
|
"loss": 1.7461, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 3.076171875, |
|
"learning_rate": 4.462311557788945e-05, |
|
"loss": 1.7684, |
|
"step": 55600 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 4.34765625, |
|
"learning_rate": 4.452261306532663e-05, |
|
"loss": 1.7063, |
|
"step": 55700 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 3.41796875, |
|
"learning_rate": 4.4422110552763825e-05, |
|
"loss": 1.7364, |
|
"step": 55800 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 5.27734375, |
|
"learning_rate": 4.4321608040201005e-05, |
|
"loss": 1.7168, |
|
"step": 55900 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 3.84375, |
|
"learning_rate": 4.42211055276382e-05, |
|
"loss": 1.7301, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 6.27734375, |
|
"learning_rate": 4.412060301507538e-05, |
|
"loss": 1.7066, |
|
"step": 56100 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 3.95703125, |
|
"learning_rate": 4.4020100502512564e-05, |
|
"loss": 1.7104, |
|
"step": 56200 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 4.8125, |
|
"learning_rate": 4.391959798994975e-05, |
|
"loss": 1.7539, |
|
"step": 56300 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 5.83203125, |
|
"learning_rate": 4.381909547738694e-05, |
|
"loss": 1.7399, |
|
"step": 56400 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 4.64453125, |
|
"learning_rate": 4.3718592964824124e-05, |
|
"loss": 1.7089, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 3.45703125, |
|
"learning_rate": 4.3618090452261303e-05, |
|
"loss": 1.7053, |
|
"step": 56600 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 4.40234375, |
|
"learning_rate": 4.35175879396985e-05, |
|
"loss": 1.7312, |
|
"step": 56700 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 2.7578125, |
|
"learning_rate": 4.3417085427135676e-05, |
|
"loss": 1.7082, |
|
"step": 56800 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 4.61328125, |
|
"learning_rate": 4.331658291457287e-05, |
|
"loss": 1.745, |
|
"step": 56900 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 4.109375, |
|
"learning_rate": 4.321608040201005e-05, |
|
"loss": 1.7146, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 4.046875, |
|
"learning_rate": 4.311557788944724e-05, |
|
"loss": 1.7337, |
|
"step": 57100 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 4.78515625, |
|
"learning_rate": 4.301507537688442e-05, |
|
"loss": 1.7125, |
|
"step": 57200 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 4.23046875, |
|
"learning_rate": 4.291457286432161e-05, |
|
"loss": 1.7671, |
|
"step": 57300 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 4.8984375, |
|
"learning_rate": 4.2814070351758795e-05, |
|
"loss": 1.7294, |
|
"step": 57400 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 3.240234375, |
|
"learning_rate": 4.271356783919598e-05, |
|
"loss": 1.7164, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"eval_loss": 1.744708776473999, |
|
"eval_runtime": 58.6198, |
|
"eval_samples_per_second": 17.059, |
|
"eval_steps_per_second": 4.265, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 4.93359375, |
|
"learning_rate": 4.261306532663317e-05, |
|
"loss": 1.7333, |
|
"step": 57600 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 4.6796875, |
|
"learning_rate": 4.2512562814070355e-05, |
|
"loss": 1.7108, |
|
"step": 57700 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 3.76171875, |
|
"learning_rate": 4.241206030150754e-05, |
|
"loss": 1.7145, |
|
"step": 57800 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 6.94921875, |
|
"learning_rate": 4.231155778894473e-05, |
|
"loss": 1.6886, |
|
"step": 57900 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 4.87109375, |
|
"learning_rate": 4.2211055276381914e-05, |
|
"loss": 1.7467, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 4.41015625, |
|
"learning_rate": 4.21105527638191e-05, |
|
"loss": 1.7528, |
|
"step": 58100 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 4.41796875, |
|
"learning_rate": 4.201005025125628e-05, |
|
"loss": 1.7404, |
|
"step": 58200 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 7.68359375, |
|
"learning_rate": 4.1909547738693474e-05, |
|
"loss": 1.7604, |
|
"step": 58300 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 3.150390625, |
|
"learning_rate": 4.180904522613065e-05, |
|
"loss": 1.7439, |
|
"step": 58400 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 4.08984375, |
|
"learning_rate": 4.170854271356784e-05, |
|
"loss": 1.7652, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 3.55859375, |
|
"learning_rate": 4.1608040201005026e-05, |
|
"loss": 1.7625, |
|
"step": 58600 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 3.861328125, |
|
"learning_rate": 4.150753768844221e-05, |
|
"loss": 1.7361, |
|
"step": 58700 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 3.23828125, |
|
"learning_rate": 4.14070351758794e-05, |
|
"loss": 1.7801, |
|
"step": 58800 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 4.08984375, |
|
"learning_rate": 4.1306532663316586e-05, |
|
"loss": 1.7234, |
|
"step": 58900 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 3.767578125, |
|
"learning_rate": 4.120603015075377e-05, |
|
"loss": 1.7603, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 5.3359375, |
|
"learning_rate": 4.110552763819095e-05, |
|
"loss": 1.7808, |
|
"step": 59100 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 5.85546875, |
|
"learning_rate": 4.1005025125628145e-05, |
|
"loss": 1.7813, |
|
"step": 59200 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 3.25, |
|
"learning_rate": 4.0904522613065325e-05, |
|
"loss": 1.7306, |
|
"step": 59300 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 4.1171875, |
|
"learning_rate": 4.080402010050252e-05, |
|
"loss": 1.7836, |
|
"step": 59400 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 3.69921875, |
|
"learning_rate": 4.07035175879397e-05, |
|
"loss": 1.7855, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 4.7109375, |
|
"learning_rate": 4.060301507537689e-05, |
|
"loss": 1.7847, |
|
"step": 59600 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 3.251953125, |
|
"learning_rate": 4.050251256281407e-05, |
|
"loss": 1.8871, |
|
"step": 59700 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 4.44140625, |
|
"learning_rate": 4.040201005025126e-05, |
|
"loss": 1.8368, |
|
"step": 59800 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 3.55078125, |
|
"learning_rate": 4.0301507537688444e-05, |
|
"loss": 1.8294, |
|
"step": 59900 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 3.552734375, |
|
"learning_rate": 4.020100502512563e-05, |
|
"loss": 1.7986, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"eval_loss": 1.7843643426895142, |
|
"eval_runtime": 58.5367, |
|
"eval_samples_per_second": 17.083, |
|
"eval_steps_per_second": 4.271, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 4.546875, |
|
"learning_rate": 4.010050251256282e-05, |
|
"loss": 1.7845, |
|
"step": 60100 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 4.25, |
|
"learning_rate": 4e-05, |
|
"loss": 1.834, |
|
"step": 60200 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 8.1953125, |
|
"learning_rate": 3.989949748743719e-05, |
|
"loss": 1.808, |
|
"step": 60300 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 3.494140625, |
|
"learning_rate": 3.9798994974874376e-05, |
|
"loss": 1.8805, |
|
"step": 60400 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 4.015625, |
|
"learning_rate": 3.969849246231156e-05, |
|
"loss": 1.8606, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 6.55859375, |
|
"learning_rate": 3.959798994974875e-05, |
|
"loss": 1.7817, |
|
"step": 60600 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 5.57421875, |
|
"learning_rate": 3.949748743718593e-05, |
|
"loss": 1.8351, |
|
"step": 60700 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 3.21484375, |
|
"learning_rate": 3.9396984924623115e-05, |
|
"loss": 1.8236, |
|
"step": 60800 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 2.5859375, |
|
"learning_rate": 3.92964824120603e-05, |
|
"loss": 1.8198, |
|
"step": 60900 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 3.96875, |
|
"learning_rate": 3.919597989949749e-05, |
|
"loss": 1.8035, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 2.71484375, |
|
"learning_rate": 3.9095477386934675e-05, |
|
"loss": 1.8054, |
|
"step": 61100 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 13.3828125, |
|
"learning_rate": 3.899497487437186e-05, |
|
"loss": 1.8435, |
|
"step": 61200 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 4.28125, |
|
"learning_rate": 3.889447236180905e-05, |
|
"loss": 1.8414, |
|
"step": 61300 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 3.046875, |
|
"learning_rate": 3.8793969849246234e-05, |
|
"loss": 1.8838, |
|
"step": 61400 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 3.85546875, |
|
"learning_rate": 3.869346733668342e-05, |
|
"loss": 1.8571, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 5.16015625, |
|
"learning_rate": 3.85929648241206e-05, |
|
"loss": 1.8139, |
|
"step": 61600 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 2.796875, |
|
"learning_rate": 3.8492462311557794e-05, |
|
"loss": 1.8639, |
|
"step": 61700 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 2.37890625, |
|
"learning_rate": 3.8391959798994973e-05, |
|
"loss": 1.7987, |
|
"step": 61800 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 4.12890625, |
|
"learning_rate": 3.829145728643217e-05, |
|
"loss": 1.8564, |
|
"step": 61900 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 3.77734375, |
|
"learning_rate": 3.8190954773869346e-05, |
|
"loss": 1.8095, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 5.171875, |
|
"learning_rate": 3.809045226130653e-05, |
|
"loss": 1.8027, |
|
"step": 62100 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 4.16796875, |
|
"learning_rate": 3.798994974874372e-05, |
|
"loss": 1.8267, |
|
"step": 62200 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 5.40234375, |
|
"learning_rate": 3.7889447236180906e-05, |
|
"loss": 1.8248, |
|
"step": 62300 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 3.44140625, |
|
"learning_rate": 3.778894472361809e-05, |
|
"loss": 1.8484, |
|
"step": 62400 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 3.912109375, |
|
"learning_rate": 3.768844221105528e-05, |
|
"loss": 1.8161, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"eval_loss": 1.8444896936416626, |
|
"eval_runtime": 58.636, |
|
"eval_samples_per_second": 17.054, |
|
"eval_steps_per_second": 4.264, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 3.50390625, |
|
"learning_rate": 3.7587939698492465e-05, |
|
"loss": 1.7952, |
|
"step": 62600 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 4.15234375, |
|
"learning_rate": 3.748743718592965e-05, |
|
"loss": 1.805, |
|
"step": 62700 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 4.1015625, |
|
"learning_rate": 3.738693467336684e-05, |
|
"loss": 1.8244, |
|
"step": 62800 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 4.23828125, |
|
"learning_rate": 3.7286432160804025e-05, |
|
"loss": 1.8567, |
|
"step": 62900 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 4.33203125, |
|
"learning_rate": 3.7185929648241204e-05, |
|
"loss": 1.8527, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 3.052734375, |
|
"learning_rate": 3.708542713567839e-05, |
|
"loss": 1.806, |
|
"step": 63100 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 4.98046875, |
|
"learning_rate": 3.698492462311558e-05, |
|
"loss": 1.8273, |
|
"step": 63200 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 4.546875, |
|
"learning_rate": 3.6884422110552764e-05, |
|
"loss": 1.8037, |
|
"step": 63300 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 4.125, |
|
"learning_rate": 3.678391959798995e-05, |
|
"loss": 1.8474, |
|
"step": 63400 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 2.88671875, |
|
"learning_rate": 3.668341708542714e-05, |
|
"loss": 1.7854, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 4.6015625, |
|
"learning_rate": 3.658291457286432e-05, |
|
"loss": 1.8236, |
|
"step": 63600 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 5.2421875, |
|
"learning_rate": 3.648241206030151e-05, |
|
"loss": 1.8051, |
|
"step": 63700 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 4.0625, |
|
"learning_rate": 3.6381909547738696e-05, |
|
"loss": 1.8332, |
|
"step": 63800 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 4.1484375, |
|
"learning_rate": 3.628140703517588e-05, |
|
"loss": 1.8186, |
|
"step": 63900 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 6.1953125, |
|
"learning_rate": 3.618090452261307e-05, |
|
"loss": 1.8325, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 4.15234375, |
|
"learning_rate": 3.608040201005025e-05, |
|
"loss": 1.8177, |
|
"step": 64100 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 39.5, |
|
"learning_rate": 3.597989949748744e-05, |
|
"loss": 1.8247, |
|
"step": 64200 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 4.69921875, |
|
"learning_rate": 3.587939698492462e-05, |
|
"loss": 1.8047, |
|
"step": 64300 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 2.970703125, |
|
"learning_rate": 3.5778894472361815e-05, |
|
"loss": 1.7688, |
|
"step": 64400 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 4.10546875, |
|
"learning_rate": 3.5678391959798995e-05, |
|
"loss": 1.771, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 5.24609375, |
|
"learning_rate": 3.557788944723618e-05, |
|
"loss": 1.7495, |
|
"step": 64600 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 3.18359375, |
|
"learning_rate": 3.547738693467337e-05, |
|
"loss": 1.8133, |
|
"step": 64700 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 3.455078125, |
|
"learning_rate": 3.5376884422110554e-05, |
|
"loss": 1.8051, |
|
"step": 64800 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 4.05859375, |
|
"learning_rate": 3.527638190954774e-05, |
|
"loss": 1.8147, |
|
"step": 64900 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 5.01171875, |
|
"learning_rate": 3.517587939698493e-05, |
|
"loss": 1.8228, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"eval_loss": 1.8052270412445068, |
|
"eval_runtime": 58.6557, |
|
"eval_samples_per_second": 17.049, |
|
"eval_steps_per_second": 4.262, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 4.40234375, |
|
"learning_rate": 3.5075376884422114e-05, |
|
"loss": 1.7692, |
|
"step": 65100 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 4.375, |
|
"learning_rate": 3.49748743718593e-05, |
|
"loss": 1.7714, |
|
"step": 65200 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 1.9951171875, |
|
"learning_rate": 3.487437185929649e-05, |
|
"loss": 1.8194, |
|
"step": 65300 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 4.15625, |
|
"learning_rate": 3.4773869346733667e-05, |
|
"loss": 1.812, |
|
"step": 65400 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 4.09765625, |
|
"learning_rate": 3.467336683417085e-05, |
|
"loss": 1.829, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 3.533203125, |
|
"learning_rate": 3.457286432160804e-05, |
|
"loss": 1.7845, |
|
"step": 65600 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 3.3515625, |
|
"learning_rate": 3.4472361809045226e-05, |
|
"loss": 1.7663, |
|
"step": 65700 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 3.55859375, |
|
"learning_rate": 3.437185929648241e-05, |
|
"loss": 1.7931, |
|
"step": 65800 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 6.09765625, |
|
"learning_rate": 3.42713567839196e-05, |
|
"loss": 1.801, |
|
"step": 65900 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 3.4170854271356785e-05, |
|
"loss": 1.7626, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 2.462890625, |
|
"learning_rate": 3.407035175879397e-05, |
|
"loss": 1.7733, |
|
"step": 66100 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 9.2265625, |
|
"learning_rate": 3.396984924623116e-05, |
|
"loss": 1.8519, |
|
"step": 66200 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 3.08203125, |
|
"learning_rate": 3.3869346733668345e-05, |
|
"loss": 1.7851, |
|
"step": 66300 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 2.349609375, |
|
"learning_rate": 3.3768844221105525e-05, |
|
"loss": 1.7981, |
|
"step": 66400 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 3.23046875, |
|
"learning_rate": 3.366834170854272e-05, |
|
"loss": 1.7946, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 10.140625, |
|
"learning_rate": 3.35678391959799e-05, |
|
"loss": 1.7959, |
|
"step": 66600 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 12.2578125, |
|
"learning_rate": 3.346733668341709e-05, |
|
"loss": 1.8109, |
|
"step": 66700 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 4.19921875, |
|
"learning_rate": 3.336683417085427e-05, |
|
"loss": 1.7744, |
|
"step": 66800 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 3.578125, |
|
"learning_rate": 3.3266331658291464e-05, |
|
"loss": 1.7847, |
|
"step": 66900 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 6.8125, |
|
"learning_rate": 3.3165829145728643e-05, |
|
"loss": 1.7742, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 3.552734375, |
|
"learning_rate": 3.306532663316583e-05, |
|
"loss": 1.7806, |
|
"step": 67100 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 2.986328125, |
|
"learning_rate": 3.2964824120603016e-05, |
|
"loss": 1.7669, |
|
"step": 67200 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 4.4609375, |
|
"learning_rate": 3.28643216080402e-05, |
|
"loss": 1.7828, |
|
"step": 67300 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 3.216796875, |
|
"learning_rate": 3.276381909547739e-05, |
|
"loss": 1.7593, |
|
"step": 67400 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 4.8203125, |
|
"learning_rate": 3.2663316582914576e-05, |
|
"loss": 1.785, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"eval_loss": 1.798654317855835, |
|
"eval_runtime": 58.6944, |
|
"eval_samples_per_second": 17.037, |
|
"eval_steps_per_second": 4.259, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 3.376953125, |
|
"learning_rate": 3.256281407035176e-05, |
|
"loss": 1.7751, |
|
"step": 67600 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 7.28125, |
|
"learning_rate": 3.246231155778894e-05, |
|
"loss": 1.7637, |
|
"step": 67700 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 5.66796875, |
|
"learning_rate": 3.2361809045226135e-05, |
|
"loss": 1.7736, |
|
"step": 67800 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 2.07421875, |
|
"learning_rate": 3.2261306532663315e-05, |
|
"loss": 1.8005, |
|
"step": 67900 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 3.630859375, |
|
"learning_rate": 3.21608040201005e-05, |
|
"loss": 1.7644, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 3.224609375, |
|
"learning_rate": 3.206030150753769e-05, |
|
"loss": 1.8075, |
|
"step": 68100 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 3.921875, |
|
"learning_rate": 3.1959798994974875e-05, |
|
"loss": 1.7822, |
|
"step": 68200 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 4.23828125, |
|
"learning_rate": 3.185929648241206e-05, |
|
"loss": 1.765, |
|
"step": 68300 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 3.955078125, |
|
"learning_rate": 3.175879396984925e-05, |
|
"loss": 1.7986, |
|
"step": 68400 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 3.453125, |
|
"learning_rate": 3.1658291457286434e-05, |
|
"loss": 1.805, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 2.892578125, |
|
"learning_rate": 3.155778894472362e-05, |
|
"loss": 1.7655, |
|
"step": 68600 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 3.8828125, |
|
"learning_rate": 3.145728643216081e-05, |
|
"loss": 1.771, |
|
"step": 68700 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 3.21875, |
|
"learning_rate": 3.1356783919597993e-05, |
|
"loss": 1.77, |
|
"step": 68800 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 6.01953125, |
|
"learning_rate": 3.125628140703517e-05, |
|
"loss": 1.7429, |
|
"step": 68900 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 2.5234375, |
|
"learning_rate": 3.1155778894472366e-05, |
|
"loss": 1.7105, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 2.236328125, |
|
"learning_rate": 3.1055276381909546e-05, |
|
"loss": 1.7741, |
|
"step": 69100 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 3.45703125, |
|
"learning_rate": 3.095477386934674e-05, |
|
"loss": 1.75, |
|
"step": 69200 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 4.0625, |
|
"learning_rate": 3.085427135678392e-05, |
|
"loss": 1.7527, |
|
"step": 69300 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 4.078125, |
|
"learning_rate": 3.075376884422111e-05, |
|
"loss": 1.7835, |
|
"step": 69400 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 2.712890625, |
|
"learning_rate": 3.065326633165829e-05, |
|
"loss": 1.7344, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 3.439453125, |
|
"learning_rate": 3.055276381909548e-05, |
|
"loss": 1.7528, |
|
"step": 69600 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 3.80859375, |
|
"learning_rate": 3.0452261306532665e-05, |
|
"loss": 1.7805, |
|
"step": 69700 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 2.302734375, |
|
"learning_rate": 3.0351758793969855e-05, |
|
"loss": 1.7013, |
|
"step": 69800 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 3.392578125, |
|
"learning_rate": 3.0251256281407038e-05, |
|
"loss": 1.7769, |
|
"step": 69900 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 2.888671875, |
|
"learning_rate": 3.015075376884422e-05, |
|
"loss": 1.7511, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"eval_loss": 1.7408490180969238, |
|
"eval_runtime": 58.481, |
|
"eval_samples_per_second": 17.1, |
|
"eval_steps_per_second": 4.275, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 3.61328125, |
|
"learning_rate": 3.0050251256281408e-05, |
|
"loss": 1.7759, |
|
"step": 70100 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 3.072265625, |
|
"learning_rate": 2.994974874371859e-05, |
|
"loss": 1.7692, |
|
"step": 70200 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 2.580078125, |
|
"learning_rate": 2.984924623115578e-05, |
|
"loss": 1.7994, |
|
"step": 70300 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 3.49609375, |
|
"learning_rate": 2.9748743718592964e-05, |
|
"loss": 1.7955, |
|
"step": 70400 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 3.6328125, |
|
"learning_rate": 2.9648241206030153e-05, |
|
"loss": 1.7496, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 4.4765625, |
|
"learning_rate": 2.9547738693467337e-05, |
|
"loss": 1.7756, |
|
"step": 70600 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 4.19140625, |
|
"learning_rate": 2.9447236180904526e-05, |
|
"loss": 1.7691, |
|
"step": 70700 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 3.50390625, |
|
"learning_rate": 2.934673366834171e-05, |
|
"loss": 1.7742, |
|
"step": 70800 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 4.01171875, |
|
"learning_rate": 2.9246231155778896e-05, |
|
"loss": 1.7427, |
|
"step": 70900 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 2.560546875, |
|
"learning_rate": 2.914572864321608e-05, |
|
"loss": 1.7656, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 3.021484375, |
|
"learning_rate": 2.904522613065327e-05, |
|
"loss": 1.7899, |
|
"step": 71100 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 4.9375, |
|
"learning_rate": 2.8944723618090452e-05, |
|
"loss": 1.8032, |
|
"step": 71200 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 3.83203125, |
|
"learning_rate": 2.8844221105527642e-05, |
|
"loss": 1.7558, |
|
"step": 71300 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 2.56640625, |
|
"learning_rate": 2.8743718592964825e-05, |
|
"loss": 1.7589, |
|
"step": 71400 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 3.1171875, |
|
"learning_rate": 2.8643216080402015e-05, |
|
"loss": 1.7738, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 2.65625, |
|
"learning_rate": 2.8542713567839198e-05, |
|
"loss": 1.7832, |
|
"step": 71600 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 2.396484375, |
|
"learning_rate": 2.8442211055276384e-05, |
|
"loss": 1.7336, |
|
"step": 71700 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 3.443359375, |
|
"learning_rate": 2.8341708542713568e-05, |
|
"loss": 1.7822, |
|
"step": 71800 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 3.5, |
|
"learning_rate": 2.8241206030150757e-05, |
|
"loss": 1.7582, |
|
"step": 71900 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 2.640625, |
|
"learning_rate": 2.814070351758794e-05, |
|
"loss": 1.7894, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 2.861328125, |
|
"learning_rate": 2.804020100502513e-05, |
|
"loss": 1.7813, |
|
"step": 72100 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 2.408203125, |
|
"learning_rate": 2.7939698492462314e-05, |
|
"loss": 1.7854, |
|
"step": 72200 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 4.61328125, |
|
"learning_rate": 2.7839195979899497e-05, |
|
"loss": 1.7625, |
|
"step": 72300 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 3.767578125, |
|
"learning_rate": 2.7738693467336686e-05, |
|
"loss": 1.7482, |
|
"step": 72400 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 3.130859375, |
|
"learning_rate": 2.763819095477387e-05, |
|
"loss": 1.76, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"eval_loss": 1.7778958082199097, |
|
"eval_runtime": 58.6703, |
|
"eval_samples_per_second": 17.044, |
|
"eval_steps_per_second": 4.261, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 4.0859375, |
|
"learning_rate": 2.7537688442211056e-05, |
|
"loss": 1.7671, |
|
"step": 72600 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 2.623046875, |
|
"learning_rate": 2.743718592964824e-05, |
|
"loss": 1.7347, |
|
"step": 72700 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 2.919921875, |
|
"learning_rate": 2.733668341708543e-05, |
|
"loss": 1.7786, |
|
"step": 72800 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 3.025390625, |
|
"learning_rate": 2.7236180904522612e-05, |
|
"loss": 1.7694, |
|
"step": 72900 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 3.00390625, |
|
"learning_rate": 2.7135678391959802e-05, |
|
"loss": 1.8067, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 2.626953125, |
|
"learning_rate": 2.7035175879396985e-05, |
|
"loss": 1.7762, |
|
"step": 73100 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 2.353515625, |
|
"learning_rate": 2.6934673366834175e-05, |
|
"loss": 1.7673, |
|
"step": 73200 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 4.03125, |
|
"learning_rate": 2.6834170854271358e-05, |
|
"loss": 1.7847, |
|
"step": 73300 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 3.240234375, |
|
"learning_rate": 2.6733668341708545e-05, |
|
"loss": 1.7478, |
|
"step": 73400 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 3.228515625, |
|
"learning_rate": 2.6633165829145728e-05, |
|
"loss": 1.7572, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 5.2109375, |
|
"learning_rate": 2.6532663316582917e-05, |
|
"loss": 1.7356, |
|
"step": 73600 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 3.765625, |
|
"learning_rate": 2.64321608040201e-05, |
|
"loss": 1.7722, |
|
"step": 73700 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 4.58203125, |
|
"learning_rate": 2.633165829145729e-05, |
|
"loss": 1.7666, |
|
"step": 73800 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 3.3671875, |
|
"learning_rate": 2.6231155778894474e-05, |
|
"loss": 1.7557, |
|
"step": 73900 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 2.556640625, |
|
"learning_rate": 2.613065326633166e-05, |
|
"loss": 1.7772, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 3.041015625, |
|
"learning_rate": 2.6030150753768847e-05, |
|
"loss": 1.7553, |
|
"step": 74100 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 4.828125, |
|
"learning_rate": 2.5929648241206033e-05, |
|
"loss": 1.7358, |
|
"step": 74200 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 2.876953125, |
|
"learning_rate": 2.5829145728643216e-05, |
|
"loss": 1.7517, |
|
"step": 74300 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 4.09765625, |
|
"learning_rate": 2.5728643216080406e-05, |
|
"loss": 1.7538, |
|
"step": 74400 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 2.759765625, |
|
"learning_rate": 2.562814070351759e-05, |
|
"loss": 1.7389, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 2.751953125, |
|
"learning_rate": 2.5527638190954772e-05, |
|
"loss": 1.7384, |
|
"step": 74600 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 3.3203125, |
|
"learning_rate": 2.5427135678391962e-05, |
|
"loss": 1.7577, |
|
"step": 74700 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 3.078125, |
|
"learning_rate": 2.5326633165829145e-05, |
|
"loss": 1.7384, |
|
"step": 74800 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 2.27734375, |
|
"learning_rate": 2.522613065326633e-05, |
|
"loss": 1.722, |
|
"step": 74900 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 5.04296875, |
|
"learning_rate": 2.5125628140703518e-05, |
|
"loss": 1.749, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"eval_loss": 1.7936066389083862, |
|
"eval_runtime": 58.6259, |
|
"eval_samples_per_second": 17.057, |
|
"eval_steps_per_second": 4.264, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 5.125, |
|
"learning_rate": 2.5025125628140705e-05, |
|
"loss": 1.7382, |
|
"step": 75100 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 2.859375, |
|
"learning_rate": 2.492462311557789e-05, |
|
"loss": 1.7397, |
|
"step": 75200 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 2.73828125, |
|
"learning_rate": 2.4824120603015078e-05, |
|
"loss": 1.7444, |
|
"step": 75300 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 4.54296875, |
|
"learning_rate": 2.4723618090452264e-05, |
|
"loss": 1.7634, |
|
"step": 75400 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.462311557788945e-05, |
|
"loss": 1.8123, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.4522613065326637e-05, |
|
"loss": 0.0, |
|
"step": 75600 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.442211055276382e-05, |
|
"loss": 0.0, |
|
"step": 75700 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.4321608040201007e-05, |
|
"loss": 0.0, |
|
"step": 75800 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.422110552763819e-05, |
|
"loss": 0.0, |
|
"step": 75900 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.4120603015075376e-05, |
|
"loss": 0.0, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.4020100502512563e-05, |
|
"loss": 0.0, |
|
"step": 76100 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.391959798994975e-05, |
|
"loss": 0.0, |
|
"step": 76200 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.3819095477386936e-05, |
|
"loss": 0.0, |
|
"step": 76300 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.3718592964824122e-05, |
|
"loss": 0.0, |
|
"step": 76400 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.361809045226131e-05, |
|
"loss": 0.0, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.351758793969849e-05, |
|
"loss": 0.0, |
|
"step": 76600 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.3417085427135678e-05, |
|
"loss": 0.0, |
|
"step": 76700 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.3316582914572865e-05, |
|
"loss": 0.0, |
|
"step": 76800 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.321608040201005e-05, |
|
"loss": 0.0, |
|
"step": 76900 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.3115577889447238e-05, |
|
"loss": 0.0, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.3015075376884424e-05, |
|
"loss": 0.0, |
|
"step": 77100 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.291457286432161e-05, |
|
"loss": 0.0, |
|
"step": 77200 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.2814070351758797e-05, |
|
"loss": 0.0, |
|
"step": 77300 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.271356783919598e-05, |
|
"loss": 0.0, |
|
"step": 77400 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.2613065326633167e-05, |
|
"loss": 0.0, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"eval_loss": NaN, |
|
"eval_runtime": 51.6196, |
|
"eval_samples_per_second": 19.372, |
|
"eval_steps_per_second": 4.843, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.2512562814070353e-05, |
|
"loss": 0.0, |
|
"step": 77600 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.241206030150754e-05, |
|
"loss": 0.0, |
|
"step": 77700 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.2311557788944726e-05, |
|
"loss": 0.0, |
|
"step": 77800 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.2211055276381913e-05, |
|
"loss": 0.0, |
|
"step": 77900 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.21105527638191e-05, |
|
"loss": 0.0, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.2010050251256282e-05, |
|
"loss": 0.0, |
|
"step": 78100 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.190954773869347e-05, |
|
"loss": 0.0, |
|
"step": 78200 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.1809045226130652e-05, |
|
"loss": 0.0, |
|
"step": 78300 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.1708542713567838e-05, |
|
"loss": 0.0, |
|
"step": 78400 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.1608040201005025e-05, |
|
"loss": 0.0, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.150753768844221e-05, |
|
"loss": 0.0, |
|
"step": 78600 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.1407035175879398e-05, |
|
"loss": 0.0, |
|
"step": 78700 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.1306532663316584e-05, |
|
"loss": 0.0, |
|
"step": 78800 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.120603015075377e-05, |
|
"loss": 0.0, |
|
"step": 78900 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.1105527638190957e-05, |
|
"loss": 0.0, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.100502512562814e-05, |
|
"loss": 0.0, |
|
"step": 79100 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.0904522613065327e-05, |
|
"loss": 0.0, |
|
"step": 79200 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.0804020100502513e-05, |
|
"loss": 0.0, |
|
"step": 79300 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.07035175879397e-05, |
|
"loss": 0.0, |
|
"step": 79400 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.0603015075376886e-05, |
|
"loss": 0.0, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.0502512562814073e-05, |
|
"loss": 0.0, |
|
"step": 79600 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.040201005025126e-05, |
|
"loss": 0.0, |
|
"step": 79700 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.0301507537688446e-05, |
|
"loss": 0.0, |
|
"step": 79800 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.020100502512563e-05, |
|
"loss": 0.0, |
|
"step": 79900 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.0100502512562815e-05, |
|
"loss": 0.0, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": NaN, |
|
"eval_runtime": 51.7171, |
|
"eval_samples_per_second": 19.336, |
|
"eval_steps_per_second": 4.834, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": NaN, |
|
"learning_rate": 2e-05, |
|
"loss": 0.0, |
|
"step": 80100 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.9899497487437188e-05, |
|
"loss": 0.0, |
|
"step": 80200 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.9798994974874375e-05, |
|
"loss": 0.0, |
|
"step": 80300 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.9698492462311558e-05, |
|
"loss": 0.0, |
|
"step": 80400 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.9597989949748744e-05, |
|
"loss": 0.0, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.949748743718593e-05, |
|
"loss": 0.0, |
|
"step": 80600 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.9396984924623117e-05, |
|
"loss": 0.0, |
|
"step": 80700 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.92964824120603e-05, |
|
"loss": 0.0, |
|
"step": 80800 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.9195979899497487e-05, |
|
"loss": 0.0, |
|
"step": 80900 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.9095477386934673e-05, |
|
"loss": 0.0, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.899497487437186e-05, |
|
"loss": 0.0, |
|
"step": 81100 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.8894472361809046e-05, |
|
"loss": 0.0, |
|
"step": 81200 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.8793969849246233e-05, |
|
"loss": 0.0, |
|
"step": 81300 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.869346733668342e-05, |
|
"loss": 0.0, |
|
"step": 81400 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.8592964824120602e-05, |
|
"loss": 0.0, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.849246231155779e-05, |
|
"loss": 0.0, |
|
"step": 81600 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.8391959798994975e-05, |
|
"loss": 0.0, |
|
"step": 81700 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.829145728643216e-05, |
|
"loss": 0.0, |
|
"step": 81800 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.8190954773869348e-05, |
|
"loss": 0.0, |
|
"step": 81900 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.8090452261306535e-05, |
|
"loss": 0.0, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.798994974874372e-05, |
|
"loss": 0.0, |
|
"step": 82100 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.7889447236180908e-05, |
|
"loss": 0.0, |
|
"step": 82200 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.778894472361809e-05, |
|
"loss": 0.0, |
|
"step": 82300 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.7688442211055277e-05, |
|
"loss": 0.0, |
|
"step": 82400 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.7587939698492464e-05, |
|
"loss": 0.0, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"eval_loss": NaN, |
|
"eval_runtime": 51.7389, |
|
"eval_samples_per_second": 19.328, |
|
"eval_steps_per_second": 4.832, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.748743718592965e-05, |
|
"loss": 0.0, |
|
"step": 82600 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.7386934673366833e-05, |
|
"loss": 0.0, |
|
"step": 82700 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.728643216080402e-05, |
|
"loss": 0.0, |
|
"step": 82800 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.7185929648241206e-05, |
|
"loss": 0.0, |
|
"step": 82900 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.7085427135678393e-05, |
|
"loss": 0.0, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.698492462311558e-05, |
|
"loss": 0.0, |
|
"step": 83100 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.6884422110552762e-05, |
|
"loss": 0.0, |
|
"step": 83200 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.678391959798995e-05, |
|
"loss": 0.0, |
|
"step": 83300 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.6683417085427135e-05, |
|
"loss": 0.0, |
|
"step": 83400 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.6582914572864322e-05, |
|
"loss": 0.0, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.6482412060301508e-05, |
|
"loss": 0.0, |
|
"step": 83600 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.6381909547738695e-05, |
|
"loss": 0.0, |
|
"step": 83700 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.628140703517588e-05, |
|
"loss": 0.0, |
|
"step": 83800 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.6180904522613068e-05, |
|
"loss": 0.0, |
|
"step": 83900 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.608040201005025e-05, |
|
"loss": 0.0, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.5979899497487437e-05, |
|
"loss": 0.0, |
|
"step": 84100 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.5879396984924624e-05, |
|
"loss": 0.0, |
|
"step": 84200 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.577889447236181e-05, |
|
"loss": 0.0, |
|
"step": 84300 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.5678391959798997e-05, |
|
"loss": 0.0, |
|
"step": 84400 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.5577889447236183e-05, |
|
"loss": 0.0, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.547738693467337e-05, |
|
"loss": 0.0, |
|
"step": 84600 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.5376884422110556e-05, |
|
"loss": 0.0, |
|
"step": 84700 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.527638190954774e-05, |
|
"loss": 0.0, |
|
"step": 84800 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.5175879396984927e-05, |
|
"loss": 0.0, |
|
"step": 84900 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.507537688442211e-05, |
|
"loss": 0.0, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"eval_loss": NaN, |
|
"eval_runtime": 51.6936, |
|
"eval_samples_per_second": 19.345, |
|
"eval_steps_per_second": 4.836, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.4974874371859295e-05, |
|
"loss": 0.0, |
|
"step": 85100 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.4874371859296482e-05, |
|
"loss": 0.0, |
|
"step": 85200 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.4773869346733668e-05, |
|
"loss": 0.0, |
|
"step": 85300 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.4673366834170855e-05, |
|
"loss": 0.0, |
|
"step": 85400 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.457286432160804e-05, |
|
"loss": 0.0, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.4472361809045226e-05, |
|
"loss": 0.0, |
|
"step": 85600 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.4371859296482413e-05, |
|
"loss": 0.0, |
|
"step": 85700 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.4271356783919599e-05, |
|
"loss": 0.0, |
|
"step": 85800 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.4170854271356784e-05, |
|
"loss": 0.0, |
|
"step": 85900 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.407035175879397e-05, |
|
"loss": 0.0, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.3969849246231157e-05, |
|
"loss": 0.0, |
|
"step": 86100 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.3869346733668343e-05, |
|
"loss": 0.0, |
|
"step": 86200 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.3768844221105528e-05, |
|
"loss": 0.0, |
|
"step": 86300 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.3668341708542715e-05, |
|
"loss": 0.0, |
|
"step": 86400 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.3567839195979901e-05, |
|
"loss": 0.0, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.3467336683417087e-05, |
|
"loss": 0.0, |
|
"step": 86600 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.3366834170854272e-05, |
|
"loss": 0.0, |
|
"step": 86700 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.3266331658291459e-05, |
|
"loss": 0.0, |
|
"step": 86800 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.3165829145728645e-05, |
|
"loss": 0.0, |
|
"step": 86900 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.306532663316583e-05, |
|
"loss": 0.0, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.2964824120603017e-05, |
|
"loss": 0.0, |
|
"step": 87100 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.2864321608040203e-05, |
|
"loss": 0.0, |
|
"step": 87200 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.2763819095477386e-05, |
|
"loss": 0.0, |
|
"step": 87300 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.2663316582914573e-05, |
|
"loss": 0.0, |
|
"step": 87400 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.2562814070351759e-05, |
|
"loss": 0.0, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"eval_loss": NaN, |
|
"eval_runtime": 51.5922, |
|
"eval_samples_per_second": 19.383, |
|
"eval_steps_per_second": 4.846, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.2462311557788946e-05, |
|
"loss": 0.0, |
|
"step": 87600 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.2361809045226132e-05, |
|
"loss": 0.0, |
|
"step": 87700 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.2261306532663318e-05, |
|
"loss": 0.0, |
|
"step": 87800 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.2160804020100503e-05, |
|
"loss": 0.0, |
|
"step": 87900 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.2060301507537688e-05, |
|
"loss": 0.0, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.1959798994974875e-05, |
|
"loss": 0.0, |
|
"step": 88100 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.1859296482412061e-05, |
|
"loss": 0.0, |
|
"step": 88200 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.1758793969849246e-05, |
|
"loss": 0.0, |
|
"step": 88300 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.1658291457286432e-05, |
|
"loss": 0.0, |
|
"step": 88400 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.1557788944723619e-05, |
|
"loss": 0.0, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.1457286432160805e-05, |
|
"loss": 0.0, |
|
"step": 88600 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.135678391959799e-05, |
|
"loss": 0.0, |
|
"step": 88700 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.1256281407035177e-05, |
|
"loss": 0.0, |
|
"step": 88800 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.1155778894472363e-05, |
|
"loss": 0.0, |
|
"step": 88900 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.105527638190955e-05, |
|
"loss": 0.0, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.0954773869346734e-05, |
|
"loss": 0.0, |
|
"step": 89100 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.0854271356783919e-05, |
|
"loss": 0.0, |
|
"step": 89200 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.0753768844221106e-05, |
|
"loss": 0.0, |
|
"step": 89300 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.0653266331658292e-05, |
|
"loss": 0.0, |
|
"step": 89400 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.0552763819095479e-05, |
|
"loss": 0.0, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.0452261306532663e-05, |
|
"loss": 0.0, |
|
"step": 89600 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.035175879396985e-05, |
|
"loss": 0.0, |
|
"step": 89700 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.0251256281407036e-05, |
|
"loss": 0.0, |
|
"step": 89800 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.0150753768844223e-05, |
|
"loss": 0.0, |
|
"step": 89900 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.0050251256281408e-05, |
|
"loss": 0.0, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"eval_loss": NaN, |
|
"eval_runtime": 51.5911, |
|
"eval_samples_per_second": 19.383, |
|
"eval_steps_per_second": 4.846, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": NaN, |
|
"learning_rate": 9.949748743718594e-06, |
|
"loss": 0.0, |
|
"step": 90100 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": NaN, |
|
"learning_rate": 9.849246231155779e-06, |
|
"loss": 0.0, |
|
"step": 90200 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": NaN, |
|
"learning_rate": 9.748743718592965e-06, |
|
"loss": 0.0, |
|
"step": 90300 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": NaN, |
|
"learning_rate": 9.64824120603015e-06, |
|
"loss": 0.0, |
|
"step": 90400 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": NaN, |
|
"learning_rate": 9.547738693467337e-06, |
|
"loss": 0.0, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": NaN, |
|
"learning_rate": 9.447236180904523e-06, |
|
"loss": 0.0, |
|
"step": 90600 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": NaN, |
|
"learning_rate": 9.34673366834171e-06, |
|
"loss": 0.0, |
|
"step": 90700 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": NaN, |
|
"learning_rate": 9.246231155778894e-06, |
|
"loss": 0.0, |
|
"step": 90800 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": NaN, |
|
"learning_rate": 9.14572864321608e-06, |
|
"loss": 0.0, |
|
"step": 90900 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": NaN, |
|
"learning_rate": 9.045226130653267e-06, |
|
"loss": 0.0, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": NaN, |
|
"learning_rate": 8.944723618090454e-06, |
|
"loss": 0.0, |
|
"step": 91100 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": NaN, |
|
"learning_rate": 8.844221105527639e-06, |
|
"loss": 0.0, |
|
"step": 91200 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": NaN, |
|
"learning_rate": 8.743718592964825e-06, |
|
"loss": 0.0, |
|
"step": 91300 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": NaN, |
|
"learning_rate": 8.64321608040201e-06, |
|
"loss": 0.0, |
|
"step": 91400 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": NaN, |
|
"learning_rate": 8.542713567839196e-06, |
|
"loss": 0.0, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": NaN, |
|
"learning_rate": 8.442211055276381e-06, |
|
"loss": 0.0, |
|
"step": 91600 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": NaN, |
|
"learning_rate": 8.341708542713568e-06, |
|
"loss": 0.0, |
|
"step": 91700 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": NaN, |
|
"learning_rate": 8.241206030150754e-06, |
|
"loss": 0.0, |
|
"step": 91800 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": NaN, |
|
"learning_rate": 8.14070351758794e-06, |
|
"loss": 0.0, |
|
"step": 91900 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": NaN, |
|
"learning_rate": 8.040201005025125e-06, |
|
"loss": 0.0, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": NaN, |
|
"learning_rate": 7.939698492462312e-06, |
|
"loss": 0.0, |
|
"step": 92100 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": NaN, |
|
"learning_rate": 7.839195979899498e-06, |
|
"loss": 0.0, |
|
"step": 92200 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": NaN, |
|
"learning_rate": 7.738693467336685e-06, |
|
"loss": 0.0, |
|
"step": 92300 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": NaN, |
|
"learning_rate": 7.63819095477387e-06, |
|
"loss": 0.0, |
|
"step": 92400 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": NaN, |
|
"learning_rate": 7.537688442211055e-06, |
|
"loss": 0.0, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"eval_loss": NaN, |
|
"eval_runtime": 51.6585, |
|
"eval_samples_per_second": 19.358, |
|
"eval_steps_per_second": 4.839, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": NaN, |
|
"learning_rate": 7.437185929648241e-06, |
|
"loss": 0.0, |
|
"step": 92600 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": NaN, |
|
"learning_rate": 7.336683417085427e-06, |
|
"loss": 0.0, |
|
"step": 92700 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": NaN, |
|
"learning_rate": 7.236180904522613e-06, |
|
"loss": 0.0, |
|
"step": 92800 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": NaN, |
|
"learning_rate": 7.1356783919597995e-06, |
|
"loss": 0.0, |
|
"step": 92900 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": NaN, |
|
"learning_rate": 7.035175879396985e-06, |
|
"loss": 0.0, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": NaN, |
|
"learning_rate": 6.934673366834172e-06, |
|
"loss": 0.0, |
|
"step": 93100 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": NaN, |
|
"learning_rate": 6.834170854271357e-06, |
|
"loss": 0.0, |
|
"step": 93200 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": NaN, |
|
"learning_rate": 6.733668341708544e-06, |
|
"loss": 0.0, |
|
"step": 93300 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": NaN, |
|
"learning_rate": 6.633165829145729e-06, |
|
"loss": 0.0, |
|
"step": 93400 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": NaN, |
|
"learning_rate": 6.532663316582915e-06, |
|
"loss": 0.0, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": NaN, |
|
"learning_rate": 6.4321608040201015e-06, |
|
"loss": 0.0, |
|
"step": 93600 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": NaN, |
|
"learning_rate": 6.331658291457286e-06, |
|
"loss": 0.0, |
|
"step": 93700 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": NaN, |
|
"learning_rate": 6.231155778894473e-06, |
|
"loss": 0.0, |
|
"step": 93800 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": NaN, |
|
"learning_rate": 6.130653266331659e-06, |
|
"loss": 0.0, |
|
"step": 93900 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": NaN, |
|
"learning_rate": 6.030150753768844e-06, |
|
"loss": 0.0, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": NaN, |
|
"learning_rate": 5.9296482412060305e-06, |
|
"loss": 0.0, |
|
"step": 94100 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": NaN, |
|
"learning_rate": 5.829145728643216e-06, |
|
"loss": 0.0, |
|
"step": 94200 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": NaN, |
|
"learning_rate": 5.728643216080403e-06, |
|
"loss": 0.0, |
|
"step": 94300 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": NaN, |
|
"learning_rate": 5.628140703517588e-06, |
|
"loss": 0.0, |
|
"step": 94400 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": NaN, |
|
"learning_rate": 5.527638190954775e-06, |
|
"loss": 0.0, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": NaN, |
|
"learning_rate": 5.4271356783919595e-06, |
|
"loss": 0.0, |
|
"step": 94600 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": NaN, |
|
"learning_rate": 5.326633165829146e-06, |
|
"loss": 0.0, |
|
"step": 94700 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": NaN, |
|
"learning_rate": 5.226130653266332e-06, |
|
"loss": 0.0, |
|
"step": 94800 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": NaN, |
|
"learning_rate": 5.125628140703518e-06, |
|
"loss": 0.0, |
|
"step": 94900 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": NaN, |
|
"learning_rate": 5.025125628140704e-06, |
|
"loss": 0.0, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"eval_loss": NaN, |
|
"eval_runtime": 51.6508, |
|
"eval_samples_per_second": 19.361, |
|
"eval_steps_per_second": 4.84, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": NaN, |
|
"learning_rate": 4.9246231155778894e-06, |
|
"loss": 0.0, |
|
"step": 95100 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": NaN, |
|
"learning_rate": 4.824120603015075e-06, |
|
"loss": 0.0, |
|
"step": 95200 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": NaN, |
|
"learning_rate": 4.7236180904522615e-06, |
|
"loss": 0.0, |
|
"step": 95300 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": NaN, |
|
"learning_rate": 4.623115577889447e-06, |
|
"loss": 0.0, |
|
"step": 95400 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": NaN, |
|
"learning_rate": 4.522613065326634e-06, |
|
"loss": 0.0, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": NaN, |
|
"learning_rate": 4.422110552763819e-06, |
|
"loss": 0.0, |
|
"step": 95600 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": NaN, |
|
"learning_rate": 4.321608040201005e-06, |
|
"loss": 0.0, |
|
"step": 95700 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": NaN, |
|
"learning_rate": 4.2211055276381906e-06, |
|
"loss": 0.0, |
|
"step": 95800 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": NaN, |
|
"learning_rate": 4.120603015075377e-06, |
|
"loss": 0.0, |
|
"step": 95900 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": NaN, |
|
"learning_rate": 4.020100502512563e-06, |
|
"loss": 0.0, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": NaN, |
|
"learning_rate": 3.919597989949749e-06, |
|
"loss": 0.0, |
|
"step": 96100 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": NaN, |
|
"learning_rate": 3.819095477386935e-06, |
|
"loss": 0.0, |
|
"step": 96200 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": NaN, |
|
"learning_rate": 3.7185929648241204e-06, |
|
"loss": 0.0, |
|
"step": 96300 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": NaN, |
|
"learning_rate": 3.6180904522613065e-06, |
|
"loss": 0.0, |
|
"step": 96400 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": NaN, |
|
"learning_rate": 3.5175879396984926e-06, |
|
"loss": 0.0, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": NaN, |
|
"learning_rate": 3.4170854271356786e-06, |
|
"loss": 0.0, |
|
"step": 96600 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": NaN, |
|
"learning_rate": 3.3165829145728647e-06, |
|
"loss": 0.0, |
|
"step": 96700 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": NaN, |
|
"learning_rate": 3.2160804020100507e-06, |
|
"loss": 0.0, |
|
"step": 96800 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": NaN, |
|
"learning_rate": 3.1155778894472364e-06, |
|
"loss": 0.0, |
|
"step": 96900 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": NaN, |
|
"learning_rate": 3.015075376884422e-06, |
|
"loss": 0.0, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.914572864321608e-06, |
|
"loss": 0.0, |
|
"step": 97100 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.814070351758794e-06, |
|
"loss": 0.0, |
|
"step": 97200 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.7135678391959798e-06, |
|
"loss": 0.0, |
|
"step": 97300 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.613065326633166e-06, |
|
"loss": 0.0, |
|
"step": 97400 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.512562814070352e-06, |
|
"loss": 0.0, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"eval_loss": NaN, |
|
"eval_runtime": 51.6332, |
|
"eval_samples_per_second": 19.367, |
|
"eval_steps_per_second": 4.842, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.4120603015075375e-06, |
|
"loss": 0.0, |
|
"step": 97600 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.3115577889447236e-06, |
|
"loss": 0.0, |
|
"step": 97700 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.2110552763819096e-06, |
|
"loss": 0.0, |
|
"step": 97800 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.1105527638190953e-06, |
|
"loss": 0.0, |
|
"step": 97900 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.0100502512562813e-06, |
|
"loss": 0.0, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.9095477386934674e-06, |
|
"loss": 0.0, |
|
"step": 98100 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.8090452261306533e-06, |
|
"loss": 0.0, |
|
"step": 98200 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.7085427135678393e-06, |
|
"loss": 0.0, |
|
"step": 98300 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.6080402010050254e-06, |
|
"loss": 0.0, |
|
"step": 98400 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.507537688442211e-06, |
|
"loss": 0.0, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.407035175879397e-06, |
|
"loss": 0.0, |
|
"step": 98600 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.306532663316583e-06, |
|
"loss": 0.0, |
|
"step": 98700 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.2060301507537688e-06, |
|
"loss": 0.0, |
|
"step": 98800 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.1055276381909548e-06, |
|
"loss": 0.0, |
|
"step": 98900 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.0050251256281407e-06, |
|
"loss": 0.0, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": NaN, |
|
"learning_rate": 9.045226130653266e-07, |
|
"loss": 0.0, |
|
"step": 99100 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": NaN, |
|
"learning_rate": 8.040201005025127e-07, |
|
"loss": 0.0, |
|
"step": 99200 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": NaN, |
|
"learning_rate": 7.035175879396985e-07, |
|
"loss": 0.0, |
|
"step": 99300 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": NaN, |
|
"learning_rate": 6.030150753768844e-07, |
|
"loss": 0.0, |
|
"step": 99400 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": NaN, |
|
"learning_rate": 5.025125628140703e-07, |
|
"loss": 0.0, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": NaN, |
|
"learning_rate": 4.0201005025125634e-07, |
|
"loss": 0.0, |
|
"step": 99600 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": NaN, |
|
"learning_rate": 3.015075376884422e-07, |
|
"loss": 0.0, |
|
"step": 99700 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": NaN, |
|
"learning_rate": 2.0100502512562817e-07, |
|
"loss": 0.0, |
|
"step": 99800 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": NaN, |
|
"learning_rate": 1.0050251256281409e-07, |
|
"loss": 0.0, |
|
"step": 99900 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": NaN, |
|
"learning_rate": 0.0, |
|
"loss": 0.0, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"eval_loss": NaN, |
|
"eval_runtime": 51.6509, |
|
"eval_samples_per_second": 19.361, |
|
"eval_steps_per_second": 4.84, |
|
"step": 100000 |
|
} |
|
], |
|
"logging_steps": 100, |
|
"max_steps": 100000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 2500, |
|
"total_flos": 1.5733698330624e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|