{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 103.12415130940835,
  "global_step": 13200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.78,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 10.1419,
      "step": 100
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 9.2788,
      "step": 200
    },
    {
      "epoch": 2.34,
      "learning_rate": 3e-06,
      "loss": 8.8011,
      "step": 300
    },
    {
      "epoch": 3.12,
      "learning_rate": 4.000000000000001e-06,
      "loss": 8.3846,
      "step": 400
    },
    {
      "epoch": 3.9,
      "learning_rate": 5e-06,
      "loss": 7.8214,
      "step": 500
    },
    {
      "epoch": 4.68,
      "learning_rate": 6e-06,
      "loss": 7.3094,
      "step": 600
    },
    {
      "epoch": 5.47,
      "learning_rate": 7.000000000000001e-06,
      "loss": 6.7911,
      "step": 700
    },
    {
      "epoch": 6.25,
      "learning_rate": 8.000000000000001e-06,
      "loss": 6.4144,
      "step": 800
    },
    {
      "epoch": 7.03,
      "learning_rate": 9e-06,
      "loss": 6.2168,
      "step": 900
    },
    {
      "epoch": 7.81,
      "learning_rate": 1e-05,
      "loss": 6.0642,
      "step": 1000
    },
    {
      "epoch": 8.59,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 6.0511,
      "step": 1100
    },
    {
      "epoch": 9.37,
      "learning_rate": 1.2e-05,
      "loss": 5.995,
      "step": 1200
    },
    {
      "epoch": 10.16,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 5.9539,
      "step": 1300
    },
    {
      "epoch": 10.93,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 5.8675,
      "step": 1400
    },
    {
      "epoch": 11.71,
      "learning_rate": 1.5e-05,
      "loss": 5.8874,
      "step": 1500
    },
    {
      "epoch": 12.5,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 5.8578,
      "step": 1600
    },
    {
      "epoch": 13.28,
      "learning_rate": 1.7000000000000003e-05,
      "loss": 5.8386,
      "step": 1700
    },
    {
      "epoch": 14.06,
      "learning_rate": 1.8e-05,
      "loss": 5.8138,
      "step": 1800
    },
    {
      "epoch": 14.84,
      "learning_rate": 1.9e-05,
      "loss": 5.7399,
      "step": 1900
    },
    {
      "epoch": 15.62,
      "learning_rate": 2e-05,
      "loss": 5.7753,
      "step": 2000
    },
    {
      "epoch": 16.4,
      "learning_rate": 2.1e-05,
      "loss": 5.7564,
      "step": 2100
    },
    {
      "epoch": 17.19,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 5.738,
      "step": 2200
    },
    {
      "epoch": 17.96,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 5.6753,
      "step": 2300
    },
    {
      "epoch": 18.74,
      "learning_rate": 2.4e-05,
      "loss": 5.7082,
      "step": 2400
    },
    {
      "epoch": 19.53,
      "learning_rate": 2.5e-05,
      "loss": 5.6991,
      "step": 2500
    },
    {
      "epoch": 20.31,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 5.6801,
      "step": 2600
    },
    {
      "epoch": 21.09,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 5.6692,
      "step": 2700
    },
    {
      "epoch": 21.87,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 5.6063,
      "step": 2800
    },
    {
      "epoch": 22.65,
      "learning_rate": 2.9e-05,
      "loss": 5.6445,
      "step": 2900
    },
    {
      "epoch": 23.43,
      "learning_rate": 3e-05,
      "loss": 5.6328,
      "step": 3000
    },
    {
      "epoch": 24.22,
      "learning_rate": 3.1e-05,
      "loss": 5.6217,
      "step": 3100
    },
    {
      "epoch": 24.99,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 5.5601,
      "step": 3200
    },
    {
      "epoch": 25.78,
      "learning_rate": 3.3e-05,
      "loss": 5.5976,
      "step": 3300
    },
    {
      "epoch": 26.56,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 5.5911,
      "step": 3400
    },
    {
      "epoch": 27.34,
      "learning_rate": 3.5e-05,
      "loss": 5.5738,
      "step": 3500
    },
    {
      "epoch": 28.12,
      "learning_rate": 3.6e-05,
      "loss": 5.566,
      "step": 3600
    },
    {
      "epoch": 28.9,
      "learning_rate": 3.7e-05,
      "loss": 5.5071,
      "step": 3700
    },
    {
      "epoch": 29.68,
      "learning_rate": 3.8e-05,
      "loss": 5.5438,
      "step": 3800
    },
    {
      "epoch": 30.47,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 5.5366,
      "step": 3900
    },
    {
      "epoch": 31.25,
      "learning_rate": 4e-05,
      "loss": 5.5268,
      "step": 4000
    },
    {
      "epoch": 32.03,
      "learning_rate": 4.1e-05,
      "loss": 5.517,
      "step": 4100
    },
    {
      "epoch": 32.81,
      "learning_rate": 4.2e-05,
      "loss": 5.4574,
      "step": 4200
    },
    {
      "epoch": 33.59,
      "learning_rate": 4.3e-05,
      "loss": 5.5002,
      "step": 4300
    },
    {
      "epoch": 34.37,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 5.4887,
      "step": 4400
    },
    {
      "epoch": 35.16,
      "learning_rate": 4.5e-05,
      "loss": 5.4805,
      "step": 4500
    },
    {
      "epoch": 35.93,
      "learning_rate": 4.600000000000001e-05,
      "loss": 5.4265,
      "step": 4600
    },
    {
      "epoch": 36.71,
      "learning_rate": 4.7e-05,
      "loss": 5.4615,
      "step": 4700
    },
    {
      "epoch": 37.5,
      "learning_rate": 4.8e-05,
      "loss": 5.4576,
      "step": 4800
    },
    {
      "epoch": 38.28,
      "learning_rate": 4.9e-05,
      "loss": 5.4421,
      "step": 4900
    },
    {
      "epoch": 39.06,
      "learning_rate": 5e-05,
      "loss": 5.4342,
      "step": 5000
    },
    {
      "epoch": 39.84,
      "learning_rate": 5.1000000000000006e-05,
      "loss": 5.3641,
      "step": 5100
    },
    {
      "epoch": 40.62,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 5.379,
      "step": 5200
    },
    {
      "epoch": 41.4,
      "learning_rate": 5.300000000000001e-05,
      "loss": 5.3638,
      "step": 5300
    },
    {
      "epoch": 42.19,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 5.3441,
      "step": 5400
    },
    {
      "epoch": 42.96,
      "learning_rate": 5.500000000000001e-05,
      "loss": 5.2759,
      "step": 5500
    },
    {
      "epoch": 43.74,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 5.3011,
      "step": 5600
    },
    {
      "epoch": 44.53,
      "learning_rate": 5.6999999999999996e-05,
      "loss": 5.2758,
      "step": 5700
    },
    {
      "epoch": 45.31,
      "learning_rate": 5.8e-05,
      "loss": 5.2559,
      "step": 5800
    },
    {
      "epoch": 46.09,
      "learning_rate": 5.9e-05,
      "loss": 5.2326,
      "step": 5900
    },
    {
      "epoch": 46.87,
      "learning_rate": 6e-05,
      "loss": 5.1616,
      "step": 6000
    },
    {
      "epoch": 47.65,
      "learning_rate": 6.1e-05,
      "loss": 5.1753,
      "step": 6100
    },
    {
      "epoch": 48.43,
      "learning_rate": 6.2e-05,
      "loss": 5.1378,
      "step": 6200
    },
    {
      "epoch": 49.22,
      "learning_rate": 6.3e-05,
      "loss": 5.1,
      "step": 6300
    },
    {
      "epoch": 49.99,
      "learning_rate": 6.400000000000001e-05,
      "loss": 5.015,
      "step": 6400
    },
    {
      "epoch": 50.78,
      "learning_rate": 6.500000000000001e-05,
      "loss": 4.9758,
      "step": 6500
    },
    {
      "epoch": 51.56,
      "learning_rate": 6.6e-05,
      "loss": 4.8417,
      "step": 6600
    },
    {
      "epoch": 52.34,
      "learning_rate": 6.7e-05,
      "loss": 4.7116,
      "step": 6700
    },
    {
      "epoch": 53.12,
      "learning_rate": 6.800000000000001e-05,
      "loss": 4.5582,
      "step": 6800
    },
    {
      "epoch": 53.9,
      "learning_rate": 6.9e-05,
      "loss": 4.3437,
      "step": 6900
    },
    {
      "epoch": 54.68,
      "learning_rate": 7e-05,
      "loss": 4.2114,
      "step": 7000
    },
    {
      "epoch": 55.47,
      "learning_rate": 7.1e-05,
      "loss": 4.1021,
      "step": 7100
    },
    {
      "epoch": 56.25,
      "learning_rate": 7.2e-05,
      "loss": 4.0074,
      "step": 7200
    },
    {
      "epoch": 57.03,
      "learning_rate": 7.3e-05,
      "loss": 3.9346,
      "step": 7300
    },
    {
      "epoch": 57.81,
      "learning_rate": 7.4e-05,
      "loss": 3.8289,
      "step": 7400
    },
    {
      "epoch": 58.59,
      "learning_rate": 7.500000000000001e-05,
      "loss": 3.8105,
      "step": 7500
    },
    {
      "epoch": 59.37,
      "learning_rate": 7.6e-05,
      "loss": 3.755,
      "step": 7600
    },
    {
      "epoch": 60.16,
      "learning_rate": 7.7e-05,
      "loss": 3.7105,
      "step": 7700
    },
    {
      "epoch": 60.93,
      "learning_rate": 7.800000000000001e-05,
      "loss": 3.6394,
      "step": 7800
    },
    {
      "epoch": 61.71,
      "learning_rate": 7.900000000000001e-05,
      "loss": 3.6262,
      "step": 7900
    },
    {
      "epoch": 62.5,
      "learning_rate": 8e-05,
      "loss": 3.5924,
      "step": 8000
    },
    {
      "epoch": 63.28,
      "learning_rate": 8.1e-05,
      "loss": 3.558,
      "step": 8100
    },
    {
      "epoch": 64.06,
      "learning_rate": 8.2e-05,
      "loss": 3.5255,
      "step": 8200
    },
    {
      "epoch": 64.84,
      "learning_rate": 8.3e-05,
      "loss": 3.4602,
      "step": 8300
    },
    {
      "epoch": 65.62,
      "learning_rate": 8.4e-05,
      "loss": 3.4641,
      "step": 8400
    },
    {
      "epoch": 66.4,
      "learning_rate": 8.5e-05,
      "loss": 3.435,
      "step": 8500
    },
    {
      "epoch": 67.19,
      "learning_rate": 8.6e-05,
      "loss": 3.408,
      "step": 8600
    },
    {
      "epoch": 67.96,
      "learning_rate": 8.7e-05,
      "loss": 3.3594,
      "step": 8700
    },
    {
      "epoch": 68.74,
      "learning_rate": 8.800000000000001e-05,
      "loss": 3.3593,
      "step": 8800
    },
    {
      "epoch": 69.53,
      "learning_rate": 8.900000000000001e-05,
      "loss": 3.3372,
      "step": 8900
    },
    {
      "epoch": 70.31,
      "learning_rate": 9e-05,
      "loss": 3.3217,
      "step": 9000
    },
    {
      "epoch": 71.09,
      "learning_rate": 9.1e-05,
      "loss": 3.2985,
      "step": 9100
    },
    {
      "epoch": 71.87,
      "learning_rate": 9.200000000000001e-05,
      "loss": 3.2509,
      "step": 9200
    },
    {
      "epoch": 72.65,
      "learning_rate": 9.300000000000001e-05,
      "loss": 3.2584,
      "step": 9300
    },
    {
      "epoch": 73.43,
      "learning_rate": 9.4e-05,
      "loss": 3.2386,
      "step": 9400
    },
    {
      "epoch": 74.22,
      "learning_rate": 9.5e-05,
      "loss": 3.2232,
      "step": 9500
    },
    {
      "epoch": 74.99,
      "learning_rate": 9.6e-05,
      "loss": 3.1786,
      "step": 9600
    },
    {
      "epoch": 75.78,
      "learning_rate": 9.7e-05,
      "loss": 3.1855,
      "step": 9700
    },
    {
      "epoch": 76.56,
      "learning_rate": 9.8e-05,
      "loss": 3.1737,
      "step": 9800
    },
    {
      "epoch": 77.34,
      "learning_rate": 9.900000000000001e-05,
      "loss": 3.1565,
      "step": 9900
    },
    {
      "epoch": 78.12,
      "learning_rate": 0.0001,
      "loss": 3.1442,
      "step": 10000
    },
    {
      "epoch": 78.9,
      "learning_rate": 9.904580152671757e-05,
      "loss": 3.1003,
      "step": 10100
    },
    {
      "epoch": 79.68,
      "learning_rate": 9.809160305343512e-05,
      "loss": 3.1137,
      "step": 10200
    },
    {
      "epoch": 80.47,
      "learning_rate": 9.713740458015268e-05,
      "loss": 3.0958,
      "step": 10300
    },
    {
      "epoch": 81.25,
      "learning_rate": 9.618320610687024e-05,
      "loss": 3.0853,
      "step": 10400
    },
    {
      "epoch": 82.03,
      "learning_rate": 9.522900763358779e-05,
      "loss": 3.0704,
      "step": 10500
    },
    {
      "epoch": 82.81,
      "learning_rate": 9.427480916030534e-05,
      "loss": 3.03,
      "step": 10600
    },
    {
      "epoch": 83.59,
      "learning_rate": 9.33206106870229e-05,
      "loss": 3.0428,
      "step": 10700
    },
    {
      "epoch": 84.37,
      "learning_rate": 9.236641221374047e-05,
      "loss": 3.0299,
      "step": 10800
    },
    {
      "epoch": 85.16,
      "learning_rate": 9.141221374045802e-05,
      "loss": 3.0239,
      "step": 10900
    },
    {
      "epoch": 85.93,
      "learning_rate": 9.045801526717558e-05,
      "loss": 2.9818,
      "step": 11000
    },
    {
      "epoch": 86.71,
      "learning_rate": 8.950381679389314e-05,
      "loss": 2.9967,
      "step": 11100
    },
    {
      "epoch": 87.5,
      "learning_rate": 8.854961832061069e-05,
      "loss": 2.9866,
      "step": 11200
    },
    {
      "epoch": 88.28,
      "learning_rate": 8.759541984732825e-05,
      "loss": 2.9758,
      "step": 11300
    },
    {
      "epoch": 89.06,
      "learning_rate": 8.664122137404582e-05,
      "loss": 2.968,
      "step": 11400
    },
    {
      "epoch": 89.84,
      "learning_rate": 8.568702290076335e-05,
      "loss": 2.9276,
      "step": 11500
    },
    {
      "epoch": 90.62,
      "learning_rate": 8.473282442748092e-05,
      "loss": 2.9464,
      "step": 11600
    },
    {
      "epoch": 91.4,
      "learning_rate": 8.377862595419848e-05,
      "loss": 2.9342,
      "step": 11700
    },
    {
      "epoch": 92.19,
      "learning_rate": 8.282442748091603e-05,
      "loss": 2.9313,
      "step": 11800
    },
    {
      "epoch": 92.96,
      "learning_rate": 8.187022900763359e-05,
      "loss": 2.8932,
      "step": 11900
    },
    {
      "epoch": 93.74,
      "learning_rate": 8.091603053435115e-05,
      "loss": 2.9087,
      "step": 12000
    },
    {
      "epoch": 94.53,
      "learning_rate": 7.996183206106872e-05,
      "loss": 2.9054,
      "step": 12100
    },
    {
      "epoch": 95.31,
      "learning_rate": 7.900763358778626e-05,
      "loss": 2.8916,
      "step": 12200
    },
    {
      "epoch": 96.09,
      "learning_rate": 7.805343511450383e-05,
      "loss": 2.8888,
      "step": 12300
    },
    {
      "epoch": 96.87,
      "learning_rate": 7.709923664122138e-05,
      "loss": 2.8505,
      "step": 12400
    },
    {
      "epoch": 97.65,
      "learning_rate": 7.614503816793893e-05,
      "loss": 2.8729,
      "step": 12500
    },
    {
      "epoch": 98.43,
      "learning_rate": 7.519083969465649e-05,
      "loss": 2.8576,
      "step": 12600
    },
    {
      "epoch": 99.22,
      "learning_rate": 7.423664122137405e-05,
      "loss": 2.8569,
      "step": 12700
    },
    {
      "epoch": 99.99,
      "learning_rate": 7.32824427480916e-05,
      "loss": 2.8276,
      "step": 12800
    },
    {
      "epoch": 100.78,
      "learning_rate": 7.232824427480916e-05,
      "loss": 2.8404,
      "step": 12900
    },
    {
      "epoch": 101.56,
      "learning_rate": 7.137404580152673e-05,
      "loss": 2.8337,
      "step": 13000
    },
    {
      "epoch": 102.34,
      "learning_rate": 7.041984732824428e-05,
      "loss": 2.8285,
      "step": 13100
    },
    {
      "epoch": 103.12,
      "learning_rate": 6.946564885496184e-05,
      "loss": 2.8209,
      "step": 13200
    }
  ],
  "max_steps": 20480,
  "num_train_epochs": 160,
  "total_flos": 8.948591590637568e+17,
  "trial_name": null,
  "trial_params": null
}