gpt2-finetuned-imdb-sentiment / trainer_state.json
Uploaded by Bilal326 via huggingface_hub (commit 45a5b48, verified)
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 72000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.02,
"learning_rate": 4.965902777777778e-05,
"loss": 0.9428,
"step": 500
},
{
"epoch": 0.04,
"learning_rate": 4.931180555555556e-05,
"loss": 0.6733,
"step": 1000
},
{
"epoch": 0.06,
"learning_rate": 4.896458333333334e-05,
"loss": 0.6008,
"step": 1500
},
{
"epoch": 0.08,
"learning_rate": 4.861736111111111e-05,
"loss": 0.6298,
"step": 2000
},
{
"epoch": 0.1,
"learning_rate": 4.827013888888889e-05,
"loss": 0.6776,
"step": 2500
},
{
"epoch": 0.12,
"learning_rate": 4.7922916666666665e-05,
"loss": 0.9368,
"step": 3000
},
{
"epoch": 0.15,
"learning_rate": 4.7575694444444445e-05,
"loss": 0.993,
"step": 3500
},
{
"epoch": 0.17,
"learning_rate": 4.7228472222222225e-05,
"loss": 0.8873,
"step": 4000
},
{
"epoch": 0.19,
"learning_rate": 4.6881944444444445e-05,
"loss": 0.8436,
"step": 4500
},
{
"epoch": 0.21,
"learning_rate": 4.6534722222222225e-05,
"loss": 0.8983,
"step": 5000
},
{
"epoch": 0.23,
"learning_rate": 4.61875e-05,
"loss": 0.8515,
"step": 5500
},
{
"epoch": 0.25,
"learning_rate": 4.584027777777778e-05,
"loss": 0.7899,
"step": 6000
},
{
"epoch": 0.27,
"learning_rate": 4.5493750000000005e-05,
"loss": 0.7288,
"step": 6500
},
{
"epoch": 0.29,
"learning_rate": 4.514652777777778e-05,
"loss": 0.8042,
"step": 7000
},
{
"epoch": 0.31,
"learning_rate": 4.479930555555556e-05,
"loss": 0.7053,
"step": 7500
},
{
"epoch": 0.33,
"learning_rate": 4.445208333333333e-05,
"loss": 0.7334,
"step": 8000
},
{
"epoch": 0.35,
"learning_rate": 4.410555555555556e-05,
"loss": 0.5877,
"step": 8500
},
{
"epoch": 0.38,
"learning_rate": 4.375833333333334e-05,
"loss": 0.7819,
"step": 9000
},
{
"epoch": 0.4,
"learning_rate": 4.341111111111111e-05,
"loss": 0.6116,
"step": 9500
},
{
"epoch": 0.42,
"learning_rate": 4.306388888888889e-05,
"loss": 0.6046,
"step": 10000
},
{
"epoch": 0.44,
"learning_rate": 4.271736111111112e-05,
"loss": 0.8132,
"step": 10500
},
{
"epoch": 0.46,
"learning_rate": 4.237013888888889e-05,
"loss": 0.7237,
"step": 11000
},
{
"epoch": 0.48,
"learning_rate": 4.202291666666667e-05,
"loss": 0.8234,
"step": 11500
},
{
"epoch": 0.5,
"learning_rate": 4.1675694444444444e-05,
"loss": 0.7425,
"step": 12000
},
{
"epoch": 0.52,
"learning_rate": 4.132916666666667e-05,
"loss": 0.6984,
"step": 12500
},
{
"epoch": 0.54,
"learning_rate": 4.098194444444445e-05,
"loss": 0.5202,
"step": 13000
},
{
"epoch": 0.56,
"learning_rate": 4.0634722222222224e-05,
"loss": 0.5542,
"step": 13500
},
{
"epoch": 0.58,
"learning_rate": 4.0287500000000003e-05,
"loss": 0.6946,
"step": 14000
},
{
"epoch": 0.6,
"learning_rate": 3.9940277777777777e-05,
"loss": 0.7048,
"step": 14500
},
{
"epoch": 0.62,
"learning_rate": 3.9593750000000004e-05,
"loss": 0.6162,
"step": 15000
},
{
"epoch": 0.65,
"learning_rate": 3.9246527777777783e-05,
"loss": 0.6961,
"step": 15500
},
{
"epoch": 0.67,
"learning_rate": 3.8899305555555557e-05,
"loss": 0.6798,
"step": 16000
},
{
"epoch": 0.69,
"learning_rate": 3.8552083333333336e-05,
"loss": 0.6381,
"step": 16500
},
{
"epoch": 0.71,
"learning_rate": 3.820486111111111e-05,
"loss": 0.665,
"step": 17000
},
{
"epoch": 0.73,
"learning_rate": 3.7858333333333336e-05,
"loss": 0.6398,
"step": 17500
},
{
"epoch": 0.75,
"learning_rate": 3.7511111111111116e-05,
"loss": 0.6476,
"step": 18000
},
{
"epoch": 0.77,
"learning_rate": 3.716388888888889e-05,
"loss": 0.6423,
"step": 18500
},
{
"epoch": 0.79,
"learning_rate": 3.681666666666667e-05,
"loss": 0.7113,
"step": 19000
},
{
"epoch": 0.81,
"learning_rate": 3.647013888888889e-05,
"loss": 0.5891,
"step": 19500
},
{
"epoch": 0.83,
"learning_rate": 3.612291666666667e-05,
"loss": 0.5659,
"step": 20000
},
{
"epoch": 0.85,
"learning_rate": 3.577569444444445e-05,
"loss": 0.7071,
"step": 20500
},
{
"epoch": 0.88,
"learning_rate": 3.542847222222222e-05,
"loss": 0.4982,
"step": 21000
},
{
"epoch": 0.9,
"learning_rate": 3.508125e-05,
"loss": 0.5591,
"step": 21500
},
{
"epoch": 0.92,
"learning_rate": 3.4734027777777775e-05,
"loss": 0.4665,
"step": 22000
},
{
"epoch": 0.94,
"learning_rate": 3.4386805555555555e-05,
"loss": 0.5992,
"step": 22500
},
{
"epoch": 0.96,
"learning_rate": 3.4039583333333335e-05,
"loss": 0.5878,
"step": 23000
},
{
"epoch": 0.98,
"learning_rate": 3.3693055555555555e-05,
"loss": 0.4549,
"step": 23500
},
{
"epoch": 1.0,
"learning_rate": 3.3345833333333335e-05,
"loss": 0.5481,
"step": 24000
},
{
"epoch": 1.02,
"learning_rate": 3.299861111111111e-05,
"loss": 0.3857,
"step": 24500
},
{
"epoch": 1.04,
"learning_rate": 3.265138888888889e-05,
"loss": 0.4695,
"step": 25000
},
{
"epoch": 1.06,
"learning_rate": 3.230416666666667e-05,
"loss": 0.5488,
"step": 25500
},
{
"epoch": 1.08,
"learning_rate": 3.195763888888889e-05,
"loss": 0.3905,
"step": 26000
},
{
"epoch": 1.1,
"learning_rate": 3.161041666666667e-05,
"loss": 0.3481,
"step": 26500
},
{
"epoch": 1.12,
"learning_rate": 3.126319444444444e-05,
"loss": 0.4074,
"step": 27000
},
{
"epoch": 1.15,
"learning_rate": 3.091597222222222e-05,
"loss": 0.3932,
"step": 27500
},
{
"epoch": 1.17,
"learning_rate": 3.056875e-05,
"loss": 0.4097,
"step": 28000
},
{
"epoch": 1.19,
"learning_rate": 3.0222222222222225e-05,
"loss": 0.3805,
"step": 28500
},
{
"epoch": 1.21,
"learning_rate": 2.9875000000000004e-05,
"loss": 0.3944,
"step": 29000
},
{
"epoch": 1.23,
"learning_rate": 2.9527777777777778e-05,
"loss": 0.3737,
"step": 29500
},
{
"epoch": 1.25,
"learning_rate": 2.9180555555555554e-05,
"loss": 0.4347,
"step": 30000
},
{
"epoch": 1.27,
"learning_rate": 2.8834027777777778e-05,
"loss": 0.4832,
"step": 30500
},
{
"epoch": 1.29,
"learning_rate": 2.8486805555555558e-05,
"loss": 0.36,
"step": 31000
},
{
"epoch": 1.31,
"learning_rate": 2.8139583333333337e-05,
"loss": 0.4434,
"step": 31500
},
{
"epoch": 1.33,
"learning_rate": 2.7792361111111114e-05,
"loss": 0.3939,
"step": 32000
},
{
"epoch": 1.35,
"learning_rate": 2.7445138888888887e-05,
"loss": 0.5545,
"step": 32500
},
{
"epoch": 1.38,
"learning_rate": 2.7097916666666667e-05,
"loss": 0.3297,
"step": 33000
},
{
"epoch": 1.4,
"learning_rate": 2.6750694444444447e-05,
"loss": 0.369,
"step": 33500
},
{
"epoch": 1.42,
"learning_rate": 2.6403472222222227e-05,
"loss": 0.4523,
"step": 34000
},
{
"epoch": 1.44,
"learning_rate": 2.605625e-05,
"loss": 0.3366,
"step": 34500
},
{
"epoch": 1.46,
"learning_rate": 2.5709027777777776e-05,
"loss": 0.4618,
"step": 35000
},
{
"epoch": 1.48,
"learning_rate": 2.5361805555555556e-05,
"loss": 0.3792,
"step": 35500
},
{
"epoch": 1.5,
"learning_rate": 2.501527777777778e-05,
"loss": 0.4079,
"step": 36000
},
{
"epoch": 1.52,
"learning_rate": 2.4668055555555556e-05,
"loss": 0.322,
"step": 36500
},
{
"epoch": 1.54,
"learning_rate": 2.4320833333333333e-05,
"loss": 0.3155,
"step": 37000
},
{
"epoch": 1.56,
"learning_rate": 2.3973611111111113e-05,
"loss": 0.3767,
"step": 37500
},
{
"epoch": 1.58,
"learning_rate": 2.362638888888889e-05,
"loss": 0.4379,
"step": 38000
},
{
"epoch": 1.6,
"learning_rate": 2.327916666666667e-05,
"loss": 0.2879,
"step": 38500
},
{
"epoch": 1.62,
"learning_rate": 2.2931944444444445e-05,
"loss": 0.3315,
"step": 39000
},
{
"epoch": 1.65,
"learning_rate": 2.2585416666666666e-05,
"loss": 0.375,
"step": 39500
},
{
"epoch": 1.67,
"learning_rate": 2.2238194444444446e-05,
"loss": 0.4259,
"step": 40000
},
{
"epoch": 1.69,
"learning_rate": 2.1890972222222222e-05,
"loss": 0.331,
"step": 40500
},
{
"epoch": 1.71,
"learning_rate": 2.1543750000000002e-05,
"loss": 0.3414,
"step": 41000
},
{
"epoch": 1.73,
"learning_rate": 2.1197222222222225e-05,
"loss": 0.3511,
"step": 41500
},
{
"epoch": 1.75,
"learning_rate": 2.085e-05,
"loss": 0.3501,
"step": 42000
},
{
"epoch": 1.77,
"learning_rate": 2.050277777777778e-05,
"loss": 0.3793,
"step": 42500
},
{
"epoch": 1.79,
"learning_rate": 2.0155555555555555e-05,
"loss": 0.4683,
"step": 43000
},
{
"epoch": 1.81,
"learning_rate": 1.980902777777778e-05,
"loss": 0.4273,
"step": 43500
},
{
"epoch": 1.83,
"learning_rate": 1.946180555555556e-05,
"loss": 0.3493,
"step": 44000
},
{
"epoch": 1.85,
"learning_rate": 1.911458333333333e-05,
"loss": 0.3802,
"step": 44500
},
{
"epoch": 1.88,
"learning_rate": 1.876736111111111e-05,
"loss": 0.3083,
"step": 45000
},
{
"epoch": 1.9,
"learning_rate": 1.8420833333333335e-05,
"loss": 0.3615,
"step": 45500
},
{
"epoch": 1.92,
"learning_rate": 1.807361111111111e-05,
"loss": 0.4269,
"step": 46000
},
{
"epoch": 1.94,
"learning_rate": 1.7726388888888888e-05,
"loss": 0.3793,
"step": 46500
},
{
"epoch": 1.96,
"learning_rate": 1.7379166666666668e-05,
"loss": 0.3858,
"step": 47000
},
{
"epoch": 1.98,
"learning_rate": 1.7031944444444444e-05,
"loss": 0.3694,
"step": 47500
},
{
"epoch": 2.0,
"learning_rate": 1.6685416666666668e-05,
"loss": 0.372,
"step": 48000
},
{
"epoch": 2.02,
"learning_rate": 1.6338888888888888e-05,
"loss": 0.2141,
"step": 48500
},
{
"epoch": 2.04,
"learning_rate": 1.5991666666666668e-05,
"loss": 0.2247,
"step": 49000
},
{
"epoch": 2.06,
"learning_rate": 1.5644444444444444e-05,
"loss": 0.2085,
"step": 49500
},
{
"epoch": 2.08,
"learning_rate": 1.5297222222222224e-05,
"loss": 0.1905,
"step": 50000
},
{
"epoch": 2.1,
"learning_rate": 1.4950000000000001e-05,
"loss": 0.1304,
"step": 50500
},
{
"epoch": 2.12,
"learning_rate": 1.4602777777777779e-05,
"loss": 0.1376,
"step": 51000
},
{
"epoch": 2.15,
"learning_rate": 1.4255555555555556e-05,
"loss": 0.2761,
"step": 51500
},
{
"epoch": 2.17,
"learning_rate": 1.3908333333333334e-05,
"loss": 0.2343,
"step": 52000
},
{
"epoch": 2.19,
"learning_rate": 1.356111111111111e-05,
"loss": 0.1518,
"step": 52500
},
{
"epoch": 2.21,
"learning_rate": 1.321388888888889e-05,
"loss": 0.2608,
"step": 53000
},
{
"epoch": 2.23,
"learning_rate": 1.2866666666666668e-05,
"loss": 0.211,
"step": 53500
},
{
"epoch": 2.25,
"learning_rate": 1.2520138888888888e-05,
"loss": 0.1526,
"step": 54000
},
{
"epoch": 2.27,
"learning_rate": 1.2172916666666667e-05,
"loss": 0.2058,
"step": 54500
},
{
"epoch": 2.29,
"learning_rate": 1.1825694444444445e-05,
"loss": 0.1747,
"step": 55000
},
{
"epoch": 2.31,
"learning_rate": 1.1478472222222223e-05,
"loss": 0.236,
"step": 55500
},
{
"epoch": 2.33,
"learning_rate": 1.1131944444444445e-05,
"loss": 0.2769,
"step": 56000
},
{
"epoch": 2.35,
"learning_rate": 1.0784722222222223e-05,
"loss": 0.2139,
"step": 56500
},
{
"epoch": 2.38,
"learning_rate": 1.04375e-05,
"loss": 0.2679,
"step": 57000
},
{
"epoch": 2.4,
"learning_rate": 1.0090277777777778e-05,
"loss": 0.2829,
"step": 57500
},
{
"epoch": 2.42,
"learning_rate": 9.743055555555556e-06,
"loss": 0.1741,
"step": 58000
},
{
"epoch": 2.44,
"learning_rate": 9.396527777777778e-06,
"loss": 0.1771,
"step": 58500
},
{
"epoch": 2.46,
"learning_rate": 9.049305555555556e-06,
"loss": 0.2378,
"step": 59000
},
{
"epoch": 2.48,
"learning_rate": 8.702083333333334e-06,
"loss": 0.2023,
"step": 59500
},
{
"epoch": 2.5,
"learning_rate": 8.354861111111112e-06,
"loss": 0.2075,
"step": 60000
},
{
"epoch": 2.52,
"learning_rate": 8.008333333333334e-06,
"loss": 0.2484,
"step": 60500
},
{
"epoch": 2.54,
"learning_rate": 7.661111111111112e-06,
"loss": 0.1732,
"step": 61000
},
{
"epoch": 2.56,
"learning_rate": 7.31388888888889e-06,
"loss": 0.2412,
"step": 61500
},
{
"epoch": 2.58,
"learning_rate": 6.966666666666667e-06,
"loss": 0.2601,
"step": 62000
},
{
"epoch": 2.6,
"learning_rate": 6.6194444444444445e-06,
"loss": 0.1001,
"step": 62500
},
{
"epoch": 2.62,
"learning_rate": 6.272222222222223e-06,
"loss": 0.1968,
"step": 63000
},
{
"epoch": 2.65,
"learning_rate": 5.925e-06,
"loss": 0.0861,
"step": 63500
},
{
"epoch": 2.67,
"learning_rate": 5.577777777777778e-06,
"loss": 0.1727,
"step": 64000
},
{
"epoch": 2.69,
"learning_rate": 5.23125e-06,
"loss": 0.2547,
"step": 64500
},
{
"epoch": 2.71,
"learning_rate": 4.884027777777778e-06,
"loss": 0.1864,
"step": 65000
},
{
"epoch": 2.73,
"learning_rate": 4.536805555555556e-06,
"loss": 0.0953,
"step": 65500
},
{
"epoch": 2.75,
"learning_rate": 4.1902777777777775e-06,
"loss": 0.132,
"step": 66000
},
{
"epoch": 2.77,
"learning_rate": 3.843055555555556e-06,
"loss": 0.3,
"step": 66500
},
{
"epoch": 2.79,
"learning_rate": 3.4958333333333335e-06,
"loss": 0.2237,
"step": 67000
},
{
"epoch": 2.81,
"learning_rate": 3.1486111111111112e-06,
"loss": 0.1955,
"step": 67500
},
{
"epoch": 2.83,
"learning_rate": 2.801388888888889e-06,
"loss": 0.3045,
"step": 68000
},
{
"epoch": 2.85,
"learning_rate": 2.4541666666666667e-06,
"loss": 0.2148,
"step": 68500
},
{
"epoch": 2.88,
"learning_rate": 2.1069444444444445e-06,
"loss": 0.1646,
"step": 69000
},
{
"epoch": 2.9,
"learning_rate": 1.7597222222222223e-06,
"loss": 0.1259,
"step": 69500
},
{
"epoch": 2.92,
"learning_rate": 1.4131944444444446e-06,
"loss": 0.1705,
"step": 70000
},
{
"epoch": 2.94,
"learning_rate": 1.0659722222222221e-06,
"loss": 0.2866,
"step": 70500
},
{
"epoch": 2.96,
"learning_rate": 7.194444444444445e-07,
"loss": 0.1586,
"step": 71000
},
{
"epoch": 2.98,
"learning_rate": 3.7222222222222226e-07,
"loss": 0.1653,
"step": 71500
},
{
"epoch": 3.0,
"learning_rate": 2.5000000000000002e-08,
"loss": 0.1686,
"step": 72000
}
],
"logging_steps": 500,
"max_steps": 72000,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 3.7626732085248e+16,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
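The log above contains only training loss and learning rate entries (the learning rate decays roughly linearly from ~5e-5 to ~0 over 72,000 steps; no eval metrics were recorded despite eval_steps being 500). Below is a minimal sketch, not part of the original file, for inspecting the loss curve. It assumes the JSON is saved locally as trainer_state.json and that matplotlib is installed.

import json

import matplotlib.pyplot as plt

# Load the trainer state exported by the Hugging Face Trainer.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each entry in log_history holds "step", "loss", and "learning_rate".
steps = [entry["step"] for entry in state["log_history"]]
losses = [entry["loss"] for entry in state["log_history"]]

plt.plot(steps, losses)
plt.xlabel("global step")
plt.ylabel("training loss")
plt.title("gpt2-finetuned-imdb-sentiment: training loss over 3 epochs")
plt.show()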