{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.177373602536292,
  "global_step": 110000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.04, "learning_rate": 4.9791423327215084e-05, "loss": 8.0732, "step": 500},
    {"epoch": 0.08, "learning_rate": 4.958284665443017e-05, "loss": 7.3801, "step": 1000},
    {"epoch": 0.13, "learning_rate": 4.9374269981645254e-05, "loss": 7.0667, "step": 1500},
    {"epoch": 0.17, "learning_rate": 4.9165693308860336e-05, "loss": 6.906, "step": 2000},
    {"epoch": 0.21, "learning_rate": 4.8957116636075424e-05, "loss": 6.7114, "step": 2500},
    {"epoch": 0.25, "learning_rate": 4.8748539963290505e-05, "loss": 6.6333, "step": 3000},
    {"epoch": 0.29, "learning_rate": 4.8539963290505594e-05, "loss": 6.4948, "step": 3500},
    {"epoch": 0.33, "learning_rate": 4.8331386617720675e-05, "loss": 6.3157, "step": 4000},
    {"epoch": 0.38, "learning_rate": 4.812280994493576e-05, "loss": 6.1185, "step": 4500},
    {"epoch": 0.42, "learning_rate": 4.7914233272150845e-05, "loss": 5.9335, "step": 5000},
    {"epoch": 0.46, "learning_rate": 4.770565659936593e-05, "loss": 5.7535, "step": 5500},
    {"epoch": 0.5, "learning_rate": 4.749707992658101e-05, "loss": 5.5631, "step": 6000},
    {"epoch": 0.54, "learning_rate": 4.72885032537961e-05, "loss": 5.3707, "step": 6500},
    {"epoch": 0.58, "learning_rate": 4.707992658101118e-05, "loss": 5.2234, "step": 7000},
    {"epoch": 0.63, "learning_rate": 4.687134990822627e-05, "loss": 5.1324, "step": 7500},
    {"epoch": 0.67, "learning_rate": 4.666277323544135e-05, "loss": 5.023, "step": 8000},
    {"epoch": 0.71, "learning_rate": 4.645419656265643e-05, "loss": 4.9164, "step": 8500},
    {"epoch": 0.75, "learning_rate": 4.624561988987152e-05, "loss": 4.8293, "step": 9000},
    {"epoch": 0.79, "learning_rate": 4.60370432170866e-05, "loss": 4.7737, "step": 9500},
    {"epoch": 0.83, "learning_rate": 4.582846654430168e-05, "loss": 4.6915, "step": 10000},
    {"epoch": 0.88, "learning_rate": 4.561988987151678e-05, "loss": 4.6345, "step": 10500},
    {"epoch": 0.92, "learning_rate": 4.541131319873186e-05, "loss": 4.6052, "step": 11000},
    {"epoch": 0.96, "learning_rate": 4.520273652594694e-05, "loss": 4.5157, "step": 11500},
    {"epoch": 1.0, "learning_rate": 4.499415985316203e-05, "loss": 4.4535, "step": 12000},
    {"epoch": 1.04, "learning_rate": 4.478558318037711e-05, "loss": 4.4238, "step": 12500},
    {"epoch": 1.08, "learning_rate": 4.45770065075922e-05, "loss": 4.3574, "step": 13000},
    {"epoch": 1.13, "learning_rate": 4.436842983480728e-05, "loss": 4.3355, "step": 13500},
    {"epoch": 1.17, "learning_rate": 4.415985316202236e-05, "loss": 4.3007, "step": 14000},
    {"epoch": 1.21, "learning_rate": 4.395127648923745e-05, "loss": 4.2482, "step": 14500},
    {"epoch": 1.25, "learning_rate": 4.374269981645253e-05, "loss": 4.1904, "step": 15000},
    {"epoch": 1.29, "learning_rate": 4.353412314366761e-05, "loss": 4.1877, "step": 15500},
    {"epoch": 1.33, "learning_rate": 4.33255464708827e-05, "loss": 4.1422, "step": 16000},
    {"epoch": 1.38, "learning_rate": 4.311696979809778e-05, "loss": 4.0952, "step": 16500},
    {"epoch": 1.42, "learning_rate": 4.2908393125312865e-05, "loss": 4.0837, "step": 17000},
    {"epoch": 1.46, "learning_rate": 4.269981645252795e-05, "loss": 4.0552, "step": 17500},
    {"epoch": 1.5, "learning_rate": 4.2491239779743035e-05, "loss": 4.0141, "step": 18000},
    {"epoch": 1.54, "learning_rate": 4.228266310695812e-05, "loss": 3.9979, "step": 18500},
    {"epoch": 1.59, "learning_rate": 4.2074086434173204e-05, "loss": 3.9504, "step": 19000},
    {"epoch": 1.63, "learning_rate": 4.1865509761388286e-05, "loss": 3.9522, "step": 19500},
    {"epoch": 1.67, "learning_rate": 4.1656933088603374e-05, "loss": 3.9057, "step": 20000},
    {"epoch": 1.71, "learning_rate": 4.1448356415818456e-05, "loss": 3.8997, "step": 20500},
    {"epoch": 1.75, "learning_rate": 4.123977974303354e-05, "loss": 3.8594, "step": 21000},
    {"epoch": 1.79, "learning_rate": 4.1031203070248626e-05, "loss": 3.823, "step": 21500},
    {"epoch": 1.84, "learning_rate": 4.082262639746371e-05, "loss": 3.8168, "step": 22000},
    {"epoch": 1.88, "learning_rate": 4.0614049724678796e-05, "loss": 3.7902, "step": 22500},
    {"epoch": 1.92, "learning_rate": 4.040547305189388e-05, "loss": 3.7825, "step": 23000},
    {"epoch": 1.96, "learning_rate": 4.019689637910896e-05, "loss": 3.7529, "step": 23500},
    {"epoch": 2.0, "learning_rate": 3.998831970632405e-05, "loss": 3.7309, "step": 24000},
    {"epoch": 2.04, "learning_rate": 3.977974303353913e-05, "loss": 3.6979, "step": 24500},
    {"epoch": 2.09, "learning_rate": 3.957116636075421e-05, "loss": 3.6644, "step": 25000},
    {"epoch": 2.13, "learning_rate": 3.93625896879693e-05, "loss": 3.6569, "step": 25500},
    {"epoch": 2.17, "learning_rate": 3.915401301518438e-05, "loss": 3.6378, "step": 26000},
    {"epoch": 2.21, "learning_rate": 3.894543634239947e-05, "loss": 3.6143, "step": 26500},
    {"epoch": 2.25, "learning_rate": 3.873685966961455e-05, "loss": 3.5909, "step": 27000},
    {"epoch": 2.29, "learning_rate": 3.852828299682963e-05, "loss": 3.6046, "step": 27500},
    {"epoch": 2.34, "learning_rate": 3.831970632404472e-05, "loss": 3.577, "step": 28000},
    {"epoch": 2.38, "learning_rate": 3.81111296512598e-05, "loss": 3.5381, "step": 28500},
    {"epoch": 2.42, "learning_rate": 3.7902552978474884e-05, "loss": 3.5436, "step": 29000},
    {"epoch": 2.46, "learning_rate": 3.769397630568997e-05, "loss": 3.5292, "step": 29500},
    {"epoch": 2.5, "learning_rate": 3.7485399632905054e-05, "loss": 3.5224, "step": 30000},
    {"epoch": 2.54, "learning_rate": 3.727682296012014e-05, "loss": 3.5099, "step": 30500},
    {"epoch": 2.59, "learning_rate": 3.706824628733523e-05, "loss": 3.4796, "step": 31000},
    {"epoch": 2.63, "learning_rate": 3.685966961455031e-05, "loss": 3.475, "step": 31500},
    {"epoch": 2.67, "learning_rate": 3.66510929417654e-05, "loss": 3.4557, "step": 32000},
    {"epoch": 2.71, "learning_rate": 3.644251626898048e-05, "loss": 3.4315, "step": 32500},
    {"epoch": 2.75, "learning_rate": 3.6233939596195564e-05, "loss": 3.4356, "step": 33000},
    {"epoch": 2.79, "learning_rate": 3.602536292341065e-05, "loss": 3.4244, "step": 33500},
    {"epoch": 2.84, "learning_rate": 3.5816786250625733e-05, "loss": 3.4026, "step": 34000},
    {"epoch": 2.88, "learning_rate": 3.5608209577840815e-05, "loss": 3.42, "step": 34500},
    {"epoch": 2.92, "learning_rate": 3.5399632905055903e-05, "loss": 3.3832, "step": 35000},
    {"epoch": 2.96, "learning_rate": 3.5191056232270985e-05, "loss": 3.3545, "step": 35500},
    {"epoch": 3.0, "learning_rate": 3.4982479559486067e-05, "loss": 3.3327, "step": 36000},
    {"epoch": 3.05, "learning_rate": 3.4773902886701155e-05, "loss": 3.3129, "step": 36500},
    {"epoch": 3.09, "learning_rate": 3.4565326213916237e-05, "loss": 3.3115, "step": 37000},
    {"epoch": 3.13, "learning_rate": 3.4356749541131325e-05, "loss": 3.2831, "step": 37500},
    {"epoch": 3.17, "learning_rate": 3.4148172868346407e-05, "loss": 3.2743, "step": 38000},
    {"epoch": 3.21, "learning_rate": 3.393959619556149e-05, "loss": 3.2856, "step": 38500},
    {"epoch": 3.25, "learning_rate": 3.3731019522776576e-05, "loss": 3.267, "step": 39000},
    {"epoch": 3.3, "learning_rate": 3.352244284999166e-05, "loss": 3.264, "step": 39500},
    {"epoch": 3.34, "learning_rate": 3.331386617720674e-05, "loss": 3.2614, "step": 40000},
    {"epoch": 3.38, "learning_rate": 3.310528950442183e-05, "loss": 3.2554, "step": 40500},
    {"epoch": 3.42, "learning_rate": 3.289671283163691e-05, "loss": 3.2308, "step": 41000},
    {"epoch": 3.46, "learning_rate": 3.2688136158852e-05, "loss": 3.2329, "step": 41500},
    {"epoch": 3.5, "learning_rate": 3.247955948606708e-05, "loss": 3.2174, "step": 42000},
    {"epoch": 3.55, "learning_rate": 3.227098281328216e-05, "loss": 3.1932, "step": 42500},
    {"epoch": 3.59, "learning_rate": 3.206240614049725e-05, "loss": 3.1748, "step": 43000},
    {"epoch": 3.63, "learning_rate": 3.185382946771233e-05, "loss": 3.1664, "step": 43500},
    {"epoch": 3.67, "learning_rate": 3.164525279492741e-05, "loss": 3.1862, "step": 44000},
    {"epoch": 3.71, "learning_rate": 3.14366761221425e-05, "loss": 3.1649, "step": 44500},
    {"epoch": 3.75, "learning_rate": 3.122809944935758e-05, "loss": 3.1528, "step": 45000},
    {"epoch": 3.8, "learning_rate": 3.101952277657267e-05, "loss": 3.1512, "step": 45500},
    {"epoch": 3.84, "learning_rate": 3.081094610378775e-05, "loss": 3.1362, "step": 46000},
    {"epoch": 3.88, "learning_rate": 3.0602369431002834e-05, "loss": 3.1517, "step": 46500},
    {"epoch": 3.92, "learning_rate": 3.039379275821792e-05, "loss": 3.1104, "step": 47000},
    {"epoch": 3.96, "learning_rate": 3.0185216085433004e-05, "loss": 3.1216, "step": 47500},
    {"epoch": 4.0, "learning_rate": 2.997663941264809e-05, "loss": 3.1177, "step": 48000},
    {"epoch": 4.05, "learning_rate": 2.9768062739863174e-05, "loss": 3.0815, "step": 48500},
    {"epoch": 4.09, "learning_rate": 2.9559486067078256e-05, "loss": 3.0839, "step": 49000},
    {"epoch": 4.13, "learning_rate": 2.935090939429334e-05, "loss": 3.0499, "step": 49500},
    {"epoch": 4.17, "learning_rate": 2.9142332721508426e-05, "loss": 3.0597, "step": 50000},
    {"epoch": 4.21, "learning_rate": 2.8933756048723514e-05, "loss": 3.0588, "step": 50500},
    {"epoch": 4.25, "learning_rate": 2.87251793759386e-05, "loss": 3.0393, "step": 51000},
    {"epoch": 4.3, "learning_rate": 2.8516602703153684e-05, "loss": 3.041, "step": 51500},
    {"epoch": 4.34, "learning_rate": 2.8308026030368766e-05, "loss": 3.0235, "step": 52000},
    {"epoch": 4.38, "learning_rate": 2.809944935758385e-05, "loss": 3.0257, "step": 52500},
    {"epoch": 4.42, "learning_rate": 2.7890872684798936e-05, "loss": 3.0206, "step": 53000},
    {"epoch": 4.46, "learning_rate": 2.768229601201402e-05, "loss": 3.0039, "step": 53500},
    {"epoch": 4.51, "learning_rate": 2.7473719339229102e-05, "loss": 2.9904, "step": 54000},
    {"epoch": 4.55, "learning_rate": 2.7265142666444187e-05, "loss": 2.9984, "step": 54500},
    {"epoch": 4.59, "learning_rate": 2.7056565993659272e-05, "loss": 2.9806, "step": 55000},
    {"epoch": 4.63, "learning_rate": 2.6847989320874357e-05, "loss": 2.9864, "step": 55500},
    {"epoch": 4.67, "learning_rate": 2.663941264808944e-05, "loss": 2.9916, "step": 56000},
    {"epoch": 4.71, "learning_rate": 2.6430835975304524e-05, "loss": 2.9799, "step": 56500},
    {"epoch": 4.76, "learning_rate": 2.622225930251961e-05, "loss": 2.9744, "step": 57000},
    {"epoch": 4.8, "learning_rate": 2.6013682629734694e-05, "loss": 2.9689, "step": 57500},
    {"epoch": 4.84, "learning_rate": 2.5805105956949775e-05, "loss": 2.9755, "step": 58000},
    {"epoch": 4.88, "learning_rate": 2.559652928416486e-05, "loss": 2.9355, "step": 58500},
    {"epoch": 4.92, "learning_rate": 2.5387952611379945e-05, "loss": 2.9549, "step": 59000},
    {"epoch": 4.96, "learning_rate": 2.517937593859503e-05, "loss": 2.9552, "step": 59500},
    {"epoch": 5.01, "learning_rate": 2.497079926581011e-05, "loss": 2.9608, "step": 60000},
    {"epoch": 5.05, "learning_rate": 2.4762222593025197e-05, "loss": 2.9073, "step": 60500},
    {"epoch": 5.09, "learning_rate": 2.455364592024028e-05, "loss": 2.9192, "step": 61000},
    {"epoch": 5.13, "learning_rate": 2.4345069247455367e-05, "loss": 2.922, "step": 61500},
    {"epoch": 5.17, "learning_rate": 2.4136492574670448e-05, "loss": 2.8877, "step": 62000},
    {"epoch": 5.21, "learning_rate": 2.3927915901885533e-05, "loss": 2.9045, "step": 62500},
    {"epoch": 5.26, "learning_rate": 2.3719339229100618e-05, "loss": 2.8876, "step": 63000},
    {"epoch": 5.3, "learning_rate": 2.3510762556315703e-05, "loss": 2.8986, "step": 63500},
    {"epoch": 5.34, "learning_rate": 2.3302185883530785e-05, "loss": 2.8924, "step": 64000},
    {"epoch": 5.38, "learning_rate": 2.309360921074587e-05, "loss": 2.8761, "step": 64500},
    {"epoch": 5.42, "learning_rate": 2.2885032537960955e-05, "loss": 2.8782, "step": 65000},
    {"epoch": 5.46, "learning_rate": 2.267645586517604e-05, "loss": 2.878, "step": 65500},
    {"epoch": 5.51, "learning_rate": 2.2467879192391125e-05, "loss": 2.8706, "step": 66000},
    {"epoch": 5.55, "learning_rate": 2.225930251960621e-05, "loss": 2.8647, "step": 66500},
    {"epoch": 5.59, "learning_rate": 2.2050725846821295e-05, "loss": 2.8407, "step": 67000},
    {"epoch": 5.63, "learning_rate": 2.1842149174036376e-05, "loss": 2.86, "step": 67500},
    {"epoch": 5.67, "learning_rate": 2.163357250125146e-05, "loss": 2.8585, "step": 68000},
    {"epoch": 5.72, "learning_rate": 2.1424995828466546e-05, "loss": 2.8472, "step": 68500},
    {"epoch": 5.76, "learning_rate": 2.121641915568163e-05, "loss": 2.8495, "step": 69000},
    {"epoch": 5.8, "learning_rate": 2.1007842482896713e-05, "loss": 2.8375, "step": 69500},
    {"epoch": 5.84, "learning_rate": 2.0799265810111798e-05, "loss": 2.8456, "step": 70000},
    {"epoch": 5.88, "learning_rate": 2.0590689137326883e-05, "loss": 2.8388, "step": 70500},
    {"epoch": 5.92, "learning_rate": 2.0382112464541968e-05, "loss": 2.8346, "step": 71000},
    {"epoch": 5.97, "learning_rate": 2.017353579175705e-05, "loss": 2.8259, "step": 71500},
    {"epoch": 6.01, "learning_rate": 1.9964959118972134e-05, "loss": 2.8211, "step": 72000},
    {"epoch": 6.05, "learning_rate": 1.975638244618722e-05, "loss": 2.7936, "step": 72500},
    {"epoch": 6.09, "learning_rate": 1.9547805773402304e-05, "loss": 2.7904, "step": 73000},
    {"epoch": 6.13, "learning_rate": 1.9339229100617386e-05, "loss": 2.7982, "step": 73500},
    {"epoch": 6.17, "learning_rate": 1.913065242783247e-05, "loss": 2.7948, "step": 74000},
    {"epoch": 6.22, "learning_rate": 1.8922075755047556e-05, "loss": 2.8152, "step": 74500},
    {"epoch": 6.26, "learning_rate": 1.871349908226264e-05, "loss": 2.7862, "step": 75000},
    {"epoch": 6.3, "learning_rate": 1.8504922409477726e-05, "loss": 2.7915, "step": 75500},
    {"epoch": 6.34, "learning_rate": 1.829634573669281e-05, "loss": 2.7796, "step": 76000},
    {"epoch": 6.38, "learning_rate": 1.8087769063907896e-05, "loss": 2.7892, "step": 76500},
    {"epoch": 6.42, "learning_rate": 1.7879192391122977e-05, "loss": 2.7827, "step": 77000},
    {"epoch": 6.47, "learning_rate": 1.7670615718338062e-05, "loss": 2.7857, "step": 77500},
    {"epoch": 6.51, "learning_rate": 1.7462039045553147e-05, "loss": 2.7889, "step": 78000},
    {"epoch": 6.55, "learning_rate": 1.7253462372768232e-05, "loss": 2.748, "step": 78500},
    {"epoch": 6.59, "learning_rate": 1.7044885699983314e-05, "loss": 2.7607, "step": 79000},
    {"epoch": 6.63, "learning_rate": 1.68363090271984e-05, "loss": 2.7597, "step": 79500},
    {"epoch": 6.67, "learning_rate": 1.6627732354413484e-05, "loss": 2.7591, "step": 80000},
    {"epoch": 6.72, "learning_rate": 1.641915568162857e-05, "loss": 2.759, "step": 80500},
    {"epoch": 6.76, "learning_rate": 1.621057900884365e-05, "loss": 2.7519, "step": 81000},
    {"epoch": 6.8, "learning_rate": 1.6002002336058735e-05, "loss": 2.745, "step": 81500},
    {"epoch": 6.84, "learning_rate": 1.579342566327382e-05, "loss": 2.7569, "step": 82000},
    {"epoch": 6.88, "learning_rate": 1.5584848990488905e-05, "loss": 2.7549, "step": 82500},
    {"epoch": 6.92, "learning_rate": 1.5376272317703987e-05, "loss": 2.748, "step": 83000},
    {"epoch": 6.97, "learning_rate": 1.5167695644919072e-05, "loss": 2.7653, "step": 83500},
    {"epoch": 7.01, "learning_rate": 1.4959118972134157e-05, "loss": 2.7258, "step": 84000},
    {"epoch": 7.05, "learning_rate": 1.475054229934924e-05, "loss": 2.729, "step": 84500},
    {"epoch": 7.09, "learning_rate": 1.4541965626564325e-05, "loss": 2.7198, "step": 85000},
    {"epoch": 7.13, "learning_rate": 1.4333388953779412e-05, "loss": 2.7184, "step": 85500},
    {"epoch": 7.18, "learning_rate": 1.4124812280994495e-05, "loss": 2.7079, "step": 86000},
    {"epoch": 7.22, "learning_rate": 1.391623560820958e-05, "loss": 2.7243, "step": 86500},
    {"epoch": 7.26, "learning_rate": 1.3707658935424663e-05, "loss": 2.6942, "step": 87000},
    {"epoch": 7.3, "learning_rate": 1.3499082262639748e-05, "loss": 2.7002, "step": 87500},
    {"epoch": 7.34, "learning_rate": 1.3290505589854831e-05, "loss": 2.7085, "step": 88000},
    {"epoch": 7.38, "learning_rate": 1.3081928917069916e-05, "loss": 2.6986, "step": 88500},
    {"epoch": 7.43, "learning_rate": 1.2873352244285e-05, "loss": 2.7054, "step": 89000},
    {"epoch": 7.47, "learning_rate": 1.2664775571500085e-05, "loss": 2.7214, "step": 89500},
    {"epoch": 7.51, "learning_rate": 1.2456198898715168e-05, "loss": 2.7054, "step": 90000},
    {"epoch": 7.55, "learning_rate": 1.2247622225930253e-05, "loss": 2.7054, "step": 90500},
    {"epoch": 7.59, "learning_rate": 1.2039045553145336e-05, "loss": 2.7097, "step": 91000},
    {"epoch": 7.63, "learning_rate": 1.1830468880360421e-05, "loss": 2.6911, "step": 91500},
    {"epoch": 7.68, "learning_rate": 1.1621892207575505e-05, "loss": 2.6768, "step": 92000},
    {"epoch": 7.72, "learning_rate": 1.141331553479059e-05, "loss": 2.6735, "step": 92500},
    {"epoch": 7.76, "learning_rate": 1.1204738862005673e-05, "loss": 2.703, "step": 93000},
    {"epoch": 7.8, "learning_rate": 1.099616218922076e-05, "loss": 2.6704, "step": 93500},
    {"epoch": 7.84, "learning_rate": 1.0787585516435843e-05, "loss": 2.6833, "step": 94000},
    {"epoch": 7.88, "learning_rate": 1.0579008843650926e-05, "loss": 2.6809, "step": 94500},
    {"epoch": 7.93, "learning_rate": 1.0370432170866011e-05, "loss": 2.6766, "step": 95000},
    {"epoch": 7.97, "learning_rate": 1.0161855498081094e-05, "loss": 2.6719, "step": 95500},
    {"epoch": 8.01, "learning_rate": 9.95327882529618e-06, "loss": 2.6801, "step": 96000},
    {"epoch": 8.05, "learning_rate": 9.744702152511263e-06, "loss": 2.6555, "step": 96500},
    {"epoch": 8.09, "learning_rate": 9.536125479726348e-06, "loss": 2.656, "step": 97000},
    {"epoch": 8.13, "learning_rate": 9.32754880694143e-06, "loss": 2.6629, "step": 97500},
    {"epoch": 8.18, "learning_rate": 9.118972134156516e-06, "loss": 2.6591, "step": 98000},
    {"epoch": 8.22, "learning_rate": 8.9103954613716e-06, "loss": 2.6521, "step": 98500},
    {"epoch": 8.26, "learning_rate": 8.701818788586686e-06, "loss": 2.6594, "step": 99000},
    {"epoch": 8.3, "learning_rate": 8.493242115801769e-06, "loss": 2.65, "step": 99500},
    {"epoch": 8.34, "learning_rate": 8.284665443016854e-06, "loss": 2.6492, "step": 100000},
    {"epoch": 8.38, "learning_rate": 8.076088770231937e-06, "loss": 2.6443, "step": 100500},
    {"epoch": 8.43, "learning_rate": 7.867512097447022e-06, "loss": 2.6567, "step": 101000},
    {"epoch": 8.47, "learning_rate": 7.658935424662106e-06, "loss": 2.6213, "step": 101500},
    {"epoch": 8.51, "learning_rate": 7.45035875187719e-06, "loss": 2.6659, "step": 102000},
    {"epoch": 8.55, "learning_rate": 7.241782079092274e-06, "loss": 2.6456, "step": 102500},
    {"epoch": 8.59, "learning_rate": 7.033205406307358e-06, "loss": 2.6431, "step": 103000},
    {"epoch": 8.64, "learning_rate": 6.824628733522444e-06, "loss": 2.6418, "step": 103500},
    {"epoch": 8.68, "learning_rate": 6.616052060737528e-06, "loss": 2.6437, "step": 104000},
    {"epoch": 8.72, "learning_rate": 6.407475387952612e-06, "loss": 2.6398, "step": 104500},
    {"epoch": 8.76, "learning_rate": 6.198898715167696e-06, "loss": 2.6565, "step": 105000},
    {"epoch": 8.8, "learning_rate": 5.99032204238278e-06, "loss": 2.6403, "step": 105500},
    {"epoch": 8.84, "learning_rate": 5.7817453695978644e-06, "loss": 2.6243, "step": 106000},
    {"epoch": 8.89, "learning_rate": 5.5731686968129486e-06, "loss": 2.6253, "step": 106500},
    {"epoch": 8.93, "learning_rate": 5.3645920240280335e-06, "loss": 2.6358, "step": 107000},
    {"epoch": 8.97, "learning_rate": 5.156015351243118e-06, "loss": 2.6267, "step": 107500},
    {"epoch": 9.01, "learning_rate": 4.947438678458202e-06, "loss": 2.6483, "step": 108000},
    {"epoch": 9.05, "learning_rate": 4.738862005673286e-06, "loss": 2.6325, "step": 108500},
    {"epoch": 9.09, "learning_rate": 4.53028533288837e-06, "loss": 2.6104, "step": 109000},
    {"epoch": 9.14, "learning_rate": 4.321708660103454e-06, "loss": 2.6263, "step": 109500},
    {"epoch": 9.18, "learning_rate": 4.113131987318538e-06, "loss": 2.6118, "step": 110000}
  ],
  "max_steps": 119860,
  "num_train_epochs": 10,
  "total_flos": 4.514572271830057e+17,
  "trial_name": null,
  "trial_params": null
}