[
  {
    "loss": 0.07571334838867187,
    "learning_rate": 4.208860759493671e-05,
    "epoch": 1.5818181818181818,
    "total_flos": 41209510416075264,
    "step": 500
  },
  {
    "loss": 0.018005035400390626,
    "learning_rate": 3.4177215189873416e-05,
    "epoch": 3.1644268774703557,
    "total_flos": 82425029676129792,
    "step": 1000
  },
  {
    "loss": 0.00924688720703125,
    "learning_rate": 2.626582278481013e-05,
    "epoch": 4.746245059288538,
    "total_flos": 123634540092205056,
    "step": 1500
  },
  {
    "loss": 0.004915031433105468,
    "learning_rate": 1.8354430379746836e-05,
    "epoch": 6.328853754940711,
    "total_flos": 164850059352259584,
    "step": 2000
  },
  {
    "loss": 0.0025854034423828127,
    "learning_rate": 1.0443037974683544e-05,
    "epoch": 7.910671936758893,
    "total_flos": 206059569768334848,
    "step": 2500
  },
  {
    "loss": 0.0013542556762695312,
    "learning_rate": 2.531645569620253e-06,
    "epoch": 9.493280632411068,
    "total_flos": 247275089028389376,
    "step": 3000
  }
]