{
"epoch": 3.0,
"eval_loss": 1.5226281881332397,
"eval_runtime": 266.3195,
"eval_samples_per_second": 3.755,
"eval_steps_per_second": 3.755,
"total_flos": 2.1746709092395008e+18,
"train_loss": 1.2714244133601815,
"train_runtime": 406836.4039,
"train_samples_per_second": 0.374,
"train_steps_per_second": 0.023
}
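The fields above match the combined train/eval summary that the Hugging Face Trainer writes at the end of a run. As a minimal sketch of reading it back, assuming the conventional filename `all_results.json` (the actual filename is not shown here):

```python
import json

# Load the metrics summary; "all_results.json" is the conventional Trainer
# output name and is assumed here, not confirmed by the file listing above.
with open("all_results.json") as f:
    metrics = json.load(f)

# Derive a couple of sanity checks from the recorded throughput numbers.
eval_samples = metrics["eval_runtime"] * metrics["eval_samples_per_second"]
train_hours = metrics["train_runtime"] / 3600

print(f"eval_loss={metrics['eval_loss']:.4f} over ~{eval_samples:.0f} eval samples")
print(f"train_loss={metrics['train_loss']:.4f} after {metrics['epoch']:.0f} epochs "
      f"(~{train_hours:.1f} h wall time)")
```

Note that `eval_steps_per_second` equals `eval_samples_per_second` here, which indicates an evaluation batch size of 1.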