File size: 360 Bytes (commit 7ffcbd2)
{
    "epoch": 2.996011396011396,
    "eval_loss": 0.6186444163322449,
    "eval_runtime": 442.2425,
    "eval_samples_per_second": 26.734,
    "eval_steps_per_second": 0.418,
    "total_flos": 2755219238682624.0,
    "train_loss": 0.604149692316215,
    "train_runtime": 70723.2043,
    "train_samples_per_second": 9.528,
    "train_steps_per_second": 0.019
}
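
The keys above (train/eval loss, runtimes, throughput, total FLOPs) match the run-summary JSON that the Hugging Face `Trainer` writes at the end of training. A minimal sketch for loading and summarizing these metrics, assuming the file is stored as `all_results.json` (the filename is not shown in this view and is an assumption):

```python
import json

# Load the run summary; the path is an assumed example, adjust as needed.
with open("all_results.json") as f:
    results = json.load(f)

# Print a few readable summaries derived from the recorded fields.
print(f"Epochs completed:   {results['epoch']:.2f}")
print(f"Final train loss:   {results['train_loss']:.4f}")
print(f"Final eval loss:    {results['eval_loss']:.4f}")
print(f"Training wall time: {results['train_runtime'] / 3600:.1f} h")
print(f"Train throughput:   {results['train_samples_per_second']:.2f} samples/s")
print(f"Eval throughput:    {results['eval_samples_per_second']:.2f} samples/s")
```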