{
    "epoch": 4.0,
    "eval_loss": 1.9417628049850464,
    "eval_runtime": 91.2862,
    "eval_samples_per_second": 19.291,
    "eval_steps_per_second": 0.307,
    "perplexity": 6.971028709988671,
    "total_flos": 414362338590720.0,
    "train_loss": 1.7886004945443523,
    "train_runtime": 13163.6015,
    "train_samples_per_second": 4.815,
    "train_steps_per_second": 0.151
}
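For reference, the reported perplexity matches exp(eval_loss), the convention used by the Hugging Face example training scripts. A minimal check in Python, assuming the file above is saved as all_results.json (filename is an assumption):

import json
import math

# Load the metrics written at the end of training/evaluation.
with open("all_results.json") as f:  # hypothetical filename
    results = json.load(f)

# Perplexity should equal exp(eval_loss): exp(1.9418) ≈ 6.9710.
assert math.isclose(math.exp(results["eval_loss"]), results["perplexity"], rel_tol=1e-6)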