{
    "epoch": 100.0,
    "total_flos": 1.8822505078960128e+18,
    "train_loss": 0.18895462423563003,
    "train_runtime": 1214.4267,
    "train_samples_per_second": 20.009,
    "train_steps_per_second": 0.165
}