File size: 292 Bytes · commit 2cf6678
{
    "epoch": 0.9998717955292536,
    "num_input_tokens_seen": 798621696,
    "total_flos": 4.855554488590664e+17,
    "train_loss": 5.304058988399935,
    "train_runtime": 127910.3478,
    "train_samples": 390002,
    "train_samples_per_second": 3.049,
    "train_steps_per_second": 0.048
}
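
The keys above match the training summary that the Hugging Face Trainer typically writes at the end of a run (epoch, input tokens seen, total FLOPs, mean training loss, wall-clock runtime, and throughput). A minimal sketch for reading the file back and cross-checking the reported throughput against the raw counts; the filename `train_results.json` is an assumption, adjust it to the actual path:

```python
import json

# Load the training-metrics summary shown above.
# NOTE: "train_results.json" is an assumed filename.
with open("train_results.json") as f:
    metrics = json.load(f)

# Derive throughput from the raw counts and compare with the reported value.
samples_per_sec = metrics["train_samples"] / metrics["train_runtime"]
tokens_per_sec = metrics["num_input_tokens_seen"] / metrics["train_runtime"]

print(f"samples/sec (derived): {samples_per_sec:.3f} "
      f"(reported: {metrics['train_samples_per_second']})")
print(f"tokens/sec  (derived): {tokens_per_sec:,.0f}")
print(f"final train loss: {metrics['train_loss']:.4f} "
      f"after {metrics['epoch']:.4f} epochs")
```

With the values shown, the derived samples/sec is 390002 / 127910.3478 ≈ 3.049, which agrees with the reported `train_samples_per_second`.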