AmpGPT2 / all_results.json
{
"epoch": 50.0,
"eval_accuracy": 0.42128729553879257,
"eval_loss": 3.988962173461914,
"eval_runtime": 19.7085,
"eval_samples": 1169,
"eval_samples_per_second": 59.315,
"eval_steps_per_second": 1.877,
"perplexity": 53.99881886539013,
"train_loss": 4.29683648186761,
"train_runtime": 9035.6616,
"train_samples": 4718,
"train_samples_per_second": 26.108,
"train_steps_per_second": 0.819
}
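
Note that the stored perplexity is simply the exponential of the evaluation loss: exp(3.988962...) ≈ 53.9988, matching the "perplexity" field above. The following is a minimal Python sketch, not part of the repository, that loads this file and recomputes perplexity from eval_loss; the file path is assumed to be relative to the repository root.

import json
import math

# Load the training/evaluation summary (path is an assumption; adjust as needed).
with open("all_results.json") as f:
    results = json.load(f)

# Perplexity of a causal language model is exp of the average cross-entropy loss,
# so recomputing it from eval_loss should reproduce the stored value.
recomputed = math.exp(results["eval_loss"])
print(f"stored perplexity: {results['perplexity']:.6f}")
print(f"exp(eval_loss):    {recomputed:.6f}")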