roberta-base-education-domain / all_results.json
{
    "epoch": 3.0,
    "eval_loss": 1.8736294507980347,
    "eval_mem_cpu_alloc_delta": 73728,
    "eval_mem_cpu_peaked_delta": 69632,
    "eval_mem_gpu_alloc_delta": 0,
    "eval_mem_gpu_peaked_delta": 1368289280,
    "eval_runtime": 1.981,
    "eval_samples": 500,
    "eval_samples_per_second": 252.396,
    "init_mem_cpu_alloc_delta": 2391441408,
    "init_mem_cpu_peaked_delta": 380411904,
    "init_mem_gpu_alloc_delta": 500087808,
    "init_mem_gpu_peaked_delta": 0,
    "perplexity": 6.511888138461621,
    "train_mem_cpu_alloc_delta": 2262335488,
    "train_mem_cpu_peaked_delta": 53248,
    "train_mem_gpu_alloc_delta": 1501058560,
    "train_mem_gpu_peaked_delta": 1229582848,
    "train_runtime": 25.2846,
    "train_samples": 500,
    "train_samples_per_second": 1.898
}
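
The `perplexity` field here is the exponential of the evaluation cross-entropy loss: exp(1.8736) ≈ 6.5119, matching the stored value. A minimal sketch for reading this file and checking that relationship, assuming `all_results.json` is available in the working directory:

```python
import json
import math

# Load the evaluation summary written out at the end of training
# (path is illustrative; adjust to wherever all_results.json lives).
with open("all_results.json") as f:
    results = json.load(f)

# Perplexity is exp(eval_loss) for a language-modeling evaluation.
perplexity = math.exp(results["eval_loss"])

print(f"eval_loss  = {results['eval_loss']:.4f}")
print(f"perplexity = {perplexity:.4f}")  # ~6.5119, matching results["perplexity"]
```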