{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 76.79094085693359,
"base_token_generation_latency_async": 76.63809824734926,
"base_token_generation_throughput_sync": 0.013022369420672466,
"base_token_generation_throughput_async": 0.013048340484291541,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 77.35275573730469,
"base_inference_latency_async": 77.56454944610596,
"base_inference_throughput_sync": 0.012927787645938164,
"base_inference_throughput_async": 0.01289248770399715,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 163.55000762939454,
"smashed_token_generation_latency_async": 164.2586711794138,
"smashed_token_generation_throughput_sync": 0.006114337837672298,
"smashed_token_generation_throughput_async": 0.0060879586619067205,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 170.26846618652343,
"smashed_inference_latency_async": 133.2902193069458,
"smashed_inference_throughput_sync": 0.0058730781007008855,
"smashed_inference_throughput_async": 0.007502425948427332,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}