{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 13.74486255645752,
"base_token_generation_latency_sync": 32.37070541381836,
"base_token_generation_latency_async": 32.25280176848173,
"base_token_generation_throughput_sync": 0.030892128769400295,
"base_token_generation_throughput_async": 0.031005058325730504,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 123.61564178466797,
"base_inference_latency_async": 43.86191368103027,
"base_inference_throughput_sync": 0.008089591135577714,
"base_inference_throughput_async": 0.0227988228528316,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 170382.984375,
"smashed_token_generation_latency_sync": 175.3365936279297,
"smashed_token_generation_latency_async": 175.29774066060781,
"smashed_token_generation_throughput_sync": 0.005703316001005669,
"smashed_token_generation_throughput_async": 0.0057045800831859545,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 270.7505142211914,
"smashed_inference_latency_async": 230.96849918365479,
"smashed_inference_throughput_sync": 0.003693437121907157,
"smashed_inference_throughput_async": 0.0043295947435881684,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}