{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 32.622706985473634,
"base_token_generation_latency_async": 33.04024673998356,
"base_token_generation_throughput_sync": 0.03065349544552768,
"base_token_generation_throughput_async": 0.030266117800804827,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 32.64716815948486,
"base_inference_latency_async": 32.19940662384033,
"base_inference_throughput_sync": 0.03063052804809576,
"base_inference_throughput_async": 0.03105647292455394,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 65.22703285217285,
"smashed_token_generation_latency_async": 64.26504217088223,
"smashed_token_generation_throughput_sync": 0.015331066833384064,
"smashed_token_generation_throughput_async": 0.015560559305959483,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 64.54333419799805,
"smashed_inference_latency_async": 62.95466423034668,
"smashed_inference_throughput_sync": 0.015493466713887507,
"smashed_inference_throughput_async": 0.015884446565246867,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}