{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 101.87378540039063,
"base_token_generation_latency_async": 101.67951695621014,
"base_token_generation_throughput_sync": 0.009816067951826256,
"base_token_generation_throughput_async": 0.009834822488689296,
"base_token_generation_CO2_emissions": 2.6819935676011093e-05,
"base_token_generation_energy_consumption": 0.008565413871084828,
"base_inference_latency_sync": 90.84702758789062,
"base_inference_latency_async": 88.71569633483887,
"base_inference_throughput_sync": 0.011007514792187808,
"base_inference_throughput_async": 0.01127196247466411,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 63.452283477783205,
"smashed_token_generation_latency_async": 64.33862000703812,
"smashed_token_generation_throughput_sync": 0.015759874116274063,
"smashed_token_generation_throughput_async": 0.01554276420430852,
"smashed_token_generation_CO2_emissions": 1.9680748845555618e-05,
"smashed_token_generation_energy_consumption": 0.0053972414185424426,
"smashed_inference_latency_sync": 76.47395782470703,
"smashed_inference_latency_async": 73.17061424255371,
"smashed_inference_throughput_sync": 0.013076346882584418,
"smashed_inference_throughput_async": 0.013666688606509356
}