{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 38.753916549682614,
"base_token_generation_latency_async": 38.30525279045105,
"base_token_generation_throughput_sync": 0.02580384356037918,
"base_token_generation_throughput_async": 0.026106080162699923,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 32.50810871124268,
"base_inference_latency_async": 32.19771385192871,
"base_inference_throughput_sync": 0.030761555797743406,
"base_inference_throughput_async": 0.03105810569653528,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 65.75935745239258,
"smashed_token_generation_latency_async": 65.77239874750376,
"smashed_token_generation_throughput_sync": 0.015206961240823623,
"smashed_token_generation_throughput_async": 0.015203946017522323,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 70.72235565185547,
"smashed_inference_latency_async": 67.43879318237305,
"smashed_inference_throughput_sync": 0.014139800502724969,
"smashed_inference_throughput_async": 0.014828260602109605,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}