{
"current_gpu_type": "NVIDIA A100-PCIE-40GB",
"current_gpu_total_memory": 40339.3125,
"perplexity": 26.50621795654297,
"token_generation_latency_sync": 58.16511192321777,
"token_generation_latency_async": 58.18860549479723,
"token_generation_throughput_sync": 0.01719243661595758,
"token_generation_throughput_async": 0.01718549519268703,
"token_generation_CO2_emissions": null,
"token_generation_energy_consumption": null,
"inference_latency_sync": 181.9189239501953,
"inference_latency_async": 103.4461259841919,
"inference_throughput_sync": 0.005496954238107598,
"inference_throughput_async": 0.009666867564985613,
"inference_CO2_emissions": null,
"inference_energy_consumption": null
}