{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": "Infinity",
"base_token_generation_latency_sync": 19.835946083068848,
"base_token_generation_latency_async": 20.04348710179329,
"base_token_generation_throughput_sync": 0.0504135268271151,
"base_token_generation_throughput_async": 0.049891518123636784,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 41.10581817626953,
"base_inference_latency_async": 16.791272163391113,
"base_inference_throughput_sync": 0.024327456412905117,
"base_inference_throughput_async": 0.05955474905470433,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": "Infinity",
"smashed_token_generation_latency_sync": 23.63898696899414,
"smashed_token_generation_latency_async": 23.484380170702934,
"smashed_token_generation_throughput_sync": 0.042302997218605046,
"smashed_token_generation_throughput_async": 0.04258149428391186,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 52.26373176574707,
"smashed_inference_latency_async": 23.98686408996582,
"smashed_inference_throughput_sync": 0.019133727466728395,
"smashed_inference_throughput_async": 0.04168948455493687,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}