{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.891793251037598,
"base_token_generation_latency_sync": 33.19310340881348,
"base_token_generation_latency_async": 33.19741077721119,
"base_token_generation_throughput_sync": 0.030126740114769703,
"base_token_generation_throughput_async": 0.03012283116629275,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 122.0282356262207,
"base_inference_latency_async": 39.394283294677734,
"base_inference_throughput_sync": 0.008194824704858111,
"base_inference_throughput_async": 0.025384393784239817,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 97609.7265625,
"smashed_token_generation_latency_sync": 169.4194564819336,
"smashed_token_generation_latency_async": 169.6473952382803,
"smashed_token_generation_throughput_sync": 0.0059025097870423,
"smashed_token_generation_throughput_async": 0.005894579156935701,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 268.8793609619141,
"smashed_inference_latency_async": 214.86566066741943,
"smashed_inference_throughput_sync": 0.0037191400501046523,
"smashed_inference_throughput_async": 0.004654070812868761,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}