{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 10.639251708984375,
"base_token_generation_latency_sync": 40.07466354370117,
"base_token_generation_latency_async": 40.20857363939285,
"base_token_generation_throughput_sync": 0.024953422226727025,
"base_token_generation_throughput_async": 0.02487031768319897,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.12806396484375,
"base_inference_latency_async": 40.34836292266846,
"base_inference_throughput_sync": 0.008394327639666108,
"base_inference_throughput_async": 0.024784153000621038,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 11.474609375,
"smashed_token_generation_latency_sync": 47.03260345458985,
"smashed_token_generation_latency_async": 46.74163851886988,
"smashed_token_generation_throughput_sync": 0.02126184660318674,
"smashed_token_generation_throughput_async": 0.021394200795855585,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 162.76981506347656,
"smashed_inference_latency_async": 70.5676794052124,
"smashed_inference_throughput_sync": 0.006143645242885006,
"smashed_inference_throughput_async": 0.014170793321087106,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}