{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 53.17919807434082,
"base_token_generation_latency_async": 53.305395878851414,
"base_token_generation_throughput_sync": 0.018804345236685772,
"base_token_generation_throughput_async": 0.0187598269089442,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 52.69626922607422,
"base_inference_latency_async": 51.61581039428711,
"base_inference_throughput_sync": 0.01897667547791406,
"base_inference_throughput_async": 0.01937390873767393,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 163.73534088134767,
"smashed_token_generation_latency_async": 164.74530082195997,
"smashed_token_generation_throughput_sync": 0.006107416973130189,
"smashed_token_generation_throughput_async": 0.006069975865840924,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 171.19641571044923,
"smashed_inference_latency_async": 139.2533779144287,
"smashed_inference_throughput_sync": 0.005841243789188535,
"smashed_inference_throughput_async": 0.007181154345961364,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}