{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 11.575923919677734,
"base_token_generation_latency_sync": 38.96055145263672,
"base_token_generation_latency_async": 38.49546890705824,
"base_token_generation_throughput_sync": 0.025666987830387174,
"base_token_generation_throughput_async": 0.025977083235805123,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.22933731079101,
"base_inference_latency_async": 39.063167572021484,
"base_inference_throughput_sync": 0.008387197501512018,
"base_inference_throughput_async": 0.025599562507476678,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 174225.5,
"smashed_token_generation_latency_sync": 179.29608459472655,
"smashed_token_generation_latency_async": 174.7506869956851,
"smashed_token_generation_throughput_sync": 0.005577366634973421,
"smashed_token_generation_throughput_async": 0.005722438161428755,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 267.67871856689453,
"smashed_inference_latency_async": 230.59861660003662,
"smashed_inference_throughput_sync": 0.0037358218290711592,
"smashed_inference_throughput_async": 0.004336539458666645,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}