{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 82.86221313476562,
"base_token_generation_latency_sync": 23.3876594543457,
"base_token_generation_latency_async": 23.057953640818596,
"base_token_generation_throughput_sync": 0.042757591966484196,
"base_token_generation_throughput_async": 0.043368983023269636,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 41.338264846801756,
"base_inference_latency_async": 23.22986125946045,
"base_inference_throughput_sync": 0.024190662179604465,
"base_inference_throughput_async": 0.043048040142415665,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 82.88560485839844,
"smashed_token_generation_latency_sync": 38.0381778717041,
"smashed_token_generation_latency_async": 38.69975656270981,
"smashed_token_generation_throughput_sync": 0.026289377040425523,
"smashed_token_generation_throughput_async": 0.02583995582451743,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 66.39616012573242,
"smashed_inference_latency_async": 47.6445198059082,
"smashed_inference_throughput_sync": 0.01506111193940026,
"smashed_inference_throughput_async": 0.02098877277121794,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}