{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.405904769897461,
"base_token_generation_latency_sync": 38.60344696044922,
"base_token_generation_latency_async": 38.134160824120045,
"base_token_generation_throughput_sync": 0.025904422499486643,
"base_token_generation_throughput_async": 0.026223207181931614,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 120.48721923828126,
"base_inference_latency_async": 39.444828033447266,
"base_inference_throughput_sync": 0.008299635482684288,
"base_inference_throughput_async": 0.025351866134440983,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 9.43250846862793,
"smashed_token_generation_latency_sync": 62.9321174621582,
"smashed_token_generation_latency_async": 62.363552674651146,
"smashed_token_generation_throughput_sync": 0.015890137505722914,
"smashed_token_generation_throughput_async": 0.016035006940944674,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 198.10078887939454,
"smashed_inference_latency_async": 111.86470985412598,
"smashed_inference_throughput_sync": 0.005047935475960212,
"smashed_inference_throughput_async": 0.008939369719941363,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}
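
A minimal sketch of how these metrics could be consumed, assuming the JSON above is saved locally (the filename model_metrics.json is hypothetical). The throughput fields appear to be the reciprocals of the corresponding latency fields (e.g. 1 / 38.6034 ≈ 0.025904), and the smashed_* keys report the same measurements for the compressed ("smashed") model:

import json
import math

# Load the benchmark results; the filename is an assumption.
with open("model_metrics.json") as f:
    metrics = json.load(f)

# Throughput appears to be the reciprocal of latency
# (e.g. 1 / 38.6034... == 0.025904... for sync token generation).
assert math.isclose(
    metrics["base_token_generation_throughput_sync"],
    1 / metrics["base_token_generation_latency_sync"],
    rel_tol=1e-6,
)

# Compare the original ("base") and compressed ("smashed") model
# on the metrics that are populated (the CO2/energy fields are null).
for name in ("perplexity",
             "token_generation_latency_sync",
             "inference_latency_sync"):
    base = metrics[f"base_{name}"]
    smashed = metrics[f"smashed_{name}"]
    print(f"{name}: base={base:.4f} smashed={smashed:.4f} "
          f"ratio={smashed / base:.2f}x")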