{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 82.86221313476562,
"base_token_generation_latency_sync": 22.66096477508545,
"base_token_generation_latency_async": 20.442129857838154,
"base_token_generation_throughput_sync": 0.04412874782363405,
"base_token_generation_throughput_async": 0.048918581720904616,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 41.06926002502441,
"base_inference_latency_async": 18.918275833129883,
"base_inference_throughput_sync": 0.024349111705218886,
"base_inference_throughput_async": 0.05285893962116725,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": Infinity,
"smashed_token_generation_latency_sync": 85.1107421875,
"smashed_token_generation_latency_async": 82.53523223102093,
"smashed_token_generation_throughput_sync": 0.011749398187563537,
"smashed_token_generation_throughput_async": 0.012116037878235342,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 88.11602020263672,
"smashed_inference_latency_async": 77.12936401367188,
"smashed_inference_throughput_sync": 0.011348674142344852,
"smashed_inference_throughput_async": 0.012965230723576833,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}
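For reference, a minimal sketch (not part of the original file) of how the base_*/smashed_* metrics above might be loaded and compared side by side in Python. The filename "results.json" is a placeholder, and the ratio printout is an assumed convenience; note that Python's json module accepts the non-standard Infinity literal used for "smashed_perplexity".

import json

# Load the metrics file (filename assumed).
with open("results.json") as f:
    metrics = json.load(f)

# Pair each numeric base_* metric with its smashed_* counterpart and
# print the smashed/base ratio; null values and string fields are skipped.
for key, base in metrics.items():
    if not key.startswith("base_") or not isinstance(base, (int, float)):
        continue
    name = key[len("base_"):]
    smashed = metrics.get("smashed_" + name)
    if not isinstance(smashed, (int, float)):
        continue
    print(f"{name}: base={base:.4f}  smashed={smashed:.4f}  "
          f"ratio={smashed / base:.2f}x")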