{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": Infinity,
"base_token_generation_latency_sync": 20.81943130493164,
"base_token_generation_latency_async": 21.098139323294163,
"base_token_generation_throughput_sync": 0.048032051661426656,
"base_token_generation_throughput_async": 0.04739754462119387,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 39.95013084411621,
"base_inference_latency_async": 18.52705478668213,
"base_inference_throughput_sync": 0.02503120712925721,
"base_inference_throughput_async": 0.053975119710815214,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": Infinity,
"smashed_token_generation_latency_sync": 33.48402099609375,
"smashed_token_generation_latency_async": 33.967031352221966,
"smashed_token_generation_throughput_sync": 0.02986499142730379,
"smashed_token_generation_throughput_async": 0.029440311978708868,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 65.11626243591309,
"smashed_inference_latency_async": 40.59250354766846,
"smashed_inference_throughput_sync": 0.015357146780102622,
"smashed_inference_throughput_async": 0.024635090536499756,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}
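
For reference, a minimal sketch of how this report could be consumed to compare the base and smashed models. The filename `results.json` and the millisecond unit for latencies are assumptions (the throughput values are the reciprocals of the latencies, e.g. 1 / 20.819 ≈ 0.0480, which is consistent with latency in ms and throughput in tokens per ms). Note that the `Infinity` literal is not standard JSON, but Python's `json` module parses it by default:

```python
import json

# Load the benchmark report. "results.json" is a placeholder name for this file.
# Python's json module accepts the non-standard Infinity literal out of the box,
# mapping it to float("inf").
with open("results.json") as f:
    metrics = json.load(f)

# Latencies appear to be in milliseconds, so a ratio > 1 below means the
# smashed model is slower than the base model on that metric.
for metric in (
    "token_generation_latency_sync",
    "token_generation_latency_async",
    "inference_latency_sync",
    "inference_latency_async",
):
    base = metrics[f"base_{metric}"]
    smashed = metrics[f"smashed_{metric}"]
    print(f"{metric}: smashed/base = {smashed / base:.2f}x")
```

On this particular run the ratios come out above 1 (e.g. 33.48 / 20.82 ≈ 1.61 for synchronous token generation), i.e. the smashed variant measured slower than the base model on the reported latency metrics.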