{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 9.401168823242188,
"base_token_generation_latency_sync": 38.77763404846191,
"base_token_generation_latency_async": 38.67101036012173,
"base_token_generation_throughput_sync": 0.02578806119915055,
"base_token_generation_throughput_async": 0.025859164027201596,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 121.23289642333984,
"base_inference_latency_async": 44.76165771484375,
"base_inference_throughput_sync": 0.008248586229500324,
"base_inference_throughput_async": 0.022340548832452702,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 9.998388290405273,
"smashed_token_generation_latency_sync": 62.574696731567386,
"smashed_token_generation_latency_async": 62.1792558580637,
"smashed_token_generation_throughput_sync": 0.01598090046348598,
"smashed_token_generation_throughput_async": 0.016082534057382344,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 195.8287338256836,
"smashed_inference_latency_async": 110.43708324432373,
"smashed_inference_throughput_sync": 0.005106502914378986,
"smashed_inference_throughput_async": 0.0090549294731704,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}