{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 47.838710403442384,
"base_token_generation_latency_async": 47.88779690861702,
"base_token_generation_throughput_sync": 0.02090357351957468,
"base_token_generation_throughput_async": 0.020882146696125377,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 46.498303985595705,
"base_inference_latency_async": 45.331716537475586,
"base_inference_throughput_sync": 0.021506160747492665,
"base_inference_throughput_async": 0.022059610276908514,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 188.36228790283204,
"smashed_token_generation_latency_async": 188.33214957267046,
"smashed_token_generation_throughput_sync": 0.005308918314455051,
"smashed_token_generation_throughput_async": 0.005309767887580643,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 194.22156677246093,
"smashed_inference_latency_async": 166.04604721069336,
"smashed_inference_throughput_sync": 0.005148758794493424,
"smashed_inference_throughput_async": 0.00602242580777075,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}