{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 21.553375244140625,
"base_token_generation_latency_sync": 24.06592712402344,
"base_token_generation_latency_async": 23.613459430634975,
"base_token_generation_throughput_sync": 0.04155252340150924,
"base_token_generation_throughput_async": 0.04234872924645034,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 40.53800964355469,
"base_inference_latency_async": 22.180819511413574,
"base_inference_throughput_sync": 0.024668206673017907,
"base_inference_throughput_async": 0.045083996986018955,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 23.580896377563477,
"smashed_token_generation_latency_sync": 36.70088005065918,
"smashed_token_generation_latency_async": 36.72023508697748,
"smashed_token_generation_throughput_sync": 0.02724730302433277,
"smashed_token_generation_throughput_async": 0.027232941119013736,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 66.75742721557617,
"smashed_inference_latency_async": 44.14482116699219,
"smashed_inference_throughput_sync": 0.014979606640183328,
"smashed_inference_throughput_async": 0.022652713807972485,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}