{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 29282.24609375,
"base_token_generation_latency_sync": 31.626212120056152,
"base_token_generation_latency_async": 31.4815953373909,
"base_token_generation_throughput_sync": 0.03161934145650777,
"base_token_generation_throughput_async": 0.031764591002549775,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 129.1005958557129,
"base_inference_latency_async": 31.828856468200684,
"base_inference_throughput_sync": 0.007745897634102581,
"base_inference_throughput_async": 0.03141803102474234,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 28594.048828125,
"smashed_token_generation_latency_sync": 39.41377983093262,
"smashed_token_generation_latency_async": 39.471565932035446,
"smashed_token_generation_throughput_sync": 0.02537183706534999,
"smashed_token_generation_throughput_async": 0.025334692870352827,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 183.90487060546874,
"smashed_inference_latency_async": 65.73736667633057,
"smashed_inference_throughput_sync": 0.00543759388594607,
"smashed_inference_throughput_async": 0.015212048345709907,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}