{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 10.639251708984375,
"base_token_generation_latency_sync": 39.957108688354495,
"base_token_generation_latency_async": 39.34535309672356,
"base_token_generation_throughput_sync": 0.025026835845393645,
"base_token_generation_throughput_async": 0.025415962020767173,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.04286651611328,
"base_inference_latency_async": 40.1911735534668,
"base_inference_throughput_sync": 0.008400335352010723,
"base_inference_throughput_async": 0.024881084864807147,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 11.474609375,
"smashed_token_generation_latency_sync": 47.31715698242188,
"smashed_token_generation_latency_async": 46.08920980244875,
"smashed_token_generation_throughput_sync": 0.021133983184397485,
"smashed_token_generation_throughput_async": 0.02169705239656483,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 162.52672119140624,
"smashed_inference_latency_async": 70.94693183898926,
"smashed_inference_throughput_sync": 0.006152834393442965,
"smashed_inference_throughput_async": 0.014095042224933041,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}