{
"current_gpu_type": "NVIDIA A100-PCIE-40GB",
"current_gpu_total_memory": 40339.3125,
"perplexity": 5556.52587890625,
"token_generation_latency_sync": 164.92979888916017,
"token_generation_latency_async": 164.86735362559557,
"token_generation_throughput_sync": 0.0060631857113464535,
"token_generation_throughput_async": 0.006065482207417142,
"token_generation_CO2_emissions": null,
"token_generation_energy_consumption": null,
"inference_latency_sync": 252.58270874023438,
"inference_latency_async": 220.59009075164795,
"inference_throughput_sync": 0.003959099199575209,
"inference_throughput_async": 0.004533295201940205,
"inference_CO2_emissions": null,
"inference_energy_consumption": null
}