{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.405904769897461,
"base_token_generation_latency_sync": 37.993507385253906,
"base_token_generation_latency_async": 39.05783258378506,
"base_token_generation_throughput_sync": 0.026320286512640352,
"base_token_generation_throughput_async": 0.025603059203422162,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 118.9781494140625,
"base_inference_latency_async": 38.94762992858887,
"base_inference_throughput_sync": 0.008404904639421179,
"base_inference_throughput_async": 0.025675503280520966,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 37720.5078125,
"smashed_token_generation_latency_sync": 170.02661895751953,
"smashed_token_generation_latency_async": 170.09772770106792,
"smashed_token_generation_throughput_sync": 0.005881432014182709,
"smashed_token_generation_throughput_async": 0.0058789733026734705,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 265.8291702270508,
"smashed_inference_latency_async": 212.9425287246704,
"smashed_inference_throughput_sync": 0.0037618143981184497,
"smashed_inference_throughput_async": 0.004696102774720855,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}