{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 6.637787342071533,
"base_token_generation_latency_sync": 39.560679244995114,
"base_token_generation_latency_async": 39.7673673927784,
"base_token_generation_throughput_sync": 0.025277624628412102,
"base_token_generation_throughput_async": 0.025146245918747846,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 118.31193618774414,
"base_inference_latency_async": 38.94233703613281,
"base_inference_throughput_sync": 0.008452232566062843,
"base_inference_throughput_async": 0.02567899299603272,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 152463.125,
"smashed_token_generation_latency_sync": 176.87936248779297,
"smashed_token_generation_latency_async": 177.394749969244,
"smashed_token_generation_throughput_sync": 0.0056535708063116365,
"smashed_token_generation_throughput_async": 0.005637145406915233,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 270.4816131591797,
"smashed_inference_latency_async": 232.70025253295898,
"smashed_inference_throughput_sync": 0.0036971089765406545,
"smashed_inference_throughput_async": 0.004297373935416606,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}