[
    {
        "model": "google/gemma-2b",
        "commit": "dcdda5324bcc7a750b5e40e11dd795442204ff27",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.020058400154113767,
            "prefill.throughput.value": 348.98097287008073,
            "decode.latency.mean": 2.4373055419921874,
            "decode.throughput.value": 52.10672105401839,
            "per_token.latency.mean": 0.019270506199169538,
            "per_token.throughput.value": 51.89277280339916
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "dcdda5324bcc7a750b5e40e11dd795442204ff27",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.021743040084838867,
            "prefill.throughput.value": 321.9421006762071,
            "decode.latency.mean": 2.5406910400390625,
            "decode.throughput.value": 49.98640054953215,
            "per_token.latency.mean": 0.02008856086580179,
            "per_token.throughput.value": 49.779573891844706
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "dcdda5324bcc7a750b5e40e11dd795442204ff27",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014116623878479003,
            "prefill.throughput.value": 495.86927159486066,
            "decode.latency.mean": 1.5734918212890625,
            "decode.throughput.value": 80.71220852991591,
            "per_token.latency.mean": 0.012442798056621324,
            "per_token.throughput.value": 80.36777543519312
        }
    }
]
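
A minimal sketch of how a file like the one above could be read and compared, assuming it is saved locally as "summaries.json"; the filename and the two config strings picked out below are taken from the entries in this file, and the script is illustrative rather than part of the benchmark output.

    # Illustrative only: load the benchmark summaries and compare configurations.
    import json

    with open("summaries.json") as f:
        summaries = json.load(f)

    # Index each entry by its backend config string.
    by_config = {entry["config"]: entry["metrics"] for entry in summaries}

    # Config strings as they appear in this file (assumed layout).
    baseline = by_config["backend.cache_implementation=null,backend.torch_compile=False"]
    compiled = by_config["backend.cache_implementation=static,backend.torch_compile=True"]

    for name, metrics in by_config.items():
        print(f"{name}: "
              f"decode {metrics['decode.throughput.value']:.1f} tok/s, "
              f"prefill {metrics['prefill.latency.mean'] * 1000:.2f} ms")

    # Decode-throughput speedup of static cache + torch.compile over the eager baseline.
    speedup = compiled["decode.throughput.value"] / baseline["decode.throughput.value"]
    print(f"decode speedup: {speedup:.2f}x")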