benchmark_results/2024-06-27/summaries.json
hf-transformers-bot's picture
Upload folder using huggingface_hub
bd4e0df verified
raw
history blame
1.73 kB
[
{
"model": "google/gemma-2b",
"commit": "1de7dc7403b3b89ec421d43a8c9ee245211a61f6",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.01987712001800537,
"prefill.throughput.value": 352.16369341530174,
"decode.latency.mean": 2.3944873046875,
"decode.throughput.value": 53.03849377333597,
"per_token.latency.mean": 0.018929073069877775,
"per_token.throughput.value": 52.82878862099807
}
},
{
"model": "google/gemma-2b",
"commit": "1de7dc7403b3b89ec421d43a8c9ee245211a61f6",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.022656448364257814,
"prefill.throughput.value": 308.96281215210263,
"decode.latency.mean": 2.652583251953125,
"decode.throughput.value": 47.877856390176845,
"per_token.latency.mean": 0.020973070302970798,
"per_token.throughput.value": 47.68019110002944
}
},
{
"model": "google/gemma-2b",
"commit": "1de7dc7403b3b89ec421d43a8c9ee245211a61f6",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.014239791870117188,
"prefill.throughput.value": 491.5802185767756,
"decode.latency.mean": 1.572478271484375,
"decode.throughput.value": 80.76423204252966,
"per_token.latency.mean": 0.01243496223871887,
"per_token.throughput.value": 80.41841871351163
}
}
]