benchmark_results / 2024-06-06 / summaries.json
hf-transformers-bot's picture
Upload folder using huggingface_hub
df108f2 verified
raw
history blame
1.72 kB
[
{
"model": "google/gemma-2b",
"commit": "940fde8dafaecb8f17b588c5078291f1c1a420c8",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.02033520030975342,
"prefill.throughput.value": 344.2306883322204,
"decode.latency.mean": 2.4782998046875,
"decode.throughput.value": 51.244808945144555,
"per_token.latency.mean": 0.01958842790173919,
"per_token.throughput.value": 51.05054908011344
}
},
{
"model": "google/gemma-2b",
"commit": "940fde8dafaecb8f17b588c5078291f1c1a420c8",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.025318976402282714,
"prefill.throughput.value": 276.4724722192518,
"decode.latency.mean": 2.537893798828125,
"decode.throughput.value": 50.04149506123636,
"per_token.latency.mean": 0.020059904995643103,
"per_token.throughput.value": 49.85068474737015
}
},
{
"model": "google/gemma-2b",
"commit": "940fde8dafaecb8f17b588c5078291f1c1a420c8",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.01400380802154541,
"prefill.throughput.value": 499.86403621288036,
"decode.latency.mean": 1.5594651489257814,
"decode.throughput.value": 81.4381777543938,
"per_token.latency.mean": 0.012331704181173573,
"per_token.throughput.value": 81.09179277318934
}
}
]