[
{
"model": "google/gemma-2b",
"commit": "74a207404e8d4524d1fdc4aa23789694f9eef347",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.019968095779418944,
"prefill.throughput.value": 350.5592159275838,
"decode.latency.mean": 2.421538330078125,
"decode.throughput.value": 52.4460003058893,
"per_token.latency.mean": 0.019144975224973656,
"per_token.throughput.value": 52.23302659047322
}
},
{
"model": "google/gemma-2b",
"commit": "74a207404e8d4524d1fdc4aa23789694f9eef347",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.022197407722473146,
"prefill.throughput.value": 315.35213875055535,
"decode.latency.mean": 2.6403282470703124,
"decode.throughput.value": 48.10008003395722,
"per_token.latency.mean": 0.020876559193426444,
"per_token.throughput.value": 47.90061382887642
}
},
{
"model": "google/gemma-2b",
"commit": "74a207404e8d4524d1fdc4aa23789694f9eef347",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.014264544010162354,
"prefill.throughput.value": 490.72721813000516,
"decode.latency.mean": 1.5749666748046875,
"decode.throughput.value": 80.63662681354788,
"per_token.latency.mean": 0.012454191584832113,
"per_token.throughput.value": 80.29425219521227
}
}
]