benchmark_results/2024-09-11/summaries.json
hf-transformers-bot's picture
Upload folder using huggingface_hub
1f148af verified
raw
history blame
1.73 kB
[
{
"model": "google/gemma-2b",
"commit": "f38590dade57c1f8cf8a67e9409dae8935f8c478",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.02084187126159668,
"prefill.throughput.value": 335.86235670202177,
"decode.latency.mean": 2.465659790039062,
"decode.throughput.value": 51.507511503842956,
"per_token.latency.mean": 0.01941322982968308,
"per_token.throughput.value": 51.51126364717462
}
},
{
"model": "google/gemma-2b",
"commit": "f38590dade57c1f8cf8a67e9409dae8935f8c478",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.02260856056213379,
"prefill.throughput.value": 309.6172346205902,
"decode.latency.mean": 2.7054483642578124,
"decode.throughput.value": 46.942311550950635,
"per_token.latency.mean": 0.02130169141574169,
"per_token.throughput.value": 46.94462897256188
}
},
{
"model": "google/gemma-2b",
"commit": "f38590dade57c1f8cf8a67e9409dae8935f8c478",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.014381855964660645,
"prefill.throughput.value": 486.7243850307308,
"decode.latency.mean": 1.562775146484375,
"decode.throughput.value": 81.26568961996848,
"per_token.latency.mean": 0.01230390424803486,
"per_token.throughput.value": 81.27501481163728
}
}
]