[
{
"model": "google/gemma-2b",
"commit": "74a207404e8d4524d1fdc4aa23789694f9eef347",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.020358543395996093,
"prefill.throughput.value": 343.8359937566401,
"decode.latency.mean": 2.4922604980468748,
"decode.throughput.value": 50.95775505791906,
"per_token.latency.mean": 0.019697526441732414,
"per_token.throughput.value": 50.76779579192935
}
},
{
"model": "google/gemma-2b",
"commit": "74a207404e8d4524d1fdc4aa23789694f9eef347",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.0221495361328125,
"prefill.throughput.value": 316.03370644092826,
"decode.latency.mean": 2.602114013671875,
"decode.throughput.value": 48.80647017491318,
"per_token.latency.mean": 0.020574183678909723,
"per_token.throughput.value": 48.6046015534062
}
},
{
"model": "google/gemma-2b",
"commit": "74a207404e8d4524d1fdc4aa23789694f9eef347",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.01409222412109375,
"prefill.throughput.value": 496.7278365607418,
"decode.latency.mean": 1.56042431640625,
"decode.throughput.value": 81.38811902937309,
"per_token.latency.mean": 0.012339556185153163,
"per_token.throughput.value": 81.04019180229436
}
}
]