[
{
"model": "google/gemma-2b",
"commit": "cea9ec086a14da1320940c5e48d7bd5dbcf32734",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.020966336250305175,
"prefill.throughput.value": 333.8685365163936,
"decode.latency.mean": 2.5033509521484376,
"decode.throughput.value": 50.731999798512255,
"per_token.latency.mean": 0.019710193911875326,
"per_token.throughput.value": 50.7351680288393
}
},
{
"model": "google/gemma-2b",
"commit": "cea9ec086a14da1320940c5e48d7bd5dbcf32734",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.02228686428070068,
"prefill.throughput.value": 314.0863565118783,
"decode.latency.mean": 2.625386474609375,
"decode.throughput.value": 48.37383037820975,
"per_token.latency.mean": 0.02067133874217356,
"per_token.throughput.value": 48.37616046414087
}
},
{
"model": "google/gemma-2b",
"commit": "cea9ec086a14da1320940c5e48d7bd5dbcf32734",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.014213151931762695,
"prefill.throughput.value": 492.501595255365,
"decode.latency.mean": 1.5640379028320313,
"decode.throughput.value": 81.20007818866719,
"per_token.latency.mean": 0.01231400713207215,
"per_token.throughput.value": 81.20833367031875
}
}
]