benchmark_results/2024-08-26/summaries.json
[
    {
        "model": "google/gemma-2b",
        "commit": "0a7af19f4dc868bafc82f35eb7e8d13bac87a594",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.020607343673706056,
            "prefill.throughput.value": 339.6847313674713,
            "decode.latency.mean": 2.4659722900390624,
            "decode.throughput.value": 51.500984221517044,
            "per_token.latency.mean": 0.01941582211171548,
            "per_token.throughput.value": 51.50438617773498
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "0a7af19f4dc868bafc82f35eb7e8d13bac87a594",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.022281279563903807,
            "prefill.throughput.value": 314.16508104589127,
            "decode.latency.mean": 2.5893426513671876,
            "decode.throughput.value": 49.047197339040196,
            "per_token.latency.mean": 0.020387485233817514,
            "per_token.throughput.value": 49.049698309100975
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "0a7af19f4dc868bafc82f35eb7e8d13bac87a594",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014160320281982422,
            "prefill.throughput.value": 494.33910113649006,
            "decode.latency.mean": 1.5661528930664064,
            "decode.throughput.value": 81.09042262875357,
            "per_token.latency.mean": 0.012330753979720468,
            "per_token.throughput.value": 81.09804166433216
        }
    }
]
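
Each entry pairs a benchmark config (cache implementation and `torch_compile` setting) with its measured prefill/decode metrics. A minimal sketch of how these summaries could be read and compared, assuming the file is available locally as `summaries.json` (the path and script are illustrative, not part of the benchmark tooling):

```python
import json

# Load the benchmark summaries and print one line per config,
# comparing prefill latency and decode throughput.
with open("summaries.json") as f:
    summaries = json.load(f)

for entry in summaries:
    metrics = entry["metrics"]
    prefill_ms = metrics["prefill.latency.mean"] * 1000  # seconds -> ms
    decode_tps = metrics["decode.throughput.value"]      # tokens/second
    print(f"{entry['model']} | {entry['config']} | "
          f"prefill {prefill_ms:.2f} ms | decode {decode_tps:.1f} tok/s")
```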