[
  {
    "model": "google/gemma-2b",
    "commit": "2505357e4fdea82d1586b8c0862b4eeb46fac457",
    "config": "backend.cache_implementation=null,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.02032707118988037,
      "prefill.throughput.value": 344.36835167305753,
      "decode.latency.mean": 2.403255249023437,
      "decode.throughput.value": 52.84499016557083,
      "per_token.latency.mean": 0.0189985674831707,
      "per_token.throughput.value": 52.63554743723807
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "2505357e4fdea82d1586b8c0862b4eeb46fac457",
    "config": "backend.cache_implementation=static,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.02256251239776611,
      "prefill.throughput.value": 310.24913700182884,
      "decode.latency.mean": 2.5812564697265623,
      "decode.throughput.value": 49.20084520444935,
      "per_token.latency.mean": 0.020413658503958362,
      "per_token.throughput.value": 48.98680948376267
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "2505357e4fdea82d1586b8c0862b4eeb46fac457",
    "config": "backend.cache_implementation=static,backend.torch_compile=True",
    "metrics": {
      "prefill.latency.mean": 0.01407475233078003,
      "prefill.throughput.value": 497.3444530666251,
      "decode.latency.mean": 1.559105651855469,
      "decode.throughput.value": 81.4569556905006,
      "per_token.latency.mean": 0.012329652096443025,
      "per_token.throughput.value": 81.10528927969423
    }
  }
]