{
    "google/gemma-2b": {
        "backend.cache_implementation=null,backend.torch_compile=False": {
            "9ba9369a2557e53a01378199a9839ec6e82d8bc7": {
                "metrics": {
                    "prefill.latency.mean": 0.019696576118469238,
                    "prefill.throughput.value": 355.3917167073614,
                    "decode.latency.mean": 2.428328125,
                    "decode.throughput.value": 52.29935719663091,
                    "per_token.latency.mean": 0.01919638968456404,
                    "per_token.throughput.value": 52.09312878265373
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=False": {
            "9ba9369a2557e53a01378199a9839ec6e82d8bc7": {
                "metrics": {
                    "prefill.latency.mean": 0.022163279533386232,
                    "prefill.throughput.value": 315.8377346391976,
                    "decode.latency.mean": 2.604711669921875,
                    "decode.throughput.value": 48.75779590752523,
                    "per_token.latency.mean": 0.020594631338307978,
                    "per_token.throughput.value": 48.556343814705954
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=True": {
            "9ba9369a2557e53a01378199a9839ec6e82d8bc7": {
                "metrics": {
                    "prefill.latency.mean": 0.014070287704467773,
                    "prefill.throughput.value": 497.50226484546386,
                    "decode.latency.mean": 1.5731578369140626,
                    "decode.throughput.value": 80.72934388397142,
                    "per_token.latency.mean": 0.012440422197575626,
                    "per_token.throughput.value": 80.38312399034808
                }
            }
        }
    }
}
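
The file maps model name → benchmark configuration → commit hash → metrics. A minimal sketch of how one might read it and compare decode throughput across configurations; the filename `benchmark_results.json` is an assumption, not part of this repository:

```python
import json

# Load the benchmark JSON shown above (filename is hypothetical).
with open("benchmark_results.json") as f:
    results = json.load(f)

model = "google/gemma-2b"
baseline_key = "backend.cache_implementation=null,backend.torch_compile=False"
compiled_key = "backend.cache_implementation=static,backend.torch_compile=True"

def decode_throughput(config_key: str) -> float:
    # Each configuration maps a commit hash to its metrics; take the only entry.
    per_commit = results[model][config_key]
    metrics = next(iter(per_commit.values()))["metrics"]
    return metrics["decode.throughput.value"]

baseline = decode_throughput(baseline_key)
compiled = decode_throughput(compiled_key)
print(f"decode throughput: {baseline:.1f} -> {compiled:.1f} tokens/s "
      f"({compiled / baseline:.2f}x with static cache + torch.compile)")
```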