{
    "google/gemma-2b": {
        "backend.cache_implementation=null,backend.torch_compile=False": {
            "c212ac9a0262d47b19675d5d7a9c729ebbdc3ef4": {
                "metrics": {
                    "prefill.latency.mean": 0.019582624435424807,
                    "prefill.throughput.value": 357.4597482111263,
                    "decode.latency.mean": 2.34328271484375,
                    "decode.throughput.value": 54.19747228770403,
                    "per_token.latency.mean": 0.01852147654205443,
                    "per_token.throughput.value": 53.991375780944
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=False": {
            "c212ac9a0262d47b19675d5d7a9c729ebbdc3ef4": {
                "metrics": {
                    "prefill.latency.mean": 0.02187764835357666,
                    "prefill.throughput.value": 319.96126306032374,
                    "decode.latency.mean": 2.530910888671875,
                    "decode.throughput.value": 50.17956205745542,
                    "per_token.latency.mean": 0.020008462197224613,
                    "per_token.throughput.value": 49.978853454250505
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=True": {
            "c212ac9a0262d47b19675d5d7a9c729ebbdc3ef4": {
                "metrics": {
                    "prefill.latency.mean": 0.01412662410736084,
                    "prefill.throughput.value": 495.51824602967736,
                    "decode.latency.mean": 1.5754417114257813,
                    "decode.throughput.value": 80.61231277484997,
                    "per_token.latency.mean": 0.012458251127612448,
                    "per_token.throughput.value": 80.26808817359618
                }
            }
        }
    }
}