Upload cuda_inference_transformers_text-generation_openai-community/gpt2/benchmark.json with huggingface_hub
{
    "config": {
        "name": "cuda_inference_transformers_text-generation_openai-community/gpt2",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model": "openai-community/gpt2",
            "processor": "openai-community/gpt2",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.2.1",
            "optimum_benchmark_commit": "156844ab796ad7cf3da92a0bf30b174d1bcc0aa5",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "prefill": {
            "memory": {
                "unit": "MB",
                "max_ram": 1048.813568,
                "max_global_vram": 1107.550208,
                "max_process_vram": 278173.339648,
                "max_reserved": 725.614592,
                "max_allocated": 513.035776
            },
            "latency": {
                "unit": "s",
                "count": 62,
                "total": 0.5022576332092284,
                "mean": 0.008100929567890781,
                "stdev": 0.00044579104776226177,
                "p50": 0.007892645359039305,
                "p90": 0.008507093906402587,
                "p95": 0.008735318183898924,
                "p99": 0.009591236648559572,
                "values": [
                    0.010715686798095703,
                    0.008736326217651367,
                    0.008716165542602539,
                    0.00850032615661621,
                    0.008378245353698731,
                    0.00832192611694336,
                    0.007788644790649414,
                    0.00780224609375,
                    0.007812806129455566,
                    0.007835206031799316,
                    0.007862085819244385,
                    0.00784976577758789,
                    0.007947045803070069,
                    0.008007366180419922,
                    0.00788800621032715,
                    0.008049446105957031,
                    0.007855044841766358,
                    0.007851204872131347,
                    0.007851844787597656,
                    0.007856325149536133,
                    0.007856645107269288,
                    0.007849605083465576,
                    0.007854724884033203,
                    0.007891524791717529,
                    0.008295045852661134,
                    0.008297765731811523,
                    0.008437286376953125,
                    0.00817232608795166,
                    0.0081593656539917,
                    0.008295366287231445,
                    0.008360486030578613,
                    0.008580645561218262,
                    0.008132165908813476,
                    0.007865285873413086,
                    0.007893765926361084,
                    0.00789584493637085,
                    0.007867364883422852,
                    0.007895685195922852,
                    0.008872325897216798,
                    0.008507845878601074,
                    0.008867046356201173,
                    0.008300326347351073,
                    0.008310405731201172,
                    0.008205286026000977,
                    0.008234246253967285,
                    0.008309605598449708,
                    0.0077521648406982425,
                    0.00783632516860962,
                    0.007816326141357422,
                    0.007836966037750245,
                    0.00784096622467041,
                    0.00783824586868286,
                    0.007819046020507812,
                    0.00785904598236084,
                    0.007831686019897461,
                    0.007854405879974366,
                    0.007925605773925782,
                    0.007944806098937988,
                    0.007819045066833495,
                    0.007834404945373534,
                    0.0078286452293396,
                    0.007786244869232178
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 246.88524733350263
            },
            "energy": null,
            "efficiency": null
        },
        "decode": {
            "memory": {
                "unit": "MB",
                "max_ram": 1048.813568,
                "max_global_vram": 1107.542016,
                "max_process_vram": 257819.164672,
                "max_reserved": 725.614592,
                "max_allocated": 513.25952
            },
            "latency": {
                "unit": "s",
                "count": 62,
                "total": 0.5003808221817018,
                "mean": 0.00807065842228551,
                "stdev": 0.0004766203563662722,
                "p50": 0.00792624545097351,
                "p90": 0.008505366325378419,
                "p95": 0.00858242998123169,
                "p99": 0.009785139961242677,
                "values": [
                    0.008944486618041992,
                    0.008636006355285645,
                    0.008507526397705079,
                    0.008251206398010254,
                    0.011100008010864257,
                    0.007769925117492676,
                    0.007638885021209717,
                    0.007775364875793457,
                    0.007941285133361816,
                    0.0077688050270080565,
                    0.007890725135803222,
                    0.007924005031585694,
                    0.007836804866790772,
                    0.007956805229187012,
                    0.007865445137023926,
                    0.00782896614074707,
                    0.007863205909729004,
                    0.007840806007385254,
                    0.007914885997772217,
                    0.007868165969848634,
                    0.007954405784606934,
                    0.007836485862731933,
                    0.007928485870361329,
                    0.008053285598754883,
                    0.008242404937744141,
                    0.008570726394653321,
                    0.008159686088562011,
                    0.00814144515991211,
                    0.008252326011657716,
                    0.00817856502532959,
                    0.008329925537109375,
                    0.00826944637298584,
                    0.008079364776611328,
                    0.007883524894714356,
                    0.008006244659423829,
                    0.007988805770874024,
                    0.0077833662033081055,
                    0.008485925674438477,
                    0.008455845832824707,
                    0.008583045959472657,
                    0.008520646095275879,
                    0.008135685920715332,
                    0.008036165237426758,
                    0.008311845779418946,
                    0.00818736457824707,
                    0.007823686122894287,
                    0.007763686180114746,
                    0.007671844959259033,
                    0.007776004791259766,
                    0.007764804840087891,
                    0.007867044925689697,
                    0.00782976484298706,
                    0.007875205039978028,
                    0.00793296480178833,
                    0.007740485191345215,
                    0.00781616497039795,
                    0.0079499249458313,
                    0.007712325096130371,
                    0.007911045074462891,
                    0.007880805015563964,
                    0.00789888620376587,
                    0.007667844772338867
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 123.90562797685749
            },
            "energy": null,
            "efficiency": null
        },
        "per_token": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 61,
                "total": 0.9864467725753784,
                "mean": 0.016171258566809482,
                "stdev": 0.0006720809997592394,
                "p50": 0.015867530822753908,
                "p90": 0.016913772583007813,
                "p95": 0.017278411865234374,
                "p99": 0.018290701675415036,
                "values": [
                    0.017258411407470704,
                    0.017278411865234374,
                    0.01679505157470703,
                    0.019544654846191407,
                    0.016308971405029297,
                    0.015618730545043945,
                    0.015557130813598634,
                    0.01566689109802246,
                    0.015670571327209473,
                    0.015703371047973633,
                    0.015796010971069335,
                    0.015892810821533203,
                    0.016008811950683592,
                    0.01576913070678711,
                    0.015951531410217285,
                    0.015787851333618163,
                    0.015743210792541505,
                    0.015746891021728517,
                    0.01575697135925293,
                    0.01579249095916748,
                    0.015768811225891112,
                    0.015802250862121583,
                    0.015914570808410643,
                    0.016601451873779295,
                    0.01685793113708496,
                    0.016677291870117187,
                    0.016417612075805665,
                    0.01642673110961914,
                    0.01660017204284668,
                    0.016769611358642578,
                    0.016913772583007813,
                    0.0163500919342041,
                    0.015878411293029786,
                    0.01592673110961914,
                    0.015867530822753908,
                    0.015855051040649413,
                    0.016306890487670898,
                    0.017454732894897462,
                    0.017107532501220702,
                    0.017421932220458985,
                    0.016490251541137695,
                    0.016478412628173827,
                    0.016417451858520508,
                    0.016557132720947266,
                    0.016228651046752928,
                    0.01555729103088379,
                    0.015666730880737306,
                    0.015601290702819823,
                    0.015687850952148436,
                    0.015747851371765136,
                    0.015864331245422362,
                    0.01565441131591797,
                    0.015792811393737793,
                    0.015764810562133788,
                    0.015712170600891112,
                    0.01604433059692383,
                    0.015621770858764648,
                    0.015684170722961424,
                    0.01578145122528076,
                    0.015759369850158692,
                    0.015767210960388182
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 61.83810591294599
            },
            "energy": null,
            "efficiency": null
        }
    }
}
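
The report above can be consumed programmatically. Below is a minimal sketch, not part of the uploaded artifact, of how such a benchmark.json could be read back and its throughput figures sanity-checked against the mean latencies; the file name "benchmark.json" and the throughput relationships in the comments are assumptions inferred from this particular file, not a description of optimum-benchmark internals.

# Sketch: load the report and cross-check throughput against mean latency.
# Assumes the file is saved locally as "benchmark.json".
import json

with open("benchmark.json") as f:
    data = json.load(f)

shapes = data["config"]["scenario"]["input_shapes"]
report = data["report"]

# Prefill processes the whole prompt in one forward pass, so in this file
# batch_size * sequence_length / mean prefill latency (1 * 2 / ~0.00810 s)
# reproduces the reported ~246.9 tokens/s.
prefill = report["prefill"]["latency"]
prefill_tps = shapes["batch_size"] * shapes["sequence_length"] / prefill["mean"]

# Decode emits one token per step, so 1 / mean decode latency
# (1 / ~0.00807 s) reproduces the reported ~123.9 tokens/s.
decode = report["decode"]["latency"]
decode_tps = 1.0 / decode["mean"]

print(f"prefill: mean {prefill['mean'] * 1e3:.3f} ms, ~{prefill_tps:.1f} tokens/s")
print(f"decode:  mean {decode['mean'] * 1e3:.3f} ms, ~{decode_tps:.1f} tokens/s")
print("reported:",
      report["prefill"]["throughput"]["value"],
      report["decode"]["throughput"]["value"])

Both recomputed values match the "throughput" entries stored in this file, which makes the script a convenient consistency check when comparing runs across commits.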