cuda
/
cuda_inference_transformers_fill-mask_hf-internal-testing
/tiny-random-BertModel
/benchmark.json

IlyasMoutawwakil
HF staff
Upload cuda_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel/benchmark.json with huggingface_hub
05b8617
verified
{
    "config": {
        "name": "cuda_inference_transformers_fill-mask_hf-internal-testing/tiny-random-BertModel",
        "backend": {
            "name": "pytorch",
            "version": "2.5.1+cu124",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "fill-mask",
            "library": "transformers",
            "model_type": "bert",
            "model": "hf-internal-testing/tiny-random-BertModel",
            "processor": "hf-internal-testing/tiny-random-BertModel",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 2,
                "sequence_length": 16,
                "num_choices": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": true,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7R32",
            "cpu_count": 16,
            "cpu_ram_mb": 66697.248768,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.10.228-219.884.amzn2.x86_64-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "NVIDIA A10G"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 24146608128,
            "optimum_benchmark_version": "0.5.0.dev0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.47.0",
            "transformers_commit": null,
            "accelerate_version": "1.2.0",
            "accelerate_commit": null,
            "diffusers_version": "0.31.0",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.12",
            "timm_commit": null,
            "peft_version": "0.14.0",
            "peft_commit": null
        },
        "print_report": true,
        "log_report": true
    },
    "report": {
        "load_model": {
            "memory": {
                "unit": "MB",
                "max_ram": 799.014912,
                "max_global_vram": 639.107072,
                "max_process_vram": 0.0,
                "max_reserved": 2.097152,
                "max_allocated": 0.534528
            },
            "latency": {
                "unit": "s",
                "values": [
                    0.05666099166870117
                ],
                "count": 1,
                "total": 0.05666099166870117,
                "mean": 0.05666099166870117,
                "p50": 0.05666099166870117,
                "p90": 0.05666099166870117,
                "p95": 0.05666099166870117,
                "p99": 0.05666099166870117,
                "stdev": 0,
                "stdev_": 0
            },
            "throughput": null,
            "energy": {
                "unit": "kWh",
                "cpu": 9.589206865416694e-05,
                "ram": 5.253550660922821e-05,
                "gpu": 0.00013195871667800313,
                "total": 0.0002803862919413983
            },
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1038.31552,
                "max_global_vram": 670.564352,
                "max_process_vram": 0.0,
                "max_reserved": 23.068672,
                "max_allocated": 10.112
            },
            "latency": {
                "unit": "s",
                "values": [
                    0.0028774399757385254,
                    0.0026746881008148193,
                    0.0027032959461212158,
                    0.002702336072921753,
                    0.0026603519916534426,
                    0.0027934720516204835,
                    0.0028108799457550048,
                    0.0027740159034729006,
                    0.002913280010223389,
                    0.002944000005722046,
                    0.0029378559589385987,
                    0.002846719980239868,
                    0.0029521920680999755,
                    0.002916352033615112,
                    0.0029020159244537353,
                    0.002845695972442627,
                    0.002942975997924805,
                    0.0029061119556427002,
                    0.002916352033615112,
                    0.002826240062713623,
                    0.002836479902267456,
                    0.0027985920906066896,
                    0.002775039911270142,
                    0.0029716479778289797,
                    0.002876415967941284,
                    0.002862080097198486,
                    0.002838464021682739,
                    0.0028538880348205568,
                    0.0028938241004943847,
                    0.002820096015930176,
                    0.002891776084899902,
                    0.002942975997924805,
                    0.002799616098403931,
                    0.002615295886993408,
                    0.0026387839317321775,
                    0.002643968105316162,
                    0.0026767361164093018,
                    0.00275763201713562,
                    0.0027535679340362548,
                    0.002763808012008667,
                    0.0028221440315246583,
                    0.0027811839580535887,
                    0.0027944960594177247,
                    0.0027893760204315186,
                    0.0028446719646453857,
                    0.0028098559379577635,
                    0.0027299840450286865,
                    0.002810784101486206,
                    0.0027893760204315186,
                    0.002758591890335083,
                    0.002845695972442627,
                    0.002815999984741211,
                    0.002778111934661865,
                    0.002762752056121826,
                    0.0028088319301605223,
                    0.002826175928115845,
                    0.002830336093902588,
                    0.002944000005722046,
                    0.0029470720291137694,
                    0.0029992959499359133,
                    0.002905087947845459,
                    0.002942975997924805,
                    0.002924544095993042,
                    0.0030464000701904297,
                    0.002905087947845459,
                    0.0029480960369110106,
                    0.002928704023361206,
                    0.0029184000492095945,
                    0.0028190720081329346,
                    0.002850816011428833,
                    0.002820096015930176,
                    0.002748415946960449,
                    0.0027146239280700685,
                    0.002766848087310791,
                    0.0029061119556427002,
                    0.0028641281127929686,
                    0.0029594240188598633,
                    0.0028979198932647704,
                    0.002894848108291626,
                    0.0027545599937438964,
                    0.002870271921157837,
                    0.002853951930999756,
                    0.002862080097198486,
                    0.0029061119556427002,
                    0.002871295928955078,
                    0.0028241920471191407,
                    0.002723839998245239,
                    0.002748415946960449,
                    0.0027596800327301025,
                    0.0027381761074066163,
                    0.002929663896560669,
                    0.0028569600582122805,
                    0.002860095977783203,
                    0.002762752056121826,
                    0.002819999933242798,
                    0.0027944960594177247,
                    0.0027351040840148926,
                    0.0027822721004486083,
                    0.0028180480003356934,
                    0.0028784000873565674,
                    0.002934783935546875,
                    0.0029859519004821777,
                    0.00301363205909729,
                    0.002831360101699829,
                    0.002706432104110718,
                    0.002734015941619873,
                    0.002791424036026001,
                    0.0027217919826507567,
                    0.0027607040405273437,
                    0.0027351040840148926,
                    0.0027442560195922853,
                    0.002831360101699829,
                    0.0029521920680999755,
                    0.002939903974533081,
                    0.002753535985946655,
                    0.0028190720081329346,
                    0.002787328004837036,
                    0.0027432959079742433,
                    0.002775039911270142,
                    0.0027729918956756593,
                    0.0030310399532318113,
                    0.0028129279613494872,
                    0.0027975680828094484,
                    0.0027667839527130127,
                    0.0027299840450286865,
                    0.0028026878833770754,
                    0.0027607040405273437,
                    0.002707456111907959,
                    0.002766848087310791,
                    0.0028057599067687986,
                    0.0028344318866729734,
                    0.0028559360504150392,
                    0.0029808640480041503,
                    0.003023871898651123,
                    0.0029777920246124266,
                    0.0028897280693054197,
                    0.002933759927749634,
                    0.0029767680168151854,
                    0.002958336114883423,
                    0.0029081599712371827,
                    0.002972671985626221,
                    0.002861056089401245,
                    0.0027402238845825196,
                    0.0028231680393218995,
                    0.0028897280693054197,
                    0.002877408027648926,
                    0.0028108799457550048,
                    0.0028344318866729734,
                    0.0027688961029052736,
                    0.0026890239715576174,
                    0.002746367931365967,
                    0.0028518400192260743,
                    0.0029061119556427002,
                    0.0027135999202728273,
                    0.0027985920906066896,
                    0.0027381761074066163,
                    0.002736128091812134,
                    0.002770944118499756,
                    0.00279967999458313,
                    0.002714688062667847,
                    0.0027555840015411376,
                    0.002711551904678345,
                    0.0027136640548706056,
                    0.002778111934661865,
                    0.002960383892059326,
                    0.0028037118911743166,
                    0.0026419200897216796,
                    0.0029378559589385987,
                    0.0028569600582122805,
                    0.0027156479358673097,
                    0.0026705920696258544,
                    0.0027012479305267333,
                    0.002741247892379761,
                    0.002668544054031372,
                    0.0026920959949493406,
                    0.002732032060623169,
                    0.002826240062713623,
                    0.0027248640060424803,
                    0.0026910719871521,
                    0.002669568061828613,
                    0.0027299840450286865,
                    0.0028334081172943113,
                    0.002954240083694458,
                    0.002742271900177002,
                    0.00276582407951355,
                    0.0027392001152038575,
                    0.0028098559379577635,
                    0.0027893760204315186,
                    0.003202048063278198,
                    0.0034693119525909424,
                    0.0029112319946289064,
                    0.0029224960803985595,
                    0.002936863899230957,
                    0.002858047962188721,
                    0.002894848108291626,
                    0.0027504639625549315,
                    0.003357696056365967,
                    0.0027453439235687258,
                    0.002663424015045166,
                    0.002708479881286621,
                    0.002698240041732788,
                    0.002678783893585205,
                    0.002712575912475586,
                    0.0026972479820251464,
                    0.002673664093017578,
                    0.0027095038890838623,
                    0.002698240041732788,
                    0.0026920959949493406,
                    0.002777087926864624,
                    0.0027105278968811036,
                    0.00268287992477417,
                    0.0027586560249328613,
                    0.002699264049530029,
                    0.0026951680183410643,
                    0.002775039911270142,
                    0.002702336072921753,
                    0.002753535985946655,
                    0.0026634559631347654,
                    0.0027033278942108154,
                    0.002706432104110718,
                    0.002643968105316162,
                    0.0027095038890838623,
                    0.002708479881286621,
                    0.002669568061828613,
                    0.0027279040813446044,
                    0.0027545599937438964,
                    0.0026470398902893066,
                    0.0027187199592590334,
                    0.002706432104110718,
                    0.0026685121059417723,
                    0.0027054080963134766,
                    0.002699264049530029,
                    0.0027043840885162354,
                    0.0026603519916534426,
                    0.002786303997039795,
                    0.002708479881286621,
                    0.0027135999202728273,
                    0.0026961920261383055,
                    0.0027105278968811036,
                    0.0026736319065093993,
                    0.0027105278968811036,
                    0.002716671943664551,
                    0.002708479881286621,
                    0.0027904000282287598,
                    0.002712575912475586,
                    0.0026552319526672364,
                    0.002722815990447998,
                    0.002769920110702515,
                    0.002696160078048706,
                    0.002756608009338379,
                    0.0026757760047912598,
                    0.0027238719463348387,
                    0.0027043840885162354,
                    0.002678783893585205,
                    0.0026818881034851075,
                    0.0026746881008148193,
                    0.0027637760639190674,
                    0.0026746881008148193,
                    0.0026593279838562013,
                    0.002698240041732788,
                    0.0026839039325714112,
                    0.0026552319526672364,
                    0.0027043840885162354,
                    0.002673664093017578,
                    0.0026818559169769288,
                    0.002707456111907959,
                    0.0026890239715576174,
                    0.002662400007247925,
                    0.0026910719871521,
                    0.0026808319091796875,
                    0.0026675200462341307,
                    0.002687999963760376,
                    0.0026808319091796875,
                    0.002669568061828613,
                    0.002678719997406006,
                    0.0026716160774230956,
                    0.0026920320987701414,
                    0.002677759885787964,
                    0.002717695951461792,
                    0.002694144010543823,
                    0.0026859519481658937,
                    0.0026808319091796875,
                    0.0026798079013824463,
                    0.0026603519916534426,
                    0.002748415946960449,
                    0.0027648000717163087,
                    0.0026961920261383055,
                    0.0027156479358673097,
                    0.0026818559169769288,
                    0.002687999963760376,
                    0.002717695951461792,
                    0.0026951680183410643,
                    0.0026849279403686525,
                    0.0027002880573272704,
                    0.0026900479793548586,
                    0.0026715519428253173,
                    0.0026961920261383055,
                    0.0027207679748535154,
                    0.0027156479358673097,
                    0.002695199966430664,
                    0.0026972160339355468,
                    0.0027146239280700685,
                    0.0026715519428253173,
                    0.002678783893585205,
                    0.002699264049530029,
                    0.0027207679748535154,
                    0.0027085120677948,
                    0.0027648000717163087,
                    0.0026757121086120605,
                    0.002697184085845947,
                    0.0026828160285949708,
                    0.002663424015045166,
                    0.002706432104110718,
                    0.002686975955963135,
                    0.002672640085220337,
                    0.002703360080718994,
                    0.0026839039325714112,
                    0.0026849279403686525,
                    0.0026859519481658937,
                    0.0026859519481658937,
                    0.0026705920696258544,
                    0.0026818559169769288,
                    0.00268287992477417,
                    0.002698240041732788,
                    0.0026818559169769288,
                    0.0026900479793548586,
                    0.0026859519481658937,
                    0.0026798079013824463,
                    0.0026859519481658937,
                    0.002707456111907959,
                    0.002673664093017578,
                    0.002717695951461792,
                    0.002698240041732788,
                    0.0026859519481658937,
                    0.0027136640548706056,
                    0.0026798079013824463,
                    0.0026808319091796875,
                    0.0027248640060424803,
                    0.00268287992477417,
                    0.002671583890914917,
                    0.002699264049530029,
                    0.0027187199592590334,
                    0.0026706559658050536,
                    0.0026798079013824463,
                    0.0026859519481658937,
                    0.002668544054031372,
                    0.0026839039325714112,
                    0.0026859519481658937,
                    0.002712575912475586,
                    0.002663424015045166,
                    0.0027075200080871583,
                    0.0027074880599975587,
                    0.0026798079013824463,
                    0.0026818559169769288,
                    0.002698240041732788,
                    0.0026859519481658937,
                    0.0026839039325714112
                ],
                "count": 357,
                "total": 0.9891591353416446,
                "mean": 0.0027707538805088073,
                "p50": 0.0027351040840148926,
                "p90": 0.0029233152866363524,
                "p95": 0.002952601671218872,
                "p99": 0.0030377984046936033,
                "stdev": 0.00010735937395649541,
                "stdev_": 3.8747351293713783
            },
            "throughput": {
                "unit": "samples/s",
                "value": 721.8252094021177
            },
            "energy": {
                "unit": "kWh",
                "cpu": 3.150399591089434e-08,
                "ram": 1.7215031121280853e-08,
                "gpu": 4.470304994680101e-08,
                "total": 9.342207697897618e-08
            },
            "efficiency": {
                "unit": "samples/kWh",
                "value": 21408215.966447443
            }
        }
    }
}