rocm/cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
{
  "config": {
    "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
    "backend": {
      "name": "pytorch",
      "version": "2.2.2+rocm5.7",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "image-classification",
      "library": "transformers",
      "model_type": "vit",
      "model": "google/vit-base-patch16-224",
      "processor": "google/vit-base-patch16-224",
      "device": "cuda",
      "device_ids": "4",
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "no_weights": true,
      "device_map": null,
      "torch_dtype": null,
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {},
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 1,
      "duration": 1,
      "warmup_runs": 1,
      "input_shapes": {
        "batch_size": 1,
        "num_choices": 2,
        "sequence_length": 2
      },
      "new_tokens": null,
      "memory": true,
      "latency": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {
        "max_new_tokens": 2,
        "min_new_tokens": 2
      },
      "call_kwargs": {
        "num_inference_steps": 2
      }
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": true,
      "device_isolation_action": "error",
      "numactl": false,
      "numactl_kwargs": {},
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " AMD EPYC 7763 64-Core Processor",
      "cpu_count": 128,
      "cpu_ram_mb": 1082015.256576,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
      "processor": "x86_64",
      "python_version": "3.10.12",
      "gpu": [
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]"
      ],
      "gpu_count": 8,
      "gpu_vram_mb": 549621596160,
      "optimum_benchmark_version": "0.4.0",
      "optimum_benchmark_commit": null,
      "transformers_version": "4.44.2",
      "transformers_commit": null,
      "accelerate_version": "0.33.0",
      "accelerate_commit": null,
      "diffusers_version": "0.30.1",
      "diffusers_commit": null,
      "optimum_version": null,
      "optimum_commit": null,
      "timm_version": "1.0.9",
      "timm_commit": null,
      "peft_version": null,
      "peft_commit": null
    }
  },
  "report": {
    "load": {
      "memory": {
        "unit": "MB",
        "max_ram": 903.561216,
        "max_global_vram": 689.373184,
        "max_process_vram": 46029.70112,
        "max_reserved": 400.556032,
        "max_allocated": 346.271744
      },
      "latency": {
        "unit": "s",
        "count": 1,
        "total": 7.48298974609375,
        "mean": 7.48298974609375,
        "stdev": 0.0,
        "p50": 7.48298974609375,
        "p90": 7.48298974609375,
        "p95": 7.48298974609375,
        "p99": 7.48298974609375,
        "values": [
          7.48298974609375
        ]
      },
      "throughput": null,
      "energy": null,
      "efficiency": null
    },
    "forward": {
      "memory": {
        "unit": "MB",
        "max_ram": 1073.823744,
        "max_global_vram": 777.797632,
        "max_process_vram": 178454.597632,
        "max_reserved": 406.847488,
        "max_allocated": 355.303424
      },
      "latency": {
        "unit": "s",
        "count": 160,
        "total": 0.9954701762199409,
        "mean": 0.006221688601374626,
        "stdev": 0.00047702983820466314,
        "p50": 0.00614205551147461,
        "p90": 0.006313607883453369,
        "p95": 0.006389439153671264,
        "p99": 0.008879868717193599,
        "values": [
          0.007116611957550049,
          0.0064562950134277345,
          0.006213575839996338,
          0.0063498950004577635,
          0.006134856224060059,
          0.006315334796905518,
          0.006058055877685547,
          0.006188295841217041,
          0.006141096115112305,
          0.006084296226501465,
          0.006099815845489502,
          0.006058375835418701,
          0.00607533597946167,
          0.0060718169212341305,
          0.006071657180786133,
          0.00626205587387085,
          0.006075016021728516,
          0.006078695774078369,
          0.006098055839538574,
          0.0060830159187316895,
          0.006150055885314942,
          0.006120776176452637,
          0.006146376132965088,
          0.006107336044311524,
          0.006141575813293457,
          0.0061612558364868165,
          0.006125576019287109,
          0.0061441359519958496,
          0.006202856063842774,
          0.006150215148925781,
          0.006148776054382324,
          0.00612605619430542,
          0.006151656150817871,
          0.006141416072845459,
          0.006158696174621582,
          0.006157256126403808,
          0.0061826958656311035,
          0.006165895938873291,
          0.0061748561859130855,
          0.006153096199035644,
          0.006139656066894531,
          0.006188776016235352,
          0.006141095161437988,
          0.0061649360656738286,
          0.006180295944213867,
          0.006154375076293945,
          0.006148776054382324,
          0.006132775783538818,
          0.006147016048431397,
          0.00612061595916748,
          0.006180935859680175,
          0.0061118159294128414,
          0.006151976108551026,
          0.006171814918518067,
          0.006189735889434814,
          0.006230854988098144,
          0.0062078161239624025,
          0.006205094814300537,
          0.006188615798950196,
          0.006230054855346679,
          0.006225096225738525,
          0.006176936149597168,
          0.00619325590133667,
          0.008398366928100585,
          0.006167655944824219,
          0.006175334930419922,
          0.006163335800170898,
          0.006149576187133789,
          0.006156614780426025,
          0.006157415866851807,
          0.006134535789489746,
          0.006184454917907715,
          0.0061442961692810055,
          0.0063737349510192874,
          0.006175655841827393,
          0.006141735076904297,
          0.006322696208953858,
          0.006274055957794189,
          0.006236295223236084,
          0.006199336051940918,
          0.006387975215911865,
          0.0061985359191894535,
          0.006215015888214112,
          0.006220934867858886,
          0.010402198791503905,
          0.006266534805297852,
          0.0060527768135070804,
          0.006232935905456543,
          0.00636045503616333,
          0.006382534980773926,
          0.006286855220794678,
          0.006057735919952393,
          0.00607917594909668,
          0.006079815864562989,
          0.0060990161895751955,
          0.006089416027069092,
          0.006131175994873047,
          0.00607965612411499,
          0.006127336025238037,
          0.00611501693725586,
          0.006089896202087402,
          0.006103975772857666,
          0.006129096031188965,
          0.006155975818634033,
          0.00620925521850586,
          0.0061423759460449215,
          0.006138376235961914,
          0.006134535789489746,
          0.006068136215209961,
          0.006085576057434082,
          0.006119336128234863,
          0.006057415962219238,
          0.006060935974121094,
          0.00606333589553833,
          0.0061252560615539554,
          0.006102375984191895,
          0.0060726161003112795,
          0.006111015796661377,
          0.006072456836700439,
          0.006076776981353759,
          0.00610973596572876,
          0.00606093692779541,
          0.006101097106933594,
          0.006067017078399658,
          0.006078217029571533,
          0.006081096172332764,
          0.006084136009216309,
          0.009572761535644531,
          0.006331015110015869,
          0.006061736106872558,
          0.006043176174163819,
          0.006017255783081055,
          0.006196296215057373,
          0.006221254825592041,
          0.006031016826629639,
          0.006019337177276611,
          0.006094215869903564,
          0.006624293804168701,
          0.006417253971099854,
          0.006423655033111572,
          0.006261575222015381,
          0.006048136234283448,
          0.006045416831970215,
          0.0060498971939086914,
          0.006084136009216309,
          0.006050695896148681,
          0.006060616016387939,
          0.006082215785980225,
          0.006069575786590576,
          0.006012775897979736,
          0.006007016181945801,
          0.005982216835021973,
          0.0062599749565124515,
          0.0061514959335327146,
          0.0062409348487854005,
          0.006102375984191895,
          0.006282855033874512,
          0.006313416004180908,
          0.006063656806945801,
          0.006101736068725586
        ]
      },
      "throughput": {
        "unit": "samples/s",
        "value": 160.728069832852
      },
      "energy": null,
      "efficiency": null
    }
  }
}
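
Note: as a minimal sketch of how this report could be inspected, the Python snippet below loads the file with the standard library and prints the forward-pass summary. It assumes the JSON above has been saved locally as "benchmark.json" (the filename is an assumption, not part of the artifact), and it only reads keys that appear in the report.

# Sketch: inspect the forward-pass section of the benchmark report.
# Assumption: the JSON document above is saved locally as "benchmark.json".
import json

with open("benchmark.json") as f:
    benchmark = json.load(f)

forward = benchmark["report"]["forward"]
latency = forward["latency"]          # latencies are reported in seconds
memory = forward["memory"]            # memory figures are reported in MB

# Convert mean/p99 latency to milliseconds and report measured throughput.
print(f"mean latency : {latency['mean'] * 1e3:.3f} ms over {latency['count']} runs")
print(f"p99 latency  : {latency['p99'] * 1e3:.3f} ms")
print(f"throughput   : {forward['throughput']['value']:.1f} {forward['throughput']['unit']}")
print(f"max allocated VRAM: {memory['max_allocated']:.1f} {memory['unit']}")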