{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "6",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.256576,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.44.2",
            "transformers_commit": null,
            "accelerate_version": "0.34.2",
            "accelerate_commit": null,
            "diffusers_version": "0.30.3",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.9",
            "timm_commit": null,
            "peft_version": "0.12.0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1072.80384,
                "max_global_vram": 68702.69952,
                "max_process_vram": 47642.267648,
                "max_reserved": 400.556032,
                "max_allocated": 346.271744
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 8.362125,
                "mean": 8.362125,
                "stdev": 0.0,
                "p50": 8.362125,
                "p90": 8.362125,
                "p95": 8.362125,
                "p99": 8.362125,
                "values": [
                    8.362125
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1244.565504,
                "max_global_vram": 68702.69952,
                "max_process_vram": 185337.434112,
                "max_reserved": 406.847488,
                "max_allocated": 355.303424
            },
            "latency": {
                "unit": "s",
                "count": 154,
                "total": 0.9965589299201968,
                "mean": 0.006471161882598679,
                "stdev": 0.0007562427780178031,
                "p50": 0.006302970647811889,
                "p90": 0.006784283685684204,
                "p95": 0.006910603094100951,
                "p99": 0.00822322262763977,
                "values": [
                    0.006872971057891846,
                    0.006808651924133301,
                    0.0069804911613464355,
                    0.006770730972290039,
                    0.006692010879516602,
                    0.006629771232604981,
                    0.006726730823516846,
                    0.006643691062927246,
                    0.006589130878448486,
                    0.006717771053314209,
                    0.006555850982666016,
                    0.00652129077911377,
                    0.006263370990753174,
                    0.006318409919738769,
                    0.006335531234741211,
                    0.006243690013885498,
                    0.006419051170349121,
                    0.006435211181640625,
                    0.006479051113128662,
                    0.006208169937133789,
                    0.0061811299324035646,
                    0.006242570877075196,
                    0.006221930027008057,
                    0.006210090160369873,
                    0.006201611042022705,
                    0.006286409854888916,
                    0.006287371158599854,
                    0.006189929962158203,
                    0.006191370010375977,
                    0.006204969882965088,
                    0.006295690059661865,
                    0.006163051128387451,
                    0.00614753007888794,
                    0.006486731052398682,
                    0.006468010902404785,
                    0.006428490161895752,
                    0.006259050846099854,
                    0.006311850070953369,
                    0.006380331039428711,
                    0.00626161003112793,
                    0.006198729991912842,
                    0.006329771041870117,
                    0.006240489959716797,
                    0.006224810123443603,
                    0.006255850791931152,
                    0.008401133537292481,
                    0.007050732135772705,
                    0.006792651176452637,
                    0.0067008109092712405,
                    0.00674993085861206,
                    0.006784811973571777,
                    0.006727691173553467,
                    0.006734731197357177,
                    0.006721450805664063,
                    0.008065452575683594,
                    0.006995212078094482,
                    0.006725450992584228,
                    0.006762250900268555,
                    0.006776650905609131,
                    0.0067892918586730956,
                    0.006783051013946533,
                    0.006827371120452881,
                    0.006787530899047852,
                    0.006759850978851318,
                    0.006717610836029053,
                    0.006694252014160156,
                    0.006740971088409424,
                    0.006755691051483154,
                    0.006848330974578857,
                    0.006777131080627442,
                    0.006742411136627198,
                    0.006735371112823486,
                    0.006773612022399902,
                    0.006726730823516846,
                    0.006759850978851318,
                    0.006772810935974121,
                    0.007014891147613525,
                    0.0067620911598205566,
                    0.006771532058715821,
                    0.006511210918426514,
                    0.006372010231018067,
                    0.006302411079406738,
                    0.006398890018463135,
                    0.006316810131072998,
                    0.00646721076965332,
                    0.0062099299430847165,
                    0.006379691123962403,
                    0.006389450073242187,
                    0.006192490100860596,
                    0.006112490177154541,
                    0.006115530014038086,
                    0.006116650104522705,
                    0.006189129829406738,
                    0.006184010028839111,
                    0.006136970043182373,
                    0.006169291019439698,
                    0.006098410129547119,
                    0.006249289989471435,
                    0.006155051231384277,
                    0.006166729927062988,
                    0.006130730152130127,
                    0.006139530181884766,
                    0.006096650123596192,
                    0.006213129997253418,
                    0.00614976978302002,
                    0.006156170845031738,
                    0.006158889770507813,
                    0.006288330078125,
                    0.006270730972290039,
                    0.006125770092010498,
                    0.006140170097351075,
                    0.006116331100463867,
                    0.006161449909210205,
                    0.006303530216217041,
                    0.006120971202850342,
                    0.00612785005569458,
                    0.0061297698020935055,
                    0.006197929859161377,
                    0.006195850849151611,
                    0.006165450096130371,
                    0.006156010150909424,
                    0.0061198511123657226,
                    0.006207689762115478,
                    0.00615024995803833,
                    0.006166090011596679,
                    0.006132170200347901,
                    0.006132329940795898,
                    0.0060937700271606445,
                    0.006217130184173584,
                    0.006144969940185547,
                    0.0061720099449157715,
                    0.006118889808654785,
                    0.006128650188446045,
                    0.0061059298515319825,
                    0.006193610191345214,
                    0.006227849960327148,
                    0.00612929105758667,
                    0.00623153018951416,
                    0.007318252086639404,
                    0.0066843309402465825,
                    0.0067440118789672855,
                    0.00672865104675293,
                    0.00676961088180542,
                    0.00670241117477417,
                    0.006309930801391602,
                    0.006186408996582031,
                    0.006420650959014893,
                    0.006118410110473633,
                    0.006526570796966552,
                    0.01438210391998291,
                    0.004013126850128174,
                    0.005427528858184814,
                    0.006701130867004395,
                    0.006664811134338379
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 154.5317545971237
            },
            "energy": null,
            "efficiency": null
        }
    }
}