
{
"config": { | |
"name": "cuda_inference_timm_image-classification_timm/resnet50.a1_in1k", | |
"backend": { | |
"name": "pytorch", | |
"version": "2.3.0+cu121", | |
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend", | |
"task": "image-classification", | |
"library": "timm", | |
"model": "timm/resnet50.a1_in1k", | |
"processor": "timm/resnet50.a1_in1k", | |
"device": "cuda", | |
"device_ids": "0", | |
"seed": 42, | |
"inter_op_num_threads": null, | |
"intra_op_num_threads": null, | |
"model_kwargs": {}, | |
"processor_kwargs": {}, | |
"hub_kwargs": {}, | |
"no_weights": false, | |
"device_map": null, | |
"torch_dtype": null, | |
"eval_mode": true, | |
"to_bettertransformer": false, | |
"low_cpu_mem_usage": null, | |
"attn_implementation": null, | |
"cache_implementation": null, | |
"autocast_enabled": false, | |
"autocast_dtype": null, | |
"torch_compile": false, | |
"torch_compile_target": "forward", | |
"torch_compile_config": {}, | |
"quantization_scheme": null, | |
"quantization_config": {}, | |
"deepspeed_inference": false, | |
"deepspeed_inference_config": {}, | |
"peft_type": null, | |
"peft_config": {} | |
}, | |
"scenario": { | |
"name": "inference", | |
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario", | |
"iterations": 1, | |
"duration": 1, | |
"warmup_runs": 1, | |
"input_shapes": { | |
"batch_size": 1, | |
"num_choices": 2, | |
"sequence_length": 2 | |
}, | |
"new_tokens": null, | |
"latency": true, | |
"memory": true, | |
"energy": true, | |
"forward_kwargs": {}, | |
"generate_kwargs": { | |
"max_new_tokens": 2, | |
"min_new_tokens": 2 | |
}, | |
"call_kwargs": { | |
"num_inference_steps": 2 | |
} | |
}, | |
"launcher": { | |
"name": "process", | |
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher", | |
"device_isolation": true, | |
"device_isolation_action": "error", | |
"start_method": "spawn" | |
}, | |
"environment": { | |
"cpu": " AMD EPYC 7R32", | |
"cpu_count": 16, | |
"cpu_ram_mb": 66697.29792, | |
"system": "Linux", | |
"machine": "x86_64", | |
"platform": "Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35", | |
"processor": "x86_64", | |
"python_version": "3.10.12", | |
"gpu": [ | |
"NVIDIA A10G" | |
], | |
"gpu_count": 1, | |
"gpu_vram_mb": 24146608128, | |
"optimum_benchmark_version": "0.2.1", | |
"optimum_benchmark_commit": null, | |
"transformers_version": "4.40.2", | |
"transformers_commit": null, | |
"accelerate_version": "0.30.1", | |
"accelerate_commit": null, | |
"diffusers_version": "0.27.2", | |
"diffusers_commit": null, | |
"optimum_version": null, | |
"optimum_commit": null, | |
"timm_version": "1.0.3", | |
"timm_commit": null, | |
"peft_version": null, | |
"peft_commit": null | |
} | |
}, | |
"report": { | |
"forward": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 948.957184, | |
"max_global_vram": 1047.003136, | |
"max_process_vram": 0.0, | |
"max_reserved": 396.361728, | |
"max_allocated": 253.912576 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 179, | |
"total": 0.9968294696807863, | |
"mean": 0.005568879718887074, | |
"stdev": 0.00019994935588350308, | |
"p50": 0.005607423782348633, | |
"p90": 0.005799936008453369, | |
"p95": 0.005867315053939819, | |
"p99": 0.005959229650497436, | |
"values": [ | |
0.006095871925354004, | |
0.005895167827606201, | |
0.005896192073822021, | |
0.0059269118309021, | |
0.005864448070526123, | |
0.005892096042633056, | |
0.005883903980255127, | |
0.005948416233062744, | |
0.005863423824310303, | |
0.005889023780822754, | |
0.005816319942474365, | |
0.005769248008728027, | |
0.0056514558792114256, | |
0.005755904197692871, | |
0.005684224128723145, | |
0.005743616104125977, | |
0.005708799839019775, | |
0.005530623912811279, | |
0.005594111919403076, | |
0.005586944103240967, | |
0.005646336078643799, | |
0.005763072013854981, | |
0.005659647941589355, | |
0.005736447811126709, | |
0.00573747205734253, | |
0.005842912197113037, | |
0.005661727905273438, | |
0.00566476821899414, | |
0.005645311832427978, | |
0.005710847854614258, | |
0.005761023998260498, | |
0.005830656051635743, | |
0.005786623954772949, | |
0.0057712640762329105, | |
0.005798912048339844, | |
0.005785600185394287, | |
0.005681151866912842, | |
0.005699584007263184, | |
0.005702591896057129, | |
0.005769216060638428, | |
0.005734399795532226, | |
0.00570470380783081, | |
0.005559296131134033, | |
0.005526527881622314, | |
0.005583871841430664, | |
0.00567193603515625, | |
0.005738495826721191, | |
0.005593088150024414, | |
0.0055920639038085935, | |
0.005681151866912842, | |
0.0056258559226989744, | |
0.00576204776763916, | |
0.0057497601509094234, | |
0.005791744232177734, | |
0.005769216060638428, | |
0.005865471839904785, | |
0.0057794561386108395, | |
0.005709824085235596, | |
0.005595136165618897, | |
0.005607423782348633, | |
0.00576204776763916, | |
0.005753856182098388, | |
0.005760000228881836, | |
0.005689407825469971, | |
0.005750783920288086, | |
0.005696544170379639, | |
0.005599232196807862, | |
0.005598207950592041, | |
0.005659647941589355, | |
0.005997568130493164, | |
0.005616640090942383, | |
0.005757952213287353, | |
0.005788671970367432, | |
0.005674015998840332, | |
0.005609471797943116, | |
0.0056258559226989744, | |
0.005686272144317627, | |
0.005560319900512695, | |
0.005612607955932617, | |
0.00563097620010376, | |
0.005677055835723877, | |
0.005740543842315674, | |
0.005639167785644531, | |
0.005731328010559082, | |
0.005777408123016357, | |
0.005837823867797852, | |
0.00576204776763916, | |
0.005818367958068848, | |
0.005797887802124023, | |
0.0057149438858032225, | |
0.005755904197692871, | |
0.005698560237884521, | |
0.00566476821899414, | |
0.005665791988372803, | |
0.0057487359046936035, | |
0.005585919857025146, | |
0.005577727794647217, | |
0.005446656227111817, | |
0.005393407821655274, | |
0.005654528141021729, | |
0.00563097620010376, | |
0.005438464164733887, | |
0.00536575984954834, | |
0.005325823783874512, | |
0.005395455837249756, | |
0.005497856140136719, | |
0.005430272102355957, | |
0.00556441593170166, | |
0.005617663860321045, | |
0.0055552000999450684, | |
0.005726208209991455, | |
0.005688384056091309, | |
0.005804031848907471, | |
0.005617695808410645, | |
0.005433343887329102, | |
0.005379039764404297, | |
0.005322751998901368, | |
0.005386240005493164, | |
0.005327871799468994, | |
0.0053340158462524415, | |
0.005363743782043457, | |
0.005325823783874512, | |
0.005322751998901368, | |
0.005379072189331055, | |
0.005347328186035156, | |
0.005313536167144775, | |
0.00536678409576416, | |
0.005326848030090332, | |
0.0053012480735778805, | |
0.0053606400489807126, | |
0.005332992076873779, | |
0.005327871799468994, | |
0.005537792205810547, | |
0.005381120204925537, | |
0.005307392120361328, | |
0.005378047943115235, | |
0.005322751998901368, | |
0.0053125758171081545, | |
0.005328927993774414, | |
0.005313536167144775, | |
0.005329919815063477, | |
0.005321695804595948, | |
0.005308351993560791, | |
0.005319680213928223, | |
0.00531763219833374, | |
0.005469183921813964, | |
0.005350399971008301, | |
0.0053340158462524415, | |
0.00532480001449585, | |
0.005370880126953125, | |
0.005344255924224854, | |
0.005311488151550293, | |
0.005322751998901368, | |
0.005323775768280029, | |
0.005302271842956543, | |
0.00537395191192627, | |
0.005325823783874512, | |
0.005327871799468994, | |
0.005356607913970947, | |
0.005342207908630371, | |
0.005305312156677246, | |
0.005339136123657226, | |
0.0053340158462524415, | |
0.005449728012084961, | |
0.005336063861846924, | |
0.005331967830657959, | |
0.005315584182739258, | |
0.005329919815063477, | |
0.005311488151550293, | |
0.005327871799468994, | |
0.005328896045684814, | |
0.005339136123657226, | |
0.005327871799468994, | |
0.005348351955413819, | |
0.00537395191192627, | |
0.005315584182739258, | |
0.005384191989898681, | |
0.005341184139251709, | |
0.005307392120361328 | |
] | |
}, | |
"throughput": { | |
"unit": "samples/s", | |
"value": 179.56933000518234 | |
}, | |
"energy": { | |
"unit": "kWh", | |
"cpu": 6.201931856759001e-08, | |
"ram": 3.3908561697003916e-08, | |
"gpu": 1.2919440236649145e-07, | |
"total": 2.2512228263108538e-07 | |
}, | |
"efficiency": { | |
"unit": "samples/kWh", | |
"value": 4442030.29710182 | |
} | |
} | |
} | |
} |