{
    "config": {
        "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "multiple-choice",
            "model": "FacebookAI/roberta-base",
            "library": "transformers",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "hub_kwargs": {
                "revision": "main",
                "force_download": false,
                "local_files_only": false,
                "trust_remote_code": false
            },
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.2.0",
            "optimum_benchmark_commit": "d7339efdbbcababd22ca2f563771622060764f8f",
            "transformers_version": "4.40.2",
            "transformers_commit": null,
            "accelerate_version": "0.30.1",
            "accelerate_commit": null,
            "diffusers_version": "0.27.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "0.9.16",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1006.227456,
                "max_global_vram": 898.47808,
                "max_process_vram": 231095.021568,
                "max_reserved": 555.74528,
                "max_allocated": 499.507712
            },
            "latency": {
                "unit": "s",
                "count": 130,
                "total": 0.9995370836257931,
                "mean": 0.007688746797121488,
                "stdev": 0.0003509335248130768,
                "p50": 0.007607967138290405,
                "p90": 0.007871373796463013,
                "p95": 0.007991588735580445,
                "p99": 0.008614207410812378,
                "values": [
                    0.007779806137084961,
                    0.007724126815795898,
                    0.007631967067718506,
                    0.007607807159423828,
                    0.007679646015167236,
                    0.007839645862579346,
                    0.007913565158843994,
                    0.007825406074523925,
                    0.007771805763244629,
                    0.007720127105712891,
                    0.00763852596282959,
                    0.007647807121276855,
                    0.007645885944366455,
                    0.007570366859436035,
                    0.007562207221984863,
                    0.007587807178497314,
                    0.007937564849853515,
                    0.007629405975341797,
                    0.007590366840362549,
                    0.007505247116088867,
                    0.011035311698913574,
                    0.007919485092163087,
                    0.007575807094573975,
                    0.007522367000579834,
                    0.007610527038574219,
                    0.007748126983642578,
                    0.007597246170043945,
                    0.007584766864776611,
                    0.007595326900482178,
                    0.007557887077331543,
                    0.007557247161865234,
                    0.007601087093353272,
                    0.007555167198181153,
                    0.007562686920166015,
                    0.007608127117156982,
                    0.0075687670707702635,
                    0.007589247226715088,
                    0.007593407154083252,
                    0.007612446784973145,
                    0.007602847099304199,
                    0.007532927989959717,
                    0.007560126781463623,
                    0.007576766967773437,
                    0.007565886974334717,
                    0.007555807113647461,
                    0.007571486949920654,
                    0.007563646793365479,
                    0.007583006858825684,
                    0.007609247207641601,
                    0.007721405982971191,
                    0.007562366962432862,
                    0.007565406799316407,
                    0.007603967189788818,
                    0.007594526767730713,
                    0.007595646858215332,
                    0.007571967124938965,
                    0.007555966854095459,
                    0.00756604814529419,
                    0.007614367008209229,
                    0.007586687088012695,
                    0.007618045806884766,
                    0.007601246833801269,
                    0.007577406883239746,
                    0.007647165775299072,
                    0.007575167179107666,
                    0.00763612699508667,
                    0.007597405910491944,
                    0.00786668586730957,
                    0.007688285827636719,
                    0.007636927127838135,
                    0.007611966133117676,
                    0.007649567127227783,
                    0.0075969271659851074,
                    0.007607807159423828,
                    0.007613566875457764,
                    0.007639805793762207,
                    0.007606206893920898,
                    0.007602527141571045,
                    0.007658367156982422,
                    0.00768108606338501,
                    0.007638206958770752,
                    0.007619007110595703,
                    0.00762684679031372,
                    0.007692605972290039,
                    0.0076634869575500485,
                    0.007654526233673096,
                    0.007627007007598877,
                    0.00765052604675293,
                    0.007655006885528564,
                    0.007629086017608642,
                    0.0076182069778442385,
                    0.007661245822906494,
                    0.007624766826629639,
                    0.007596607208251953,
                    0.007714207172393799,
                    0.007496127128601074,
                    0.007574847221374512,
                    0.007548606872558593,
                    0.007569087028503418,
                    0.007536606788635254,
                    0.00752476692199707,
                    0.007591806888580323,
                    0.007506526947021484,
                    0.007494526863098145,
                    0.007421247959136963,
                    0.0075442872047424316,
                    0.007512928009033203,
                    0.007496448040008545,
                    0.007512608051300049,
                    0.007498207092285156,
                    0.007586846828460694,
                    0.0075594868659973145,
                    0.007622046947479248,
                    0.007645727157592774,
                    0.007694366931915283,
                    0.007676926136016846,
                    0.007683805942535401,
                    0.007693566799163818,
                    0.0086463623046875,
                    0.008496603012084962,
                    0.008535483360290527,
                    0.008145724296569824,
                    0.007971964836120605,
                    0.007937565803527832,
                    0.008007644653320312,
                    0.007970526218414306,
                    0.007813405990600587,
                    0.007600286960601807,
                    0.007563967227935791,
                    0.008408283233642578
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 130.06020699945273
            },
            "energy": null,
            "efficiency": null
        }
    }
}