{
"config": {
"name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
"backend": {
"name": "pytorch",
"version": "2.2.2+rocm5.7",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-classification",
"model": "FacebookAI/roberta-base",
"library": "transformers",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"hub_kwargs": {
"revision": "main",
"force_download": false,
"local_files_only": false,
"trust_remote_code": false
},
"no_weights": true,
"device_map": null,
"torch_dtype": null,
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": null,
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 1,
"duration": 1,
"warmup_runs": 1,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 2
},
"new_tokens": null,
"latency": true,
"memory": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 2,
"min_new_tokens": 2
},
"call_kwargs": {
"num_inference_steps": 2
}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "error",
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7763 64-Core Processor",
"cpu_count": 128,
"cpu_ram_mb": 1082015.236096,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
"processor": "x86_64",
"python_version": "3.10.12",
"gpu": [
"Advanced Micro Devices, Inc. [AMD/ATI]"
],
"gpu_count": 1,
"gpu_vram_mb": 68702699520,
"optimum_benchmark_version": "0.2.0",
"optimum_benchmark_commit": "6d219cab8dced45685ef7bd31a9c5490c58b944d",
"transformers_version": "4.40.2",
"transformers_commit": null,
"accelerate_version": "0.30.1",
"accelerate_commit": null,
"diffusers_version": "0.27.2",
"diffusers_commit": null,
"optimum_version": null,
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": null,
"peft_commit": null
}
},
"report": {
"forward": {
"memory": {
"unit": "MB",
"max_ram": 1011.376128,
"max_global_vram": 897.163264,
"max_process_vram": 216278.016,
"max_reserved": 555.74528,
"max_allocated": 499.443712
},
"latency": {
"unit": "s",
"count": 128,
"total": 1.0016979303359983,
"mean": 0.007825765080749988,
"stdev": 0.00011132330832240842,
"p50": 0.007797586441040038,
"p90": 0.007940849161148072,
"p95": 0.008001039814949037,
"p99": 0.008232056503295898,
"values": [
0.00828774070739746,
0.007891265869140626,
0.00791574478149414,
0.008034784317016602,
0.00808150291442871,
0.008006303787231446,
0.00794790506362915,
0.00799126386642456,
0.007970465183258056,
0.007973344802856445,
0.007891426086425781,
0.00788822603225708,
0.00788390588760376,
0.007851905822753906,
0.007839106082916259,
0.007814785957336427,
0.007799905776977539,
0.007807906150817871,
0.007769987106323242,
0.007796226978302002,
0.007795587062835694,
0.007781346797943115,
0.007782627105712891,
0.0077480669021606444,
0.007753187179565429,
0.007766148090362549,
0.0077927069664001465,
0.007775907039642334,
0.00777014684677124,
0.007817825794219972,
0.007818625926971436,
0.007788547039031982,
0.00777782678604126,
0.007797346115112304,
0.00778838586807251,
0.007791586875915527,
0.007818467140197754,
0.007811747074127197,
0.007836545944213867,
0.00781894588470459,
0.007824545860290527,
0.007824866771697999,
0.007786306858062744,
0.007756066799163818,
0.007744866847991944,
0.007762307167053222,
0.00779846715927124,
0.007767746925354004,
0.007755106925964356,
0.007754467964172364,
0.00773958683013916,
0.0077523870468139645,
0.007761026859283447,
0.007856225967407227,
0.007906785011291504,
0.007894145965576171,
0.007912065982818604,
0.007849185943603516,
0.007765027046203613,
0.007798946857452392,
0.007793026924133301,
0.007831107139587403,
0.0077946271896362305,
0.007797826766967773,
0.007758626937866211,
0.007781826972961426,
0.007824545860290527,
0.00859157657623291,
0.007798145771026612,
0.007740867137908935,
0.0077307882308959965,
0.007699428081512451,
0.007707108020782471,
0.007745186805725098,
0.007693027973175049,
0.007711108207702637,
0.007719748020172119,
0.007816386222839356,
0.007899265766143799,
0.007876865863800049,
0.00778614616394043,
0.007783267021179199,
0.007780066967010498,
0.007850306034088135,
0.007958944797515868,
0.007857026100158692,
0.00802102279663086,
0.007973504066467286,
0.007937825202941895,
0.007889346122741699,
0.007892866134643555,
0.007887906074523925,
0.007824546813964843,
0.007777667045593262,
0.007791906833648682,
0.007776066780090332,
0.007762146949768066,
0.007726467132568359,
0.007740386962890625,
0.007731427192687988,
0.007684547901153565,
0.007684068202972412,
0.007711108207702637,
0.007737506866455078,
0.007882145881652832,
0.008057344436645507,
0.0078642258644104,
0.00781270694732666,
0.007801187038421631,
0.007780066967010498,
0.007814305782318115,
0.007765986919403076,
0.007898466110229492,
0.007856225967407227,
0.007814626216888427,
0.0078064661026000974,
0.007782465934753418,
0.0077829470634460446,
0.007779427051544189,
0.007816226005554198,
0.007796706199645996,
0.007790627002716065,
0.007801665782928467,
0.007829186916351318,
0.007758626937866211,
0.00776310682296753,
0.007727588176727295,
0.007753026962280273
]
},
"throughput": {
"unit": "samples/s",
"value": 127.78303331131481
},
"energy": null,
"efficiency": null
}
}
}
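A minimal consistency check for the report above, as a sketch: it assumes the JSON document is saved locally as benchmark.json (the filename from the upload path) and uses only the Python standard library to verify that the aggregate latency fields match the raw per-iteration values and that the reported throughput equals batch_size * count / total.

# Sketch: sanity-check the forward-latency statistics in the benchmark report.
# Assumes the JSON above is saved as "benchmark.json" next to this script.
import json
import statistics

with open("benchmark.json") as f:
    benchmark = json.load(f)

latency = benchmark["report"]["forward"]["latency"]
values = latency["values"]

# The aggregate fields should be consistent with the raw per-iteration values.
assert len(values) == latency["count"]
assert abs(sum(values) - latency["total"]) < 1e-9
assert abs(statistics.mean(values) - latency["mean"]) < 1e-9

# Throughput is derived from latency: samples processed per second.
# With batch_size = 1 this is simply count / total.
batch_size = benchmark["config"]["scenario"]["input_shapes"]["batch_size"]
throughput = batch_size * latency["count"] / latency["total"]
print(f"mean latency: {latency['mean'] * 1e3:.3f} ms")
print(f"throughput:   {throughput:.2f} samples/s")  # ~127.78, matching the report

Running this against the file reproduces the reported throughput of roughly 127.78 samples/s from the 128 recorded forward-pass latencies totalling about 1.002 s.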