config.name,config.backend.name,config.backend.version,config.backend._target_,config.backend.task,config.backend.library,config.backend.model_type,config.backend.model,config.backend.processor,config.backend.device,config.backend.device_ids,config.backend.seed,config.backend.inter_op_num_threads,config.backend.intra_op_num_threads,config.backend.model_kwargs.trust_remote_code,config.backend.no_weights,config.backend.device_map,config.backend.torch_dtype,config.backend.eval_mode,config.backend.to_bettertransformer,config.backend.low_cpu_mem_usage,config.backend.attn_implementation,config.backend.cache_implementation,config.backend.autocast_enabled,config.backend.autocast_dtype,config.backend.torch_compile,config.backend.torch_compile_target,config.backend.quantization_scheme,config.backend.quantization_config.bits,config.backend.quantization_config.use_exllama,config.backend.quantization_config.version,config.backend.quantization_config.model_seqlen,config.backend.deepspeed_inference,config.backend.peft_type,config.scenario.name,config.scenario._target_,config.scenario.iterations,config.scenario.duration,config.scenario.warmup_runs,config.scenario.input_shapes.batch_size,config.scenario.input_shapes.num_choices,config.scenario.input_shapes.sequence_length,config.scenario.new_tokens,config.scenario.memory,config.scenario.latency,config.scenario.energy,config.scenario.generate_kwargs.max_new_tokens,config.scenario.generate_kwargs.min_new_tokens,config.launcher.name,config.launcher._target_,config.launcher.device_isolation,config.launcher.device_isolation_action,config.launcher.numactl,config.launcher.start_method,config.environment.cpu,config.environment.cpu_count,config.environment.cpu_ram_mb,config.environment.system,config.environment.machine,config.environment.platform,config.environment.processor,config.environment.python_version,config.environment.gpu,config.environment.gpu_count,config.environment.gpu_vram_mb,config.environment.optimum_benchmark_version,config.environment.optimum_benchmark_commit,config.environment.transformers_version,config.environment.transformers_commit,config.environment.accelerate_version,config.environment.accelerate_commit,config.environment.diffusers_version,config.environment.diffusers_commit,config.environment.optimum_version,config.environment.optimum_commit,config.environment.timm_version,config.environment.timm_commit,config.environment.peft_version,config.environment.peft_commit,report.traceback,report.load.memory.unit,report.load.memory.max_ram,report.load.memory.max_global_vram,report.load.memory.max_process_vram,report.load.memory.max_reserved,report.load.memory.max_allocated,report.load.latency.unit,report.load.latency.count,report.load.latency.total,report.load.latency.mean,report.load.latency.stdev,report.load.latency.p50,report.load.latency.p90,report.load.latency.p95,report.load.latency.p99,report.load.latency.values,report.load.throughput,report.load.energy.unit,report.load.energy.cpu,report.load.energy.ram,report.load.energy.gpu,report.load.energy.total,report.load.efficiency,report.prefill.memory.unit,report.prefill.memory.max_ram,report.prefill.memory.max_global_vram,report.prefill.memory.max_process_vram,report.prefill.memory.max_reserved,report.prefill.memory.max_allocated,report.prefill.latency.unit,report.prefill.latency.count,report.prefill.latency.total,report.prefill.latency.mean,report.prefill.latency.stdev,report.prefill.latency.p50,report.prefill.latency.p90,report.prefill.latency.p95,report.prefill.latency.p99,report.prefill.latency.values,report.prefill.throughput.unit,report.prefill.throughput.value,report.prefill.energy.unit,report.prefill.energy.cpu,report.prefill.energy.ram,report.prefill.energy.gpu,report.prefill.energy.total,report.prefill.efficiency.unit,report.prefill.efficiency.value,report.decode.memory.unit,report.decode.memory.max_ram,report.decode.memory.max_global_vram,report.decode.memory.max_process_vram,report.decode.memory.max_reserved,report.decode.memory.max_allocated,report.decode.latency.unit,report.decode.latency.count,report.decode.latency.total,report.decode.latency.mean,report.decode.latency.stdev,report.decode.latency.p50,report.decode.latency.p90,report.decode.latency.p95,report.decode.latency.p99,report.decode.latency.values,report.decode.throughput.unit,report.decode.throughput.value,report.decode.energy.unit,report.decode.energy.cpu,report.decode.energy.ram,report.decode.energy.gpu,report.decode.energy.total,report.decode.efficiency.unit,report.decode.efficiency.value,report.per_token.memory,report.per_token.latency.unit,report.per_token.latency.count,report.per_token.latency.total,report.per_token.latency.mean,report.per_token.latency.stdev,report.per_token.latency.p50,report.per_token.latency.p90,report.per_token.latency.p95,report.per_token.latency.p99,report.per_token.latency.values,report.per_token.throughput.unit,report.per_token.throughput.value,report.per_token.energy,report.per_token.efficiency
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last):
  File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark
    benchmark_report = Benchmark.launch(benchmark_config)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch
    report = launcher.launch(worker=cls.run, worker_args=[config])
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch
    raise ChildProcessError(response[""traceback""])
ChildProcessError: Traceback (most recent call last):
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target
    report = worker(*worker_args)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run
    report = scenario.run(backend)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run
    self.run_text_generation_memory_tracking(backend)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking
    _ = backend.prefill(self.inputs, prefill_kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context
    return func(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill
    return self.pretrained_model.generate(**inputs, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context
    return func(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate
    result = self._sample(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample
    outputs = self(**model_inputs, return_dict=True)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward
    transformer_outputs = self.transformer(
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward
    outputs = block(
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward
    attn_outputs = self.attn(
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward
    attn_output = _flash_attention_forward(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward
    attn_output = flash_attn_func(
  File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func
    return FlashAttnFunc.apply(
  File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply
    return super().apply(*args, **kwargs) # type: ignore[misc]
  File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward
    out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward(
  File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward
    out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd(
RuntimeError: FlashAttention only supports Ampere GPUs or newer.
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,827.838464,515.76832,0.0,113.246208,113.023488,s,1,10.3206826171875,10.3206826171875,0.0,10.3206826171875,10.3206826171875,10.3206826171875,10.3206826171875,[10.3206826171875],,kWh,1.209280324582475e-05,1.3265899175839107e-06,3.6952807340000227e-06,1.7114673897408684e-05,,MB,1334.513664,631.11168,0.0,213.909504,180.29568,s,28,0.2370170545578003,0.008464894805635725,0.0005714710549458053,0.008354624271392823,0.00860488624572754,0.008710818910598755,0.010626957817077638,"[0.008671839714050293, 0.008145759582519532, 0.008370976448059082, 0.008174880027770996, 0.008351360321044922, 0.00822060775756836, 0.008245887756347657, 0.008429471969604491, 0.008576191902160645, 0.008317695617675782, 0.008090047836303712, 0.008190464019775391, 0.008342111587524414, 0.008194239616394042, 0.008461600303649902, 0.00831817626953125, 0.008293855667114257, 0.008225824356079102, 0.008311936378479004, 0.011327903747558593, 0.008377087593078614, 0.008357888221740722, 0.00846390438079834, 0.008389823913574219, 0.008731807708740234, 0.00835961627960205, 0.008555839538574219, 0.00852025604248047]",tokens/s,30242.54947971253,kWh,2.4399283105490844e-07,2.690756220995278e-08,1.3242322931271282e-07,4.0332362257757403e-07,tokens/kWh,634726025.6266338,MB,1367.629824,643.694592,0.0,226.492416,180.29824,s,28,10.077106872558595,0.35989667401994974,0.003035151928121632,0.36026242065429687,0.3626016723632813,0.3633097763061523,0.3674705224609375,"[0.36152365112304685, 0.36169784545898437, 0.36168951416015627, 0.3596973571777344, 0.356380126953125, 0.3595142822265625, 0.36229888916015623, 0.35849331665039064, 0.3532386779785156, 0.3612688598632813, 0.3606222534179688, 0.3579005126953125, 0.3636566467285156, 0.3601270446777344, 0.35684603881835936, 0.35859136962890625, 0.3552720642089844, 0.35605596923828126, 0.3626655883789062, 0.35641070556640625, 0.36039779663085936, 0.36145989990234373, 0.3610047607421875, 0.3688811340332031, 0.3582237548828125, 0.3625742797851563, 0.36123284912109377, 0.35938168334960935]",tokens/s,175.0502423273514,kWh,1.0328018358528554e-05,1.1389947207346094e-06,3.929018511901475e-06,1.5396031591164644e-05,tokens/kWh,4091963.5444340054,,s,1764,10.060474532604223,0.00570321685521781,0.0001846999719548029,0.005693808078765869,0.005857183790206909,0.0059349679708480835,0.006298633570671079,"[0.005492288112640381, 0.0058221759796142575, 0.005773568153381347, 0.005759007930755615, 0.005912320137023926, 0.005696383953094483, 0.00578876781463623, 0.0056388797760009764, 0.005637887954711914, 0.0057981438636779785, 0.005748095989227295, 0.005769375801086426, 0.005811679840087891, 0.0056730880737304685, 0.005755775928497314, 0.006012928009033203, 0.005823584079742432, 0.005712800025939942, 0.005646336078643799, 0.005662496089935303, 
0.005771488189697266, 0.0058364481925964355, 0.005919072151184082, 0.005883903980255127, 0.005834752082824707, 0.005720032215118408, 0.005640223979949951, 0.005793856143951416, 0.005783520221710205, 0.005713888168334961, 0.005758975982666016, 0.005693439960479736, 0.005724415779113769, 0.005854976177215576, 0.005832704067230224, 0.005832064151763916, 0.005629824161529541, 0.005591807842254638, 0.0056211199760437015, 0.005787744045257569, 0.0057571840286254885, 0.0057779521942138675, 0.005660416126251221, 0.005801343917846679, 0.005653151988983154, 0.005584864139556884, 0.005638144016265869, 0.005619711875915527, 0.005791744232177734, 0.005728288173675537, 0.005637152194976807, 0.0055715198516845706, 0.005611487865447998, 0.005646368026733398, 0.005689119815826416, 0.005616991996765137, 0.00575167989730835, 0.005705023765563965, 0.005561024188995361, 0.005732096195220947, 0.005636352062225342, 0.005998335838317871, 0.005622015953063965, 0.005360928058624268, 0.005861216068267823, 0.005599552154541015, 0.005761600017547607, 0.0059081602096557615, 0.005826879978179931, 0.005887008190155029, 0.005698751926422119, 0.005738272190093994, 0.005614751815795898, 0.005695680141448975, 0.006132383823394775, 0.005714176177978515, 0.005779200077056885, 0.006102431774139405, 0.005660704135894775, 0.005638720035552979, 0.005810175895690918, 0.00586569595336914, 0.005822271823883057, 0.005777376174926758, 0.005734399795532226, 0.00569920015335083, 0.005773312091827393, 0.005828735828399659, 0.005845439910888672, 0.005711552143096924, 0.005975296020507812, 0.0060068159103393555, 0.005872255802154541, 0.005666431903839112, 0.005630559921264648, 0.005643455982208252, 0.005605504035949707, 0.005660895824432373, 0.005651968002319336, 0.0057067198753356935, 0.005760096073150635, 0.005796512126922607, 0.0056929922103881835, 0.005638847827911377, 0.005703999996185303, 0.00573526382446289, 0.0057103362083435055, 0.005719520092010498, 0.005743743896484375, 0.00566153621673584, 0.005761663913726806, 0.005648672103881836, 0.005687104225158691, 0.005646431922912597, 0.005527167797088623, 0.005515103816986084, 0.0057225279808044436, 0.005640416145324707, 0.0055593280792236325, 0.005646687984466553, 0.005790080070495605, 0.00562611198425293, 0.005651519775390625, 0.005771584033966064, 0.005849855899810791, 0.005614912033081055, 0.0055294718742370606, 0.0058000640869140625, 0.0057571520805358885, 0.0056574721336364745, 0.0057003841400146485, 0.005847231864929199, 0.005846560001373291, 0.005715968132019043, 0.005797728061676026, 0.00572495985031128, 0.00565225601196289, 0.005814208030700684, 0.005707839965820313, 0.005834752082824707, 0.005893152236938476, 0.005961023807525635, 0.006039455890655517, 0.005740255832672119, 0.005599264144897461, 0.006049791812896729, 0.005946847915649414, 0.0058908481597900395, 0.005840832233428955, 0.005736256122589111, 0.005679103851318359, 0.005707680225372314, 0.0057017278671264646, 0.0055491838455200195, 0.005461215972900391, 0.005555776119232178, 0.0056436161994934085, 0.005951776027679444, 0.005856959819793701, 0.005900864124298096, 0.0056893758773803715, 0.005625120162963867, 0.005540639877319336, 0.005591104030609131, 0.0057223038673400875, 0.005816192150115967, 0.005672671794891357, 0.005663008213043213, 0.005553919792175293, 0.00557696008682251, 0.005500927925109863, 0.0056620478630065914, 0.005626207828521729, 0.00558131217956543, 0.0055478401184082034, 0.005629600048065186, 0.005562719821929932, 0.005738592147827149, 0.00566812801361084, 0.005704319953918457, 
0.005872960090637207, 0.005698239803314209, 0.005636096000671387, 0.006012127876281738, 0.0057067198753356935, 0.0059922561645507815, 0.005787648200988769, 0.00582480001449585, 0.005790656089782715, 0.005306303977966309, 0.005646975994110107, 0.00577942419052124, 0.005513247966766358, 0.005482495784759522, 0.005421055793762207, 0.00682422399520874, 0.005713632106781006, 0.00588595199584961, 0.005767168045043946, 0.00568723201751709, 0.005548096179962158, 0.005602784156799316, 0.0056878399848937986, 0.005547679901123047, 0.005619840145111084, 0.005545728206634522, 0.0057348799705505375, 0.005662559986114502, 0.005632160186767578, 0.0056730880737304685, 0.005734079837799072, 0.005583040237426758, 0.0055766720771789555, 0.005503007888793945, 0.0054637761116027835, 0.005583263874053955, 0.00582041597366333, 0.005801119804382324, 0.0056897602081298826, 0.005591360092163086, 0.005695168018341064, 0.005656288146972656, 0.005618271827697754, 0.00561356782913208, 0.005568511962890625, 0.005496287822723388, 0.005570752143859863, 0.006278528213500977, 0.005925824165344238, 0.0058096318244934084, 0.005785855770111084, 0.0057060480117797855, 0.005687295913696289, 0.005808127880096436, 0.005826272010803222, 0.0056938881874084475, 0.005664608001708984, 0.005707871913909912, 0.005789599895477295, 0.0057749757766723635, 0.005589375972747803, 0.005730303764343261, 0.005727903842926025, 0.005626304149627686, 0.0056953921318054195, 0.005916768074035644, 0.005812128067016601, 0.005767424106597901, 0.00566812801361084, 0.005849567890167237, 0.005658304214477539, 0.0057504639625549315, 0.005348351955413819, 0.005847040176391601, 0.005733695983886719, 0.005720928192138672, 0.0056113600730896, 0.005681151866912842, 0.005653567790985107, 0.005675968170166016, 0.005929279804229737, 0.005906112194061279, 0.005732351779937744, 0.005717599868774414, 0.005686912059783935, 0.005657375812530518, 0.005752831935882568, 0.005728256225585937, 0.005832032203674316, 0.005652927875518799, 0.0054496960639953615, 0.0055642881393432615, 0.005552512168884277, 0.005701632022857666, 0.005639711856842041, 0.005482975959777832, 0.005493760108947754, 0.005628799915313721, 0.005709248065948486, 0.0057413439750671385, 0.005711775779724121, 0.005707168102264405, 0.005735007762908936, 0.005718016147613526, 0.005781439781188965, 0.0056935038566589356, 0.005667967796325684, 0.005512063980102539, 0.005519360065460205, 0.005449535846710205, 0.005414976119995117, 0.005363840103149414, 0.005525504112243652, 0.005570752143859863, 0.005639455795288086, 0.005763807773590088, 0.005734208106994629, 0.005645952224731445, 0.005669248104095459, 0.005677055835723877, 0.005676576137542725, 0.005840703964233398, 0.00562604808807373, 0.0056315197944641114, 0.005673920154571533, 0.00566476821899414, 0.005658847808837891, 0.005688191890716553, 0.005657855987548828, 0.005531295776367188, 0.005560512065887451, 0.005613376140594482, 0.005592415809631348, 0.005538464069366455, 0.005527552127838135, 0.005781119823455811, 0.005635583877563476, 0.005520095825195312, 0.005652031898498535, 0.005838560104370117, 0.005716224193572998, 0.005617472171783447, 0.005612415790557861, 0.00567414379119873, 0.005739488124847412, 0.005713727951049805, 0.005635615825653076, 0.0055894718170166014, 0.00561513614654541, 0.00555241584777832, 0.0055145277976989746, 0.005491615772247315, 0.005470143795013428, 0.005727744102478028, 0.005683616161346435, 0.0056475200653076174, 0.005694464206695556, 0.005553728103637696, 0.0056344962120056154, 0.0056600961685180666, 0.005712575912475586, 
0.005745952129364014, 0.0057862081527709965, 0.005617343902587891, 0.005679423809051514, 0.005705728054046631, 0.005731935977935791, 0.0059613118171691895, 0.005907167911529541, 0.005652575969696045, 0.005681151866912842, 0.0056872000694274906, 0.005654848098754882, 0.005535456180572509, 0.005764383792877198, 0.0059686717987060545, 0.005781407833099365, 0.00560752010345459, 0.005648543834686279, 0.005776447772979736, 0.005767680168151855, 0.0057383041381835935, 0.00567574405670166, 0.005658592224121094, 0.005801055908203125, 0.005743360042572022, 0.005723968029022217, 0.005761151790618897, 0.005697535991668701, 0.005756927967071533, 0.0060702719688415525, 0.005619743824005127, 0.005680191993713379, 0.0058023362159729, 0.005702527999877929, 0.005614751815795898, 0.0055649600028991695, 0.00593455982208252, 0.005450016021728516, 0.005794496059417725, 0.005689343929290771, 0.0057487359046936035, 0.006143295764923095, 0.006060095787048339, 0.005765759944915771, 0.00608460807800293, 0.0056483840942382815, 0.005765120029449463, 0.0056761598587036135, 0.005761504173278809, 0.005551743984222412, 0.005722911834716797, 0.005690688133239746, 0.0056572799682617185, 0.00562332820892334, 0.005849728107452393, 0.005740384101867676, 0.0057077760696411135, 0.0058056321144104, 0.005872064113616944, 0.005797567844390869, 0.005855552196502686, 0.006178783893585205, 0.006033440113067627, 0.005809792041778564, 0.005675392150878906, 0.005629951953887939, 0.005748544216156006, 0.005824704170227051, 0.005748703956604004, 0.0057346878051757815, 0.0055509119033813475, 0.0055838079452514644, 0.005599040031433106, 0.00568726396560669, 0.005750400066375732, 0.005583456039428711, 0.005600959777832031, 0.00555622386932373, 0.0055565438270568845, 0.0057325439453125, 0.00556982421875, 0.0056509761810302735, 0.005637279987335205, 0.005731167793273925, 0.005785600185394287, 0.005774752140045166, 0.0058906559944152835, 0.0058772478103637695, 0.005744480133056641, 0.005706751823425293, 0.0056562237739562985, 0.005640192031860352, 0.005623712062835694, 0.005742176055908203, 0.0059922561645507815, 0.005571263790130615, 0.005627871990203858, 0.005912415981292725, 0.005767360210418702, 0.005763360023498535, 0.0053455038070678714, 0.005660672187805176, 0.005634335994720459, 0.005600992202758789, 0.005607423782348633, 0.005650432109832764, 0.005773536205291748, 0.005799935817718506, 0.005717599868774414, 0.005689343929290771, 0.00591871976852417, 0.005729824066162109, 0.005720320224761963, 0.005743008136749267, 0.005724160194396972, 0.005896096229553223, 0.005936575889587402, 0.005794464111328125, 0.005738431930541992, 0.005810400009155273, 0.005666656017303467, 0.005752895832061768, 0.005865119934082032, 0.005751391887664795, 0.005717696189880371, 0.005691391944885254, 0.005604512214660644, 0.0055996479988098145, 0.0054849920272827144, 0.005337376117706299, 0.005327904224395752, 0.005294591903686524, 0.005291584014892578, 0.0054728960990905766, 0.005494783878326416, 0.005570559978485107, 0.0054988799095153805, 0.005502975940704345, 0.005455904006958008, 0.005602335929870606, 0.005720416069030762, 0.0060845441818237304, 0.005903295993804932, 0.005646048069000244, 0.005496640205383301, 0.005568607807159424, 0.0057652158737182616, 0.005657855987548828, 0.005643008232116699, 0.0056888961791992185, 0.005772863864898682, 0.0059790081977844235, 0.005785600185394287, 0.00562604808807373, 0.005768383979797363, 0.005724800109863281, 0.005857279777526855, 0.005724031925201416, 0.005813375949859619, 0.005692416191101074, 0.005824512004852295, 
0.005746687889099121, 0.005913856029510498, 0.005469088077545166, 0.0059064321517944334, 0.005684959888458252, 0.005937439918518066, 0.005746016025543213, 0.005732607841491699, 0.005562816143035889, 0.005558207988739013, 0.005533728122711181, 0.005739903926849365, 0.00543507194519043, 0.005512447834014893, 0.005457600116729736, 0.005488639831542969, 0.005447679996490478, 0.005633088111877441, 0.005553152084350586, 0.005595071792602539, 0.005646336078643799, 0.005746399879455567, 0.005647808074951172, 0.0056648640632629395, 0.005619487762451172, 0.005657567977905274, 0.005670656204223632, 0.005682559967041015, 0.005710720062255859, 0.005582848072052002, 0.005480447769165039, 0.005607423782348633, 0.005537792205810547, 0.005636256217956543, 0.005473951816558838, 0.0054949760437011715, 0.005560287952423096, 0.005873695850372314, 0.005651487827301026, 0.005444575786590576, 0.005480319976806641, 0.005564896106719971, 0.005627871990203858, 0.005629951953887939, 0.0056436161994934085, 0.005593440055847168, 0.005553855895996094, 0.005468480110168457, 0.0054551358222961425, 0.005665023803710937, 0.005662303924560547, 0.0055715198516845706, 0.005605504035949707, 0.005588511943817139, 0.005636735916137696, 0.005574304103851319, 0.0056581759452819826, 0.0055443840026855465, 0.005549888134002686, 0.005613759994506836, 0.00561359977722168, 0.0054617919921875, 0.005506624221801757, 0.005440127849578857, 0.0055848960876464845, 0.0051497921943664555, 0.005423168182373047, 0.005583712100982666, 0.005746655941009522, 0.005738624095916748, 0.0055625920295715335, 0.00563372802734375, 0.005480447769165039, 0.005398528099060058, 0.0053309440612792965, 0.005495007991790772, 0.005539616107940674, 0.005598368167877198, 0.005681824207305908, 0.005597536087036133, 0.005514080047607422, 0.0054069762229919435, 0.0058232321739196775, 0.005852960109710694, 0.005596864223480225, 0.0056479997634887695, 0.005658720016479492, 0.005679935932159424, 0.005720064163208007, 0.005781504154205322, 0.005580832004547119, 0.005854623794555664, 0.0058904318809509274, 0.005738495826721191, 0.005650623798370362, 0.0055842242240905764, 0.0057142720222473145, 0.005564896106719971, 0.005756063938140869, 0.005531871795654297, 0.005800415992736816, 0.005713920116424561, 0.005699584007263184, 0.005737887859344482, 0.005566400051116943, 0.005609216213226319, 0.005763999938964844, 0.005844992160797119, 0.005763296127319336, 0.005668447971343994, 0.005705344200134277, 0.005726784229278564, 0.005705696105957031, 0.005639840126037598, 0.005553919792175293, 0.005664896011352539, 0.0057612800598144534, 0.005746943950653076, 0.005690400123596191, 0.005825503826141358, 0.005755008220672608, 0.006356671810150146, 0.006750112056732178, 0.006871327877044677, 0.006871039867401123, 0.006490111827850342, 0.0055801281929016115, 0.005386752128601074, 0.005374015808105469, 0.005535871982574463, 0.005416575908660889, 0.005367616176605225, 0.0053818879127502444, 0.005624447822570801, 0.005581952095031738, 0.005517600059509277, 0.005632639884948731, 0.005703648090362549, 0.0057877120971679685, 0.0057545919418334965, 0.005732192039489746, 0.005734144210815429, 0.005674975872039795, 0.00581712007522583, 0.005881311893463134, 0.006019552230834961, 0.005684544086456298, 0.005593632221221924, 0.005652736186981202, 0.0057291841506958005, 0.005807040214538574, 0.005668831825256348, 0.005795807838439942, 0.005658688068389893, 0.0055830078125, 0.005613120079040528, 0.005785632133483887, 0.005845248222351074, 0.005784800052642822, 0.005647136211395264, 0.005796031951904297, 
0.005820223808288574, 0.005766816139221191, 0.005850880146026611, 0.005747072219848633, 0.005742527961730957, 0.00564412784576416, 0.005642848014831543, 0.005705567836761474, 0.0056646718978881835, 0.005756192207336426, 0.005806848049163819, 0.0057877120971679685, 0.0058037757873535155, 0.005636384010314941, 0.0057703680992126465, 0.005732992172241211, 0.005896063804626464, 0.005658976078033447, 0.0057355198860168455, 0.006005343914031983, 0.00566534423828125, 0.005721471786499024, 0.005643936157226562, 0.006107872009277344, 0.005856319904327393, 0.00582483196258545, 0.00575551986694336, 0.0058122239112854005, 0.005662847995758056, 0.005645855903625489, 0.0051660480499267575, 0.005324704170227051, 0.005488351821899414, 0.005754879951477051, 0.005587327957153321, 0.005646336078643799, 0.005709824085235596, 0.005841951847076416, 0.005698527812957764, 0.005777408123016357, 0.0056217598915100095, 0.00570304012298584, 0.005774208068847656, 0.005686367988586426, 0.005681503772735596, 0.005667136192321777, 0.005500927925109863, 0.005742688179016114, 0.006088575839996338, 0.005703711986541748, 0.005871871948242188, 0.005764256000518799, 0.005685855865478515, 0.005768991947174072, 0.005853600025177002, 0.0058037757873535155, 0.0055829758644104005, 0.005590879917144775, 0.005653728008270264, 0.005627999782562256, 0.005509568214416504, 0.0054971837997436525, 0.005453695774078369, 0.005675136089324951, 0.005721151828765869, 0.005757664203643799, 0.005682432174682618, 0.0056386241912841795, 0.0056611838340759274, 0.005623807907104492, 0.005611743927001953, 0.00570681619644165, 0.005385087966918945, 0.005454976081848145, 0.005529312133789063, 0.005561344146728516, 0.0053678078651428224, 0.005607423782348633, 0.005644447803497314, 0.005735392093658447, 0.00562662410736084, 0.005660799980163575, 0.005648640155792237, 0.005777152061462403, 0.006438560009002685, 0.005725632190704346, 0.005655456066131592, 0.0059246401786804195, 0.005654943943023682, 0.005791296005249023, 0.00572822380065918, 0.0057981758117675785, 0.005660672187805176, 0.005459104061126709, 0.005745696067810059, 0.005758560180664062, 0.005690080165863037, 0.005814176082611084, 0.005703423976898193, 0.005729375839233398, 0.005732863903045654, 0.005792128086090088, 0.005814303874969482, 0.005768640041351318, 0.005610047817230224, 0.005857632160186767, 0.005879392147064209, 0.005749152183532715, 0.005774367809295655, 0.005812928199768066, 0.005719999790191651, 0.005672959804534912, 0.005732351779937744, 0.0057160000801086425, 0.005768991947174072, 0.005646719932556153, 0.005750112056732178, 0.005708255767822266, 0.005671264171600342, 0.0057422399520874025, 0.005840896129608154, 0.005847040176391601, 0.005960864067077637, 0.005736288070678711, 0.005745664119720459, 0.005767168045043946, 0.005834976196289063, 0.005743519783020019, 0.005829664230346679, 0.0057458882331848146, 0.005765727996826172, 0.005635968208312988, 0.005898399829864502, 0.005817503929138184, 0.0060076479911804195, 0.005756192207336426, 0.005780288219451904, 0.005636000156402588, 0.005720064163208007, 0.005746687889099121, 0.005842495918273926, 0.005827040195465088, 0.005752799987792968, 0.005883039951324463, 0.005759327888488769, 0.005777919769287109, 0.005803103923797607, 0.00584716796875, 0.005952288150787353, 0.005942783832550049, 0.005788159847259521, 0.00576688003540039, 0.005693727970123291, 0.005644383907318115, 0.005451200008392334, 0.0057114558219909664, 0.005305024147033691, 0.005639872074127197, 0.00572211217880249, 0.005875616073608398, 0.00584822416305542, 
0.0058436479568481445, 0.005656832218170166, 0.005703680038452149, 0.005757215976715088, 0.005798848152160645, 0.005765920162200928, 0.0057077760696411135, 0.005482495784759522, 0.005961952209472657, 0.005983967781066895, 0.0056687679290771485, 0.00578166389465332, 0.005821887969970703, 0.0056284799575805665, 0.005787648200988769, 0.005650432109832764, 0.005894303798675537, 0.0057703680992126465, 0.005763967990875244, 0.0056464638710021975, 0.00562988805770874, 0.005816224098205566, 0.005761983871459961, 0.005708735942840576, 0.005621024131774902, 0.0055623998641967775, 0.005690336227416992, 0.005646304130554199, 0.005703423976898193, 0.005572512149810791, 0.005525792121887207, 0.005536863803863526, 0.005691999912261963, 0.005801119804382324, 0.005729248046875, 0.005908480167388916, 0.005763328075408936, 0.005738239765167236, 0.005725887775421143, 0.005878079891204834, 0.005791488170623779, 0.005742688179016114, 0.005681312084197998, 0.005811200141906738, 0.005608448028564453, 0.005709824085235596, 0.005615615844726562, 0.005644095897674561, 0.005552320003509521, 0.00583619213104248, 0.005781951904296875, 0.005775008201599121, 0.005583360195159912, 0.005652480125427246, 0.005596672058105469, 0.005623936176300049, 0.005519807815551758, 0.005571968078613281, 0.005468192100524902, 0.005774464130401611, 0.005550943851470948, 0.005761119842529297, 0.005676959991455078, 0.005754879951477051, 0.00547430419921875, 0.005589344024658203, 0.005431104183197022, 0.005383967876434326, 0.005384064197540283, 0.005433536052703857, 0.005389952182769776, 0.005535552024841309, 0.005412767887115479, 0.005560480117797852, 0.0056397438049316405, 0.005684224128723145, 0.0055387520790100096, 0.005688543796539307, 0.005789152145385742, 0.005801983833312988, 0.005627295970916748, 0.005773920059204102, 0.005536064147949219, 0.005720064163208007, 0.005719264030456543, 0.005866528034210205, 0.005749728202819824, 0.005794591903686523, 0.005654176235198974, 0.0056427202224731444, 0.00555404806137085, 0.005547167778015137, 0.005587808132171631, 0.005679200172424317, 0.005555424213409424, 0.0057424001693725584, 0.005731455802917481, 0.005587967872619629, 0.005478464126586914, 0.0055937919616699215, 0.005578752040863037, 0.0056394882202148435, 0.005495584011077881, 0.005676095962524414, 0.005601471900939942, 0.005753824234008789, 0.0055937919616699215, 0.005843488216400147, 0.005687520027160644, 0.0057571840286254885, 0.005736447811126709, 0.005640192031860352, 0.006018367767333985, 0.005702335834503174, 0.0059127678871154785, 0.005715295791625976, 0.005984799861907959, 0.005719999790191651, 0.005746431827545166, 0.005779712200164795, 0.005797567844390869, 0.0054617919921875, 0.005874112129211426, 0.005811967849731445, 0.005819200038909912, 0.005745855808258057, 0.005744448184967041, 0.005645055770874024, 0.005675104141235352, 0.005554240226745605, 0.005685088157653809, 0.005676928043365478, 0.005849215984344482, 0.005596479892730713, 0.005626560211181641, 0.005623007774353027, 0.005654431819915771, 0.005581696033477784, 0.005640480041503906, 0.005745728015899659, 0.005652927875518799, 0.005673120021820069, 0.005650496006011963, 0.005580063819885254, 0.00605625581741333, 0.005765439987182618, 0.005777503967285156, 0.005856959819793701, 0.005736288070678711, 0.005759520053863525, 0.005815328121185303, 0.005769696235656738, 0.005771872043609619, 0.005807199954986572, 0.00582528018951416, 0.0057077760696411135, 0.0057775359153747555, 0.005711743831634522, 0.005691391944885254, 0.0056070399284362795, 0.005693088054656983, 
0.005572832107543945, 0.005701504230499268, 0.005644735813140869, 0.005487872123718262, 0.005332096099853516, 0.0056145601272583005, 0.005608448028564453, 0.005766784191131592, 0.005604896068572998, 0.005776063919067383, 0.00571398401260376, 0.005626976013183594, 0.005456575870513916, 0.005474016189575195, 0.005540287971496582, 0.0056852478981018065, 0.005635424137115479, 0.005692063808441162, 0.005752831935882568, 0.005658751964569092, 0.005597119808197022, 0.005637856006622315, 0.00571619176864624, 0.005378079891204834, 0.005539807796478271, 0.005837088108062744, 0.005888991832733155, 0.005952064037322998, 0.0056358718872070316, 0.005554399967193604, 0.00554207992553711, 0.005597184181213379, 0.005552095890045166, 0.0055890240669250486, 0.005455872058868408, 0.005472256183624268, 0.005389952182769776, 0.005534175872802734, 0.005451680183410645, 0.00549187183380127, 0.005364128112792969, 0.005441472053527832, 0.005519072055816651, 0.00552342414855957, 0.005507808208465576, 0.005744575977325439, 0.005521567821502685, 0.005576704025268555, 0.005529920101165772, 0.005610911846160888, 0.005673247814178467, 0.005703392028808593, 0.005655136108398438, 0.005803264141082763, 0.00631657600402832, 0.005717919826507568, 0.005723616123199463, 0.005741087913513183, 0.005760831832885742, 0.00557862377166748, 0.005598624229431152, 0.005532576084136963, 0.005392096042633057, 0.005486144065856934, 0.00563481616973877, 0.005535840034484863, 0.005515135765075683, 0.00540883207321167, 0.0053836159706115725, 0.005677631855010986, 0.005918655872344971, 0.005754975795745849, 0.005805183887481689, 0.0057003841400146485, 0.0058343358039855955, 0.005768640041351318, 0.005823520183563233, 0.0056843838691711425, 0.00560748815536499, 0.006220384120941162, 0.0056152639389038085, 0.005616352081298828, 0.005588672161102295, 0.005566527843475342, 0.005697472095489502, 0.005459392070770263, 0.005198624134063721, 0.005607359886169434, 0.0055848960876464845, 0.0056852478981018065, 0.005594687938690186, 0.005498847961425781, 0.0054501757621765136, 0.005654176235198974, 0.0056011838912963866, 0.005880288124084473, 0.005825984001159668, 0.0056118078231811525, 0.005677087783813477, 0.005661119937896728, 0.005596992015838623, 0.00556876802444458, 0.005609055995941162, 0.0055706238746643065, 0.005969151973724365, 0.005475168228149414, 0.005559872150421143, 0.005658944129943847, 0.005687424182891846, 0.005566463947296142, 0.005578591823577881, 0.0055911998748779295, 0.005625440120697022, 0.005740575790405274, 0.0058863358497619625, 0.005672959804534912, 0.005693183898925781, 0.0056538238525390624, 0.005811295986175537, 0.005752895832061768, 0.005590816020965576, 0.005340256214141846, 0.005472576141357422, 0.005982656002044678, 0.0057079682350158695, 0.00593503999710083, 0.00595689582824707, 0.005747551918029785, 0.0055892162322998045, 0.005537600040435791, 0.005516672134399414, 0.005437503814697266, 0.005564896106719971, 0.005562079906463623, 0.00565225601196289, 0.005523935794830322, 0.00553984022140503, 0.005453855991363526, 0.005927072048187256, 0.005764512062072754, 0.005770976066589355, 0.005667520046234131, 0.005726208209991455, 0.00557590389251709, 0.005700287818908692, 0.005705471992492676, 0.005744416236877442, 0.005536064147949219, 0.005490943908691406, 0.005383967876434326, 0.005828832149505615, 0.005795839786529541, 0.005717088222503662, 0.005577856063842773, 0.0056566081047058105, 0.005668799877166748, 0.005846079826354981, 0.005772031784057617, 0.005768959999084473, 0.005590943813323974, 0.005677343845367432, 
0.005557439804077149, 0.005714623928070068, 0.005676447868347168, 0.0056737599372863766, 0.005619904041290283, 0.005752639770507812, 0.0056852478981018065, 0.00573033618927002, 0.005641280174255371, 0.005777919769287109, 0.005847455978393555, 0.005785600185394287, 0.005737696170806885, 0.0057372479438781735, 0.005844992160797119, 0.005869311809539795, 0.0058371200561523435, 0.005768544197082519, 0.005712672233581543, 0.005703264236450195, 0.005601759910583496, 0.005589056015014649, 0.005621376037597656, 0.005546048164367676, 0.005498208045959472, 0.005419487953186035, 0.005582464218139648, 0.005447711944580078, 0.0057407040596008305, 0.005544320106506347, 0.005703680038452149, 0.005699584007263184, 0.005666944026947021, 0.005690591812133789, 0.005651103973388672, 0.005514592170715332, 0.005716063976287842, 0.0055894718170166014, 0.005679103851318359, 0.005501023769378662, 0.005631392002105713, 0.005589087963104248, 0.005640255928039551, 0.0055443840026855465, 0.005642240047454834, 0.00557587194442749, 0.005772255897521973, 0.00667952013015747, 0.007276671886444092, 0.007256095886230469, 0.006288095951080323, 0.005579840183258056, 0.005641151905059814, 0.005511168003082275, 0.0055929598808288575, 0.005811872005462647, 0.00567471981048584, 0.005786367893218994, 0.005758975982666016, 0.005698560237884521, 0.005667840003967285, 0.005633056163787842, 0.005468832015991211, 0.005615327835083008, 0.0058063678741455075, 0.005663040161132812, 0.005679296016693115, 0.005916704177856445, 0.005717088222503662, 0.005558976173400879, 0.005656576156616211, 0.005785215854644776, 0.005683584213256836, 0.0055623679161071774, 0.005634047985076904, 0.005728256225585937, 0.005715424060821533, 0.005654911994934082, 0.005831071853637695, 0.005553599834442139, 0.005623936176300049, 0.005523712158203125, 0.00562611198425293, 0.00551196813583374, 0.005724544048309326, 0.005534111976623535, 0.00564851188659668, 0.005535744190216065, 0.00556441593170166, 0.005529600143432617, 0.005545375823974609, 0.005487199783325195, 0.005506815910339355, 0.005652575969696045, 0.005690783977508545, 0.005487360000610351, 0.005729375839233398, 0.0057487359046936035, 0.0057086400985717775, 0.005599296092987061, 0.005677055835723877, 0.005683199882507324, 0.005822463989257813, 0.005799935817718506, 0.005682528018951416, 0.005500991821289062, 0.005607840061187744, 0.005614975929260254, 0.005741631984710694, 0.005555967807769776, 0.0056217598915100095, 0.00563808012008667, 0.005727583885192871, 0.005616352081298828, 0.005301695823669433, 0.005646912097930909, 0.005721888065338135, 0.005631616115570068, 0.005708032131195069, 0.005597536087036133, 0.005555520057678223, 0.005507296085357666, 0.005700064182281494, 0.005844607830047608, 0.005575232028961182, 0.005676735877990722, 0.005648640155792237, 0.0056136960983276364, 0.005653567790985107, 0.005636640071868896, 0.0055502400398254395, 0.0055316481590271, 0.005660672187805176, 0.005881247997283935, 0.005687104225158691, 0.005654528141021729, 0.005712672233581543, 0.005635200023651123, 0.005571455955505371, 0.005703616142272949, 0.0058204798698425295, 0.005636096000671387, 0.0055667200088500976, 0.005553919792175293, 0.005618847846984863, 0.005761888027191162, 0.005750080108642578, 0.0057617278099060054, 0.0057114877700805665, 0.00589247989654541, 0.005874911785125733, 0.005726784229278564, 0.005738143920898438, 0.005702208042144776, 0.005789951801300049, 0.0060208640098571775, 0.00564847993850708, 0.0056761279106140134, 0.005767392158508301, 0.005743167877197266, 0.0058080959320068355, 
0.005724127769470215, 0.005931104183197021, 0.005957632064819336, 0.005810080051422119, 0.005859488010406494, 0.005783679962158203, 0.005691199779510498, 0.00570143985748291, 0.0058124160766601565, 0.005658720016479492, 0.0059853758811950685, 0.005763807773590088, 0.005739808082580566, 0.00571884822845459, 0.005765024185180664, 0.0057058238983154295, 0.005636576175689697, 0.005908095836639404, 0.005900735855102539, 0.005738431930541992, 0.005658624172210694, 0.005636320114135742, 0.005659776210784912, 0.0056917757987976075, 0.00569920015335083, 0.005843935966491699, 0.0058774399757385255, 0.005783552169799804, 0.005797215938568115, 0.00575929594039917, 0.005744991779327392, 0.005731711864471436, 0.0057571520805358885, 0.005740960121154785, 0.005597184181213379, 0.005642111778259277, 0.005711391925811767, 0.0056756801605224606, 0.005611648082733154, 0.005634016036987305, 0.005535520076751709, 0.0054455680847167965, 0.005702847957611084, 0.00575705623626709, 0.005655168056488037, 0.005565887928009034, 0.005624576091766358, 0.005713920116424561, 0.005578752040863037, 0.005781087875366211, 0.006068640232086182, 0.00604310417175293, 0.00581056022644043, 0.006072127819061279, 0.005953152179718018, 0.005929183959960937, 0.005724671840667724, 0.005858304023742676, 0.0058419198989868165, 0.005950592041015625, 0.005860223770141602, 0.005751008033752441, 0.00570959997177124, 0.005647583961486817, 0.005726975917816162, 0.005695519924163818, 0.0055848960876464845, 0.005582079887390136, 0.005439583778381348, 0.00557532787322998, 0.0055166082382202145, 0.005751488208770752, 0.005773312091827393, 0.005755136013031006, 0.005647264003753662, 0.0056241598129272465, 0.005715583801269531, 0.005655424118041992, 0.005734047889709473, 0.005321152210235595, 0.0059127678871154785, 0.005910336017608642, 0.005825984001159668, 0.005986911773681641, 0.0058059201240539555, 0.005718336105346679, 0.005719871997833252, 0.005752031803131104, 0.005792384147644043, 0.005834911823272705, 0.005744639873504639, 0.005623807907104492, 0.0056538558006286625, 0.005728928089141846, 0.005745791912078858, 0.005700511932373047, 0.005787615776062012, 0.005773312091827393, 0.005708000183105469, 0.005794879913330078, 0.005698272228240967, 0.00574015998840332, 0.0057494077682495115, 0.005768928050994873, 0.005691391944885254, 0.005605375766754151, 0.005881855964660645, 0.005994143962860107, 0.005906112194061279, 0.005769855976104736, 0.005723392009735108, 0.005612319946289063, 0.005668863773345947, 0.005721951961517334, 0.005656383991241455, 0.0057563199996948245, 0.005698495864868164, 0.005739999771118164, 0.0057487359046936035, 0.0057144641876220705, 0.005623807907104492, 0.005586944103240967, 0.005675007820129394, 0.005703328132629394, 0.005962080001831055, 0.005662816047668457, 0.005672863960266113, 0.005562528133392334, 0.005568352222442627, 0.005568511962890625, 0.005691391944885254, 0.005558015823364258, 0.0055008001327514645, 0.005597407817840576, 0.005691455841064453, 0.005762944221496582, 0.005732416152954101, 0.005631711959838867, 0.0056462721824646, 0.005851647853851318, 0.0057580161094665525, 0.0057225279808044436, 0.005460095882415771, 0.005919871807098388, 0.005790592193603516, 0.0058122239112854005, 0.005818367958068848, 0.005873856067657471, 0.0057914237976074215, 0.005722047805786133, 0.005679359912872314, 0.005779391765594483, 0.005814271926879883, 0.005801087856292725, 0.005682015895843506, 0.005785759925842285, 0.007906559944152833, 0.008443743705749511, 0.005696544170379639, 0.005612576007843017, 0.005694143772125244, 
0.005703328132629394, 0.005724736213684082, 0.005718944072723389, 0.005655456066131592, 0.005853151798248291, 0.005708096027374268, 0.00576639986038208, 0.005607456207275391, 0.005706111907958985, 0.005705952167510986, 0.005715680122375488, 0.005607103824615479, 0.005576896190643311, 0.00548854398727417, 0.0055606718063354495, 0.005566463947296142, 0.005719200134277343, 0.005643104076385498, 0.005693439960479736, 0.005736447811126709, 0.005789696216583252, 0.005742720127105713, 0.005643136024475097, 0.005598207950592041, 0.005797408103942871, 0.005738463878631592, 0.005812736034393311, 0.005814559936523437, 0.005719007968902588, 0.005688064098358155, 0.00556060791015625, 0.005780928134918213, 0.006394144058227539, 0.006027200222015381, 0.006423776149749756, 0.005962431907653809, 0.005906591892242432, 0.005750783920288086, 0.005859519958496094, 0.006422272205352783, 0.0062650880813598635, 0.005765984058380127, 0.005702623844146729, 0.00559881591796875, 0.005333183765411377, 0.005722943782806396, 0.005718111991882324, 0.005650496006011963, 0.005651360034942627, 0.005725120067596436, 0.005672959804534912, 0.005713920116424561, 0.005601280212402344, 0.005464064121246338, 0.005388288021087646, 0.00555017614364624, 0.005760096073150635, 0.005688127994537354, 0.005570496082305908, 0.005767231941223145, 0.0057274880409240725, 0.0057331199645996095, 0.005741888046264648, 0.005673696041107177, 0.005604351997375488, 0.005619872093200684, 0.005669695854187012, 0.005754879951477051, 0.005729311943054199, 0.005557216167449951, 0.005535744190216065, 0.005578783988952637, 0.005602431774139404, 0.005692255973815918, 0.005658527851104736, 0.005666207790374756, 0.005573311805725098, 0.005636096000671387, 0.005592512130737305, 0.005624063968658447, 0.005787968158721924, 0.005623936176300049, 0.005733856201171875, 0.005751200199127197, 0.005695519924163818, 0.005814239978790283, 0.006047167778015137, 0.005806848049163819, 0.0056332478523254395, 0.005667359828948975, 0.005562335968017578, 0.005709919929504394, 0.005629951953887939, 0.005684544086456298, 0.005587456226348877, 0.005552544116973877, 0.005688384056091309, 0.0057407040596008305, 0.005955135822296143, 0.005899263858795166, 0.00578163194656372, 0.00571721601486206, 0.005791552066802978, 0.005735167980194092, 0.0057725758552551265, 0.005581632137298584, 0.005703423976898193, 0.005516128063201904, 0.0057420158386230465, 0.005640255928039551, 0.0057738242149353025, 0.005851136207580566, 0.005698880195617676, 0.006117248058319092, 0.005765759944915771, 0.0057114877700805665, 0.005695744037628174, 0.005872159957885742, 0.005695104122161865, 0.005617343902587891, 0.005702239990234375, 0.0056208958625793455, 0.0056286721229553225, 0.005760992050170898, 0.005750783920288086, 0.005731679916381836, 0.005672863960266113, 0.005640960216522216, 0.005660672187805176, 0.0056258559226989744, 0.005604864120483399, 0.00563865613937378, 0.005637184143066406, 0.005727519989013672, 0.005725855827331543, 0.0056648640632629395, 0.0059528322219848636, 0.005772928237915039, 0.005638527870178223, 0.005591648101806641, 0.0056650562286376955, 0.005491903781890869, 0.0055989761352539065, 0.005656991958618164, 0.005534080028533936, 0.006753983974456787, 0.006121791839599609, 0.00599238395690918, 0.006024479866027832, 0.0063225278854370116, 0.005751232147216797, 0.005697535991668701, 0.005574656009674072, 0.0056217598915100095, 0.005825695991516114, 0.005608191967010498, 0.005551616191864014, 0.005730847835540772, 0.005881440162658691, 0.005753312110900879, 0.0057645440101623535, 
0.005560736179351807, 0.005703839778900146, 0.005600575923919678, 0.0056440639495849605, 0.005870495796203613, 0.005828383922576904, 0.005754975795745849, 0.005764480113983154, 0.005860095977783203, 0.00532758378982544, 0.005730303764343261, 0.005805823802947998, 0.005671520233154297, 0.00579753589630127, 0.0057272639274597165, 0.005722400188446045, 0.005772223949432373, 0.00587775993347168, 0.005669792175292968, 0.005829472064971924, 0.00578326416015625, 0.005756224155426025, 0.005774528026580811, 0.005711711883544922, 0.005711071968078614, 0.005646751880645752, 0.005622079849243164, 0.005650015830993652, 0.005644703865051269, 0.005703680038452149, 0.00563424015045166, 0.005639999866485596, 0.005607391834259033, 0.005558303833007813, 0.0056136960983276364, 0.005711743831634522, 0.005691167831420899, 0.005642464160919189, 0.005811935901641846, 0.005627232074737549, 0.005599391937255859, 0.005583648204803467, 0.005531199932098389, 0.005680799961090088, 0.005567264080047607, 0.005723135948181152, 0.00566758394241333, 0.005750495910644532, 0.005935647964477539, 0.005787648200988769, 0.005807680130004883, 0.005657023906707763, 0.0056258559226989744, 0.005740447998046875, 0.00589785623550415, 0.006070911884307861, 0.0058837437629699705, 0.005804160118103027, 0.005812096118927002, 0.005865536212921143, 0.005788928031921387, 0.0058141441345214845, 0.005782495975494384, 0.005697375774383545, 0.005617728233337402, 0.00572380781173706, 0.005782815933227539, 0.005746975898742675, 0.0057636799812316895, 0.005869696140289307, 0.0058074240684509275, 0.005772255897521973, 0.005617343902587891, 0.005955423831939697, 0.0059045119285583494, 0.005815423965454101, 0.005827199935913086, 0.005864863872528076, 0.005763807773590088, 0.0056824002265930176, 0.005638112068176269, 0.0056202239990234375, 0.005535903930664062, 0.005714208126068115, 0.005793791770935058, 0.005753183841705322, 0.005639840126037598, 0.005462016105651855, 0.005605631828308105, 0.0057415680885314945, 0.00570854377746582, 0.0056835517883300785, 0.005568160057067871, 0.005717184066772461, 0.0056154241561889645, 0.005728576183319092, 0.005692287921905518, 0.005666016101837158, 0.005710432052612305, 0.005703487873077393, 0.005632448196411133, 0.005557951927185059, 0.005653567790985107, 0.005760000228881836, 0.005718111991882324, 0.0055961918830871584, 0.005606272220611572, 0.005652480125427246, 0.005619711875915527, 0.005688767910003662, 0.005656767845153809, 0.005656960010528564, 0.005832096099853515, 0.005878367900848389, 0.005764768123626709, 0.005798240184783935, 0.0057259521484375, 0.005861248016357422, 0.005836991786956787, 0.005742752075195313, 0.005733856201171875, 0.0057820801734924316, 0.005637951850891113, 0.005874144077301025, 0.005725376129150391, 0.005583360195159912, 0.005572896003723144, 0.005641759872436524, 0.005738719940185547, 0.005705728054046631, 0.005584991931915283, 0.0055881600379943846, 0.005491424083709717, 0.005601280212402344, 0.005600863933563232]",tokens/s,175.3396417120473,,
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,826.335232,515.76832,0.0,113.246208,113.023488,s,1,9.9122119140625,9.9122119140625,0.0,9.9122119140625,9.9122119140625,9.9122119140625,9.9122119140625,[9.9122119140625],,kWh,1.192486784999763e-05,1.3080153650283495e-06,3.7161140840000217e-06,1.6948997299026003e-05,,MB,1337.0368,631.11168,0.0,213.909504,180.29568,s,30,0.23948998594284057,0.00798299953142802,7.520089745924121e-05,0.007962143898010254,0.008087328052520753,0.008110386037826538,0.008198741569519044,"[0.008085824012756348, 0.007911231994628906, 0.00793235206604004, 0.007936448097229003, 0.007921696186065674, 0.00810086441040039, 0.007919040203094483, 0.007940639972686767, 0.008231648445129395, 0.008006976127624512, 0.007945888042449951, 0.007955423831939697, 0.007955615997314452, 0.00788044786453247, 0.008118176460266113, 0.007968671798706055, 0.008064160346984863, 0.007943007946014404, 0.007990655899047851, 0.007885119915008544, 0.007991487979888917, 0.007925407886505127, 0.008007904052734376, 0.008031392097473144, 0.007913792133331298, 0.008006336212158204, 0.007975647926330567, 0.008011199951171876, 0.007981855869293213, 0.007951072216033936]",tokens/s,32068.14668999561,kWh,2.287232843782652e-07,2.5224231824559815e-08,1.268970886300967e-07,3.808446048329217e-07,tokens/kWh,672190170.8764088,MB,1370.15296,643.694592,0.0,226.492416,180.29824,s,30,10.067013519287109,0.33556711730957034,0.0026069846915340988,0.3347383575439453,0.3388870880126953,0.33945511169433595,0.3442833865356445,"[0.34608642578125, 0.33749984741210937, 0.33894918823242187, 0.3341790771484375, 0.33473159790039064, 0.33888018798828123, 0.33741217041015625, 0.3365202026367187, 0.3398690490722656, 0.3353953552246094, 0.3341253967285156, 0.3345431213378906, 0.3351024780273438, 0.33494100952148437, 0.3347451171875, 0.3336499328613281, 0.33621371459960936, 0.3345181579589844, 0.33310140991210935, 0.33336581420898437, 0.3335359802246094, 0.33370797729492185, 0.3335428161621094, 0.33488616943359373, 0.33398779296875, 0.33382162475585936, 0.33448526000976564, 0.3340930480957031, 0.33572543334960936, 0.33539816284179685]",tokens/s,187.74187561971598,kWh,9.667714111454929e-06,1.0661788847772752e-06,3.7801338147698654e-06,1.451402681100207e-05,tokens/kWh,4340628.608474397,,s,1890,10.052824332237241,0.005318954673141398,0.00015231906654323218,0.005291615962982178,0.005386211252212524,0.005446454524993896,0.005943475203514097,"[0.005083231925964355, 0.005395840167999268, 0.00533951997756958, 0.005414591789245605, 0.005558815956115723, 0.005559487819671631, 0.005499008178710937, 0.00556934404373169, 0.005428800106048584, 0.005431551933288575, 0.005431295871734619, 0.0053976960182189945, 0.0053992319107055665, 0.005413023948669433, 0.005404640197753907, 0.005336063861846924, 0.005724256038665772, 0.0053441600799560545, 0.005361663818359375, 0.00531660795211792, 0.005410367965698242, 0.005341631889343262, 0.005595456123352051, 0.005549759864807129, 0.0054735360145568845, 0.005327616214752197, 0.005328896045684814, 0.005318655967712403, 0.005310463905334473, 0.005297567844390869, 0.005335552215576172, 0.00526691198348999, 0.005313151836395263, 0.005326399803161621, 0.005349279880523682, 0.005486815929412842, 0.006633791923522949, 0.0069387202262878415, 0.007081600189208984, 0.007135104179382324, 0.006625696182250977, 0.005298240184783936, 0.00533846378326416, 0.005370783805847168,
0.005293375968933105, 0.005374239921569824, 0.0052646718025207515, 0.00525600004196167, 0.0053842558860778805, 0.0053004159927368165, 0.005297920227050781, 0.005336287975311279, 0.0052674241065979, 0.005288159847259522, 0.005290112018585205, 0.005269983768463135, 0.005272863864898681, 0.005294655799865723, 0.00528710412979126, 0.005274591922760009, 0.00526691198348999, 0.005356063842773437, 0.005385791778564453, 0.004974463939666748, 0.005277279853820801, 0.005294079780578613, 0.005261151790618896, 0.005276127815246582, 0.0052676801681518556, 0.005248479843139649, 0.005294015884399414, 0.0052824001312255855, 0.005273407936096192, 0.005292543888092041, 0.0052928318977355955, 0.005278624057769775, 0.005319744110107422, 0.0052642240524291995, 0.005336512088775635, 0.005325151920318604, 0.005252480030059814, 0.0052880640029907226, 0.005731135845184326, 0.005554175853729248, 0.005359903812408447, 0.00563375997543335, 0.006161664009094238, 0.005518080234527588, 0.005376287937164307, 0.005400032043457031, 0.0054131197929382325, 0.005406655788421631, 0.005341087818145752, 0.00532700777053833, 0.005298175811767578, 0.005301727771759033, 0.005286431789398193, 0.005279744148254394, 0.005252128124237061, 0.00529318380355835, 0.005287551879882812, 0.0053424639701843265, 0.005314911842346191, 0.005306591987609863, 0.005306784152984619, 0.005394271850585938, 0.0052962880134582516, 0.0052715520858764645, 0.005306335926055908, 0.005361375808715821, 0.005261568069458008, 0.005261375904083252, 0.005283840179443359, 0.005249023914337158, 0.005765120029449463, 0.005315839767456055, 0.005734272003173828, 0.0052540478706359865, 0.0052709121704101565, 0.005276256084442139, 0.0053043198585510255, 0.005406303882598877, 0.005535647869110107, 0.00529692792892456, 0.005297887802124024, 0.005272895812988281, 0.004991968154907226, 0.006399551868438721, 0.006148672103881836, 0.0067664642333984375, 0.006465343952178955, 0.005310656070709229, 0.00527350378036499, 0.005304416179656982, 0.005319776058197022, 0.0053175358772277834, 0.005316383838653564, 0.005321055889129639, 0.005300096035003662, 0.005330399990081787, 0.005317215919494629, 0.005281727790832519, 0.005281472206115722, 0.005402560234069824, 0.005273087978363037, 0.00530134391784668, 0.005324639797210693, 0.005242815971374512, 0.005308608055114746, 0.005267263889312744, 0.005287807941436767, 0.005273600101470947, 0.005304448127746582, 0.00531660795211792, 0.0052583680152893065, 0.005276544094085693, 0.005293856143951416, 0.005259488105773926, 0.005351424217224121, 0.005299295902252197, 0.005294079780578613, 0.005251264095306397, 0.0052919998168945315, 0.005263840198516846, 0.005259552001953125, 0.005277696132659912, 0.005262559890747071, 0.005423903942108154, 0.005277535915374756, 0.005273600101470947, 0.005263296127319336, 0.005305823802947998, 0.005261727809906006, 0.005287456035614014, 0.005306496143341065, 0.005280704021453858, 0.0052856321334838864, 0.005335296154022217, 0.005292096138000488, 0.005275167942047119, 0.005251232147216797, 0.005340767860412598, 0.005269919872283936, 0.0052715520858764645, 0.005305439949035645, 0.005372831821441651, 0.005340576171875, 0.005314976215362549, 0.005758624076843261, 0.005030144214630127, 0.005294079780578613, 0.005320640087127686, 0.005300543785095215, 0.005285376071929931, 0.005277279853820801, 0.005319488048553467, 0.005332799911499023, 0.0055708479881286625, 0.005342976093292236, 0.005280096054077149, 0.00539408016204834, 0.005326720237731933, 0.0052646718025207515, 0.0053010878562927246, 0.005334752082824707, 
0.005261856079101563, 0.005275392055511475, 0.005279104232788086, 0.00525545597076416, 0.005257567882537842, 0.005249023914337158, 0.005281280040740967, 0.005325056076049804, 0.005264800071716308, 0.00529036808013916, 0.005332448005676269, 0.00526643180847168, 0.005318175792694091, 0.0053211841583251955, 0.005363615989685058, 0.005299456119537353, 0.005290624141693115, 0.0053021440505981446, 0.005276319980621338, 0.005297376155853272, 0.005256703853607178, 0.005274591922760009, 0.005258624076843262, 0.0052947521209716795, 0.005307936191558838, 0.005261760234832763, 0.005510464191436768, 0.005290688037872315, 0.0052995519638061525, 0.005319327831268311, 0.005271711826324463, 0.005293407917022705, 0.005296192169189453, 0.005354112148284912, 0.005304128170013428, 0.005284927845001221, 0.005338047981262207, 0.005267392158508301, 0.0052657279968261715, 0.005289728164672851, 0.005259263992309571, 0.0052815361022949215, 0.00525929594039917, 0.005291359901428223, 0.0052499198913574215, 0.005279007911682129, 0.005271488189697265, 0.005001471996307373, 0.005295167922973633, 0.005304096221923828, 0.005403552055358887, 0.005264832019805908, 0.0053151359558105465, 0.0052715520858764645, 0.0053350400924682614, 0.0053043198585510255, 0.005287903785705567, 0.005261343955993652, 0.005292031764984131, 0.005267327785491944, 0.00526307201385498, 0.005357984066009521, 0.005249023914337158, 0.0053220481872558595, 0.005333695888519287, 0.005351424217224121, 0.005298175811767578, 0.005373119831085205, 0.005266240119934082, 0.005310336112976074, 0.005279871940612793, 0.0052605757713317875, 0.005257952213287354, 0.005312096118927002, 0.005257343769073487, 0.005300447940826416, 0.005294144153594971, 0.005228544235229492, 0.005273600101470947, 0.005322495937347412, 0.005277728080749512, 0.005267776012420654, 0.0052509760856628415, 0.005277311801910401, 0.00532428789138794, 0.005340352058410645, 0.005330624103546143, 0.005388063907623291, 0.005337440013885498, 0.0052674560546875, 0.005269375801086426, 0.0052971520423889164, 0.005284255981445313, 0.005261055946350098, 0.005359551906585693, 0.005297056198120117, 0.0055623679161071774, 0.005306687831878662, 0.0053367681503295895, 0.0053309440612792965, 0.005320703983306885, 0.005359295845031738, 0.0053190398216247554, 0.005351232051849365, 0.005320384025573731, 0.005318111896514892, 0.005296895980834961, 0.005459487915039063, 0.00531935977935791, 0.0052899842262268066, 0.005038847923278808, 0.005307328224182129, 0.005273407936096192, 0.005318848133087159, 0.0052899842262268066, 0.005279327869415283, 0.0053086400032043455, 0.005289951801300049, 0.005283679962158203, 0.00529856014251709, 0.005291647911071777, 0.005251488208770752, 0.00528380823135376, 0.0052899842262268066, 0.005252543926239014, 0.005285600185394287, 0.005309120178222656, 0.0052696638107299805, 0.005310463905334473, 0.005261248111724854, 0.005250751972198486, 0.0053151359558105465, 0.005686272144317627, 0.005316703796386719, 0.005321440219879151, 0.005290080070495606, 0.005291232109069824, 0.005303296089172363, 0.005269311904907227, 0.005306015968322754, 0.0052676801681518556, 0.005250944137573242, 0.005259391784667969, 0.005418911933898925, 0.005277791976928711, 0.005275519847869873, 0.005363359928131104, 0.005271008014678955, 0.005279871940612793, 0.005280320167541504, 0.005263008117675781, 0.0052947521209716795, 0.005271455764770508, 0.005322879791259766, 0.006842368125915528, 0.006784800052642822, 0.006782944202423096, 0.005330239772796631, 0.00533187198638916, 0.005513343811035156, 
0.005304416179656982, 0.005317471981048584, 0.005333951950073242, 0.005287231922149659, 0.005316800117492676, 0.005259712219238281, 0.00528329610824585, 0.005310048103332519, 0.005268479824066162, 0.005259263992309571, 0.005283328056335449, 0.005277696132659912, 0.00538047981262207, 0.005175104141235351, 0.005287936210632324, 0.00543555212020874, 0.00533897590637207, 0.005281856060028076, 0.005281919956207275, 0.00529798412322998, 0.005292031764984131, 0.005275263786315918, 0.005441728115081787, 0.005281919956207275, 0.005283904075622558, 0.005306655883789062, 0.005285600185394287, 0.0052789759635925295, 0.005438240051269532, 0.0053302721977233885, 0.005276288032531738, 0.00530185604095459, 0.005236576080322266, 0.0052551040649414064, 0.005228991985321045, 0.005281055927276611, 0.005273920059204101, 0.00525164794921875, 0.005384064197540283, 0.00528707218170166, 0.005245952129364013, 0.0053116798400878905, 0.005280576229095459, 0.0052624640464782714, 0.005284704208374023, 0.005256703853607178, 0.005267903804779052, 0.005941504001617431, 0.005484032154083252, 0.005757535934448242, 0.005959424018859863, 0.005337088108062744, 0.005466271877288818, 0.005768928050994873, 0.00584716796875, 0.0053591361045837406, 0.005304800033569336, 0.005322976112365723, 0.005316480159759522, 0.005395711898803711, 0.005436063766479492, 0.0053350400924682614, 0.005299871921539307, 0.0053146882057189945, 0.005287807941436767, 0.005276000022888184, 0.005271520137786865, 0.0052492480278015135, 0.0052583680152893065, 0.005296351909637451, 0.005288640022277832, 0.005277472019195557, 0.005277952194213867, 0.005267199993133545, 0.005265408039093018, 0.005281792163848877, 0.00508684778213501, 0.005290592193603515, 0.005320672035217285, 0.005277599811553955, 0.005273280143737793, 0.005288383960723877, 0.005267583847045899, 0.0052726402282714845, 0.005267392158508301, 0.005294464111328125, 0.005286399841308594, 0.005251071929931641, 0.005315711975097656, 0.005286784172058106, 0.005318784236907959, 0.0052837119102478024, 0.005273727893829345, 0.005236608028411865, 0.005277696132659912, 0.005316351890563965, 0.005265471935272217, 0.005253664016723633, 0.005258912086486816, 0.005294079780578613, 0.005281792163848877, 0.005243008136749268, 0.005246848106384277, 0.0052667841911315915, 0.005233312129974365, 0.005250368118286133, 0.005232351779937744, 0.005274400234222412, 0.005275167942047119, 0.005268191814422608, 0.005339295864105225, 0.005305215835571289, 0.005338016033172607, 0.005298175811767578, 0.005440767765045166, 0.006265344142913819, 0.006097216129302979, 0.005321983814239502, 0.005448383808135986, 0.005345056056976319, 0.005402847766876221, 0.005262752056121826, 0.005372096061706543, 0.005288032054901123, 0.00531283187866211, 0.005343264102935791, 0.005459807872772217, 0.005388224124908447, 0.005382336139678955, 0.00528934383392334, 0.005376704216003418, 0.005285823822021484, 0.005384191989898681, 0.005297440052032471, 0.005362368106842041, 0.00548799991607666, 0.005368768215179443, 0.005301983833312989, 0.0053350400924682614, 0.005076511859893799, 0.005340767860412598, 0.005382368087768555, 0.005310944080352783, 0.005310080051422119, 0.005422719955444336, 0.00528275203704834, 0.005265664100646972, 0.00531328010559082, 0.005282656192779541, 0.005268864154815674, 0.00537062406539917, 0.005285920143127442, 0.005277696132659912, 0.005341184139251709, 0.005646336078643799, 0.005287936210632324, 0.005296000003814697, 0.005367904186248779, 0.005334527969360352, 0.005269152164459228, 0.005257120132446289, 
0.005251584053039551, 0.005290463924407959, 0.005302271842956543, 0.0052483839988708494, 0.0052650880813598635, 0.005380064010620117, 0.005831647872924805, 0.005523231983184815, 0.00690172815322876, 0.005818208217620849, 0.005930751800537109, 0.005302944183349609, 0.005402912139892578, 0.005330656051635742, 0.005292031764984131, 0.005287936210632324, 0.005296000003814697, 0.005311999797821045, 0.00530847978591919, 0.005276127815246582, 0.005548128128051758, 0.005620736122131348, 0.005327104091644287, 0.005279520034790039, 0.005422336101531982, 0.005340608119964599, 0.005349535942077637, 0.0053208317756652836, 0.005302239894866943, 0.005264768123626709, 0.005273248195648193, 0.0053056640625, 0.005416639804840088, 0.005297952175140381, 0.005296351909637451, 0.005281792163848877, 0.0052943677902221676, 0.005519072055816651, 0.005748127937316895, 0.00534991979598999, 0.005304384231567383, 0.0052609601020812986, 0.005302239894866943, 0.005448063850402832, 0.0053023362159729005, 0.005265503883361816, 0.005302175998687744, 0.005277952194213867, 0.005270944118499756, 0.005309120178222656, 0.005281216144561768, 0.005263584136962891, 0.0052750401496887206, 0.005282368183135986, 0.005265344142913819, 0.005277791976928711, 0.005314176082611084, 0.005271232128143311, 0.005296351909637451, 0.005263232231140137, 0.005403232097625732, 0.005310272216796875, 0.0053045120239257814, 0.005283967971801758, 0.005300000190734864, 0.005250463962554932, 0.005304704189300537, 0.005351808071136475, 0.005439551830291748, 0.005371808052062988, 0.0054230718612670896, 0.005310463905334473, 0.005331039905548096, 0.005320608139038086, 0.005291584014892578, 0.0053621439933776854, 0.005314335823059082, 0.005273791790008545, 0.005293280124664307, 0.005399360179901123, 0.005285024166107178, 0.005430079936981201, 0.005345280170440674, 0.005275648117065429, 0.005294079780578613, 0.0053096961975097655, 0.005259103775024414, 0.005276000022888184, 0.005284639835357666, 0.005361695766448974, 0.0053630399703979495, 0.005288352012634277, 0.005344511985778809, 0.005325024127960205, 0.005472959995269776, 0.005272543907165527, 0.005276063919067383, 0.005300640106201172, 0.0054291200637817385, 0.005336319923400879, 0.005313471794128418, 0.005320000171661377, 0.005279712200164795, 0.005314623832702637, 0.004997471809387207, 0.005263487815856934, 0.005291776180267334, 0.00526361608505249, 0.005260863780975342, 0.005362751960754394, 0.005570528030395508, 0.005345119953155518, 0.005327968120574951, 0.005261280059814453, 0.005273856163024902, 0.005368512153625488, 0.005299903869628906, 0.0052923521995544434, 0.005296127796173096, 0.0052705278396606445, 0.005267615795135498, 0.005225279808044434, 0.005277728080749512, 0.0053043198585510255, 0.005300320148468017, 0.005303904056549073, 0.005281472206115722, 0.0052865281105041505, 0.005295968055725098, 0.0053003840446472165, 0.00530134391784668, 0.005283872127532959, 0.005304543972015381, 0.005261983871459961, 0.005320352077484131, 0.005252927780151367, 0.00534768009185791, 0.0052919678688049315, 0.0052800002098083495, 0.005282080173492432, 0.005303775787353516, 0.005293856143951416, 0.0052947521209716795, 0.005308224201202393, 0.005328127861022949, 0.005302080154418945, 0.00529036808013916, 0.005259840011596679, 0.005436895847320556, 0.0053537278175354005, 0.0052575039863586425, 0.005309567928314209, 0.0052993278503417965, 0.005294911861419678, 0.005307328224182129, 0.005325024127960205, 0.005291808128356934, 0.0052952318191528324, 0.005286111831665039, 0.005307040214538574, 0.005285888195037842, 
0.005275775909423828, 0.005308224201202393, 0.00528326416015625, 0.005265344142913819, 0.005300032138824463, 0.005282527923583984, 0.005030752182006836, 0.005272511959075928, 0.00532480001449585, 0.005281792163848877, 0.005298336029052734, 0.005336832046508789, 0.005287903785705567, 0.00530841588973999, 0.005490816116333007, 0.005275936126708985, 0.005322495937347412, 0.005261280059814453, 0.0052633600234985355, 0.0053142719268798825, 0.005273888111114502, 0.0052633600234985355, 0.005273056030273437, 0.0052453441619873045, 0.005289408206939697, 0.005306208133697509, 0.005255424022674561, 0.005281407833099366, 0.0052675199508667, 0.005264319896697998, 0.005381951808929444, 0.005355135917663574, 0.005278240203857422, 0.005289951801300049, 0.005273632049560547, 0.0052715520858764645, 0.005283840179443359, 0.0052717761993408205, 0.005295904159545899, 0.0052899842262268066, 0.005279263973236084, 0.00531715202331543, 0.005494719982147217, 0.0053373122215271, 0.005445248126983643, 0.00533519983291626, 0.005310463905334473, 0.005301663875579834, 0.005295872211456299, 0.005329760074615478, 0.005310719966888428, 0.005289728164672851, 0.005259263992309571, 0.005300032138824463, 0.0053014721870422365, 0.005268447875976563, 0.005314879894256592, 0.0052975997924804685, 0.005268735885620117, 0.0052824001312255855, 0.005253536224365234, 0.005436927795410156, 0.005318880081176758, 0.005369952201843262, 0.005263840198516846, 0.0052952318191528324, 0.005319295883178711, 0.005347296237945557, 0.0053534722328186036, 0.0050202240943908695, 0.005269343852996826, 0.005280159950256348, 0.005258240222930908, 0.005257823944091797, 0.005255392074584961, 0.005273536205291748, 0.005275648117065429, 0.005292031764984131, 0.005260416030883789, 0.005260159969329834, 0.005396480083465576, 0.005254848003387451, 0.005263264179229736, 0.005286303997039795, 0.005297376155853272, 0.005326784133911132, 0.005295104026794434, 0.005249023914337158, 0.005272799968719482, 0.005255807876586914, 0.005281792163848877, 0.005292031764984131, 0.005268671989440918, 0.005407551765441895, 0.005281599998474121, 0.00526470422744751, 0.005282559871673584, 0.0052839360237121585, 0.005385759830474853, 0.00541974401473999, 0.005295936107635498, 0.005271423816680909, 0.005292128086090088, 0.0053043198585510255, 0.005326303958892822, 0.00530076789855957, 0.005275936126708985, 0.005384031772613525, 0.006202655792236328, 0.005323520183563232, 0.005441696166992188, 0.005287456035614014, 0.005277567863464355, 0.005273568153381348, 0.0052427840232849125, 0.005251167774200439, 0.0052453441619873045, 0.005293439865112304, 0.0053619518280029295, 0.005234432220458984, 0.0052796158790588375, 0.005251967906951904, 0.005852287769317627, 0.0052641921043395995, 0.005362783908843994, 0.005321760177612305, 0.005248479843139649, 0.005256768226623535, 0.0052804799079895015, 0.005277632236480713, 0.005294079780578613, 0.005265408039093018, 0.0050054721832275394, 0.005298399925231933, 0.005291264057159423, 0.005324736118316651, 0.005264480113983155, 0.00527126407623291, 0.0052715520858764645, 0.0053688640594482424, 0.005295072078704834, 0.005272960186004639, 0.005263264179229736, 0.0052592320442199705, 0.005405504226684571, 0.005296127796173096, 0.00530841588973999, 0.005303359985351562, 0.005280640125274658, 0.005277632236480713, 0.005267199993133545, 0.005238815784454346, 0.005265024185180664, 0.005323423862457276, 0.005388288021087646, 0.0052507839202880855, 0.005300191879272461, 0.0052391037940979004, 0.0053218879699707036, 0.005350240230560303, 0.00571721601486206, 
0.005829408168792725, 0.0053303041458129885, 0.005288576126098633, 0.005261312007904053, 0.005378047943115235, 0.005275072097778321, 0.005282527923583984, 0.005344895839691162, 0.005292064189910889, 0.005282144069671631, 0.0052891201972961426, 0.005326528072357178, 0.005446656227111817, 0.005265408039093018, 0.005265696048736572, 0.005234399795532227, 0.0053424320220947265, 0.005237535953521728, 0.005314815998077393, 0.005262815952301025, 0.005248928070068359, 0.005269887924194336, 0.005279744148254394, 0.005264480113983155, 0.005258399963378907, 0.005306111812591553, 0.005287936210632324, 0.005315807819366455, 0.005392288208007813, 0.0052949762344360355, 0.005302239894866943, 0.005428864002227783, 0.005263296127319336, 0.00529033613204956, 0.005087071895599365, 0.005256351947784424, 0.005309279918670654, 0.005304448127746582, 0.005248288154602051, 0.005272319793701172, 0.005285888195037842, 0.005273407936096192, 0.0053045120239257814, 0.005283840179443359, 0.005301983833312989, 0.005305920124053955, 0.005337376117706299, 0.005392831802368164, 0.0054065918922424315, 0.005361792087554932, 0.005339136123657226, 0.005306367874145508, 0.005453472137451172, 0.005284512042999267, 0.00527843189239502, 0.005331935882568359, 0.005257408142089844, 0.00534444808959961, 0.0052739839553833006, 0.005324512004852295, 0.005294816017150879, 0.0052624640464782714, 0.005292416095733643, 0.005278016090393066, 0.005251071929931641, 0.005445824146270752, 0.005322559833526612, 0.005326752185821533, 0.005296224117279052, 0.005292064189910889, 0.005253087997436523, 0.005404672145843506, 0.0054496641159057614, 0.0053105602264404295, 0.005291039943695068, 0.005275936126708985, 0.0052551040649414064, 0.005321119785308838, 0.005317279815673828, 0.005276544094085693, 0.00532150411605835, 0.0053002238273620605, 0.005251071929931641, 0.005298304080963135, 0.005504896163940429, 0.005255167961120606, 0.005410816192626953, 0.005266560077667236, 0.005254303932189942, 0.0052873601913452145, 0.005478367805480957, 0.0052657279968261715, 0.005265408039093018, 0.005232639789581299, 0.005246975898742676, 0.005259263992309571, 0.005224448204040527, 0.004970560073852539, 0.0053069438934326175, 0.005267136096954346, 0.005275968074798584, 0.005295167922973633, 0.005258304119110107, 0.005271200180053711, 0.005286111831665039, 0.005285888195037842, 0.005277696132659912, 0.0053350400924682614, 0.0053309440612792965, 0.005273600101470947, 0.005285088062286377, 0.005274240016937256, 0.00527785587310791, 0.005287327766418457, 0.005256063938140869, 0.005422848224639893, 0.005257408142089844, 0.005277728080749512, 0.0052705597877502446, 0.005272287845611573, 0.005299967765808105, 0.005257472038269043, 0.005277503967285156, 0.005310207843780518, 0.005248479843139649, 0.005265471935272217, 0.0053853759765625, 0.005403647899627686, 0.005307295799255371, 0.0052939200401306155, 0.005273215770721436, 0.005287680149078369, 0.005293951988220215, 0.005234432220458984, 0.005266687870025635, 0.005275392055511475, 0.005285280227661133, 0.005282144069671631, 0.005245183944702149, 0.005296127796173096, 0.0052899842262268066, 0.005236288070678711, 0.005311967849731446, 0.005276959896087646, 0.005643968105316162, 0.005345280170440674, 0.005277696132659912, 0.0053142719268798825, 0.005271872043609619, 0.005271520137786865, 0.005240543842315674, 0.0052800321578979495, 0.0052713918685913085, 0.0053311681747436526, 0.005303679943084717, 0.005302080154418945, 0.005239552021026612, 0.005343232154846191, 0.005273600101470947, 0.005255424022674561, 
0.005102496147155761, 0.005310463905334473, 0.005273087978363037, 0.005372191905975342, 0.005273856163024902, 0.0052750401496887206, 0.005403200149536133, 0.0053628158569335935, 0.005385056018829346, 0.005469888210296631, 0.0053333439826965335, 0.005282080173492432, 0.005326560020446777, 0.0053078079223632816, 0.005276256084442139, 0.005327167987823486, 0.005305247783660889, 0.005259071826934815, 0.005398623943328858, 0.005277984142303467, 0.005249504089355469, 0.0052631359100341795, 0.005424575805664062, 0.005364448070526123, 0.005321983814239502, 0.00627839994430542, 0.005555967807769776, 0.005296063899993896, 0.0053300800323486325, 0.00529699182510376, 0.005254591941833496, 0.005290559768676758, 0.005306367874145508, 0.005261184215545655, 0.005277184009552002, 0.005281599998474121, 0.005253503799438477, 0.00531712007522583, 0.005277632236480713, 0.005259263992309571, 0.0054167680740356446, 0.005252831935882568, 0.00534335994720459, 0.005326240062713623, 0.005252031803131103, 0.005275648117065429, 0.005279744148254394, 0.005260992050170899, 0.005278016090393066, 0.0052715520858764645, 0.005303999900817871, 0.005316448211669922, 0.005312992095947266, 0.005246784210205078, 0.00532089614868164, 0.005268703937530517, 0.00527235221862793, 0.005269311904907227, 0.005265600204467773, 0.005486783981323243, 0.005276735782623291, 0.0052518401145935055, 0.005693600177764892, 0.0050096001625061035, 0.005281599998474121, 0.005253119945526123, 0.005279744148254394, 0.005260416030883789, 0.005296639919281006, 0.005259647846221924, 0.005273695945739746, 0.005289728164672851, 0.00530847978591919, 0.005291200160980224, 0.005269599914550781, 0.005286272048950195, 0.005335487842559814, 0.005712160110473633, 0.005325952053070068, 0.00538262414932251, 0.005273727893829345, 0.005293439865112304, 0.00530291223526001, 0.00528550386428833, 0.00533951997756958, 0.005271359920501709, 0.005275263786315918, 0.005284416198730469, 0.005302271842956543, 0.005267295837402344, 0.005309951782226562, 0.00525600004196167, 0.005300064086914063, 0.0053023681640625, 0.005277599811553955, 0.0053350400924682614, 0.005365087985992432, 0.005282464027404785, 0.005275167942047119, 0.005247456073760986, 0.005386240005493164, 0.00538588809967041, 0.005433152198791504, 0.005517856121063232, 0.0053002238273620605, 0.005311583995819092, 0.00525171184539795, 0.0052841281890869144, 0.00546560001373291, 0.005250688076019287, 0.005275872230529785, 0.0052640318870544435, 0.005298175811767578, 0.005287327766418457, 0.005399136066436767, 0.005275648117065429, 0.0052633600234985355, 0.005266751766204834, 0.005278495788574219, 0.005263264179229736, 0.005285215854644775, 0.005296703815460205, 0.005269599914550781, 0.005247007846832275, 0.005289663791656494, 0.005249311923980713, 0.00499507188796997, 0.005279744148254394, 0.005282911777496338, 0.005286816120147705, 0.005289631843566895, 0.005275231838226318, 0.005265888214111328, 0.005310751914978027, 0.005295968055725098, 0.005304480075836181, 0.005297376155853272, 0.005249887943267822, 0.0052849922180175785, 0.005405663967132568, 0.0052483839988708494, 0.005270336151123047, 0.005281439781188965, 0.0052633600234985355, 0.005303999900817871, 0.005366432189941406, 0.005271200180053711, 0.0052367358207702636, 0.005248991966247558, 0.005275455951690674, 0.005269728183746338, 0.005274752140045166, 0.005276544094085693, 0.005270976066589356, 0.0052490558624267575, 0.005296671867370605, 0.005265408039093018, 0.00522649621963501, 0.005400864124298096, 0.0052971520423889164, 0.00527023983001709, 
0.005281792163848877, 0.005244256019592285, 0.0052740797996521, 0.005282144069671631, 0.005269343852996826, 0.005291391849517822, 0.005333471775054932, 0.005253280162811279, 0.0053179202079772945, 0.005291039943695068, 0.005275328159332276, 0.0052938880920410155, 0.005243296146392822, 0.005293375968933105, 0.005255871772766113, 0.005258463859558106, 0.005310207843780518, 0.005276447772979737, 0.0052674880027771, 0.005280799865722656, 0.005264351844787598, 0.005254655838012695, 0.005281983852386474, 0.005257376194000244, 0.005279871940612793, 0.005433152198791504, 0.005275328159332276, 0.005302815914154053, 0.005000671863555908, 0.005245664119720459, 0.005273759841918945, 0.005270976066589356, 0.005250656127929688, 0.0052638077735900875, 0.005445919990539551, 0.005298175811767578, 0.00527180814743042, 0.0053122239112854, 0.005241119861602783, 0.005269504070281982, 0.00522982406616211, 0.005280511856079102, 0.005254335880279541, 0.005278528213500977, 0.0052674560546875, 0.005277696132659912, 0.005410399913787842, 0.005279295921325684, 0.005260128021240235, 0.005404352188110351, 0.005422560214996338, 0.005268576145172119, 0.0053450241088867185, 0.005265344142913819, 0.005275712013244629, 0.005234591960906983, 0.005306464195251465, 0.0052837119102478024, 0.005238175868988037, 0.005279520034790039, 0.005271711826324463, 0.005270400047302246, 0.005285791873931885, 0.005248799800872803, 0.0052820158004760745, 0.005322751998901368, 0.005386559963226318, 0.0052691841125488284, 0.005281951904296875, 0.005284832000732422, 0.005257184028625488, 0.005300511837005615, 0.0052639999389648435, 0.005244319915771485, 0.005304927825927734, 0.005299295902252197, 0.0052499518394470215, 0.005283840179443359, 0.005283840179443359, 0.005273407936096192, 0.005258912086486816, 0.005290527820587158, 0.0052531838417053224, 0.005242656230926514, 0.0052962880134582516, 0.005306367874145508, 0.005275392055511475, 0.005287456035614014, 0.0052623038291931155, 0.005273344039916993, 0.00548470401763916, 0.005009471893310547, 0.005242623805999756, 0.00528988790512085, 0.005271903991699219, 0.00524835205078125, 0.005280096054077149, 0.005224095821380615, 0.0052640318870544435, 0.005261312007904053, 0.0053096961975097655, 0.0052780799865722655, 0.005246399879455566, 0.005283008098602295, 0.005269375801086426, 0.005236639976501465, 0.005298143863677979, 0.005293280124664307, 0.005251455783843994, 0.005277120113372803, 0.005258207798004151, 0.0052674560546875, 0.005285888195037842, 0.0052408318519592285, 0.005265408039093018, 0.005285888195037842, 0.005262752056121826, 0.005268064022064209, 0.005306367874145508, 0.005232639789581299, 0.005292128086090088, 0.005285952091217041, 0.005280992031097412, 0.005263040065765381, 0.005249983787536621, 0.005273600101470947, 0.005255296230316162, 0.005451648235321045, 0.00527184009552002, 0.005590752124786377, 0.0052750401496887206, 0.005286176204681397, 0.005423423767089844, 0.005475359916687012, 0.005338079929351806, 0.00530841588973999, 0.005312511920928955, 0.005291264057159423, 0.00530406379699707, 0.005299424171447754, 0.005311520099639893, 0.005265312194824219, 0.005294047832489014, 0.005323967933654785, 0.00529750394821167, 0.005269472122192383, 0.005324672222137452, 0.005278207778930664, 0.005273600101470947, 0.005303584098815918, 0.005292768001556396, 0.005265344142913819, 0.005280064105987549, 0.0052600960731506346, 0.004978752136230469, 0.005302303791046142, 0.005263552188873291, 0.005242400169372559, 0.005275648117065429, 0.005286079883575439, 0.005271711826324463, 
0.005310272216796875, 0.005389472007751465, 0.005316544055938721, 0.005317567825317383, 0.005277696132659912, 0.005295904159545899, 0.005320256233215332, 0.005253791809082031, 0.005337088108062744, 0.005316512107849121, 0.0052573118209838865, 0.0052715520858764645, 0.00524835205078125, 0.005272223949432373, 0.005254464149475098, 0.005233344078063965, 0.005277056217193604, 0.005274240016937256, 0.005253119945526123, 0.0052709121704101565, 0.005245567798614502, 0.005251071929931641, 0.0052668161392211916, 0.005258016109466553, 0.0052650880813598635, 0.005263391971588135, 0.005226784229278565, 0.0053656001091003415, 0.005316031932830811, 0.0052921600341796875, 0.0052904319763183594, 0.00531001615524292, 0.005288383960723877, 0.0052899842262268066, 0.005344863891601563, 0.005301919937133789, 0.005309247970581054, 0.005340832233428955, 0.0052984638214111325, 0.005293824195861816, 0.005331007957458496, 0.005316864013671875, 0.005295711994171143, 0.0052657599449157716, 0.0053821439743042, 0.00536575984954834, 0.005324160099029541, 0.005341472148895264, 0.005343584060668946, 0.005287263870239258, 0.005345727920532227, 0.005298208236694336, 0.005269120216369629, 0.0052824001312255855, 0.005297279834747314, 0.005282656192779541, 0.005017407894134521, 0.005293471813201904, 0.005266208171844483, 0.005328896045684814, 0.0052820158004760745, 0.0052624959945678715, 0.005288352012634277, 0.005273727893829345, 0.005340672016143798, 0.005284448146820068, 0.005282112121582031, 0.0052650880813598635, 0.005303679943084717, 0.005255551815032959, 0.005277376174926757, 0.0052741761207580565, 0.005271711826324463, 0.005296256065368652, 0.005293344020843506, 0.005255616188049316, 0.005277344226837158, 0.005263711929321289, 0.005230720043182373, 0.005243807792663574, 0.005237823963165284, 0.005263264179229736, 0.005258783817291259, 0.0053536958694458, 0.005334527969360352, 0.005312863826751709, 0.005243296146392822, 0.005261312007904053, 0.005254271984100342, 0.0052490878105163575, 0.0052847681045532224, 0.005290143966674805, 0.00525487995147705, 0.0052941122055053715, 0.005285888195037842, 0.005299232006072998, 0.005302752017974853, 0.005288447856903076, 0.005269472122192383, 0.0052941122055053715, 0.0052510080337524415, 0.005264992237091064, 0.00528604793548584, 0.005259552001953125, 0.0052624640464782714, 0.00523356819152832, 0.0053043198585510255, 0.005355552196502685, 0.005283360004425049, 0.0056528639793396, 0.005304543972015381, 0.005277184009552002, 0.005335455894470215, 0.005277632236480713, 0.005289184093475342, 0.005330751895904541, 0.005356287956237793, 0.005349023818969727, 0.0053376641273498535, 0.0050503678321838375, 0.005381120204925537, 0.005280096054077149, 0.005289696216583252, 0.005281727790832519, 0.00535859203338623, 0.005306015968322754, 0.00530675220489502, 0.0052997441291809085, 0.005306816101074219, 0.005274975776672363, 0.005282464027404785, 0.005275360107421875, 0.0053005762100219725, 0.005301184177398681, 0.005303296089172363, 0.0053587198257446286, 0.005429728031158447, 0.0053636798858642575, 0.0053435201644897465, 0.005392576217651367, 0.00535753583908081, 0.0053448319435119625, 0.005425759792327881, 0.005407999992370605, 0.005376607894897461, 0.005317984104156494, 0.005278175830841065, 0.005314559936523438, 0.005296319961547852, 0.0052873921394348145, 0.00525161600112915, 0.005285183906555176, 0.00534339189529419, 0.005267072200775147, 0.005269472122192383, 0.005363935947418213, 0.0052763838768005375, 0.005355519771575928, 0.0052902398109436035, 0.005280608177185058, 
0.005309343814849854, 0.005302495956420898, 0.005264512062072754, 0.005498591899871826, 0.005317567825317383, 0.005316832065582275, 0.005317952156066895, 0.00525875186920166, 0.0052674880027771, 0.005286848068237305, 0.0052715520858764645, 0.005273600101470947, 0.005273824214935303, 0.005246655941009521, 0.0053078079223632816, 0.005280704021453858, 0.005371488094329834, 0.00528329610824585, 0.005290688037872315, 0.005267360210418701, 0.0052696318626403804, 0.005267039775848388, 0.005000160217285156, 0.005320864200592041, 0.005310304164886474, 0.005545983791351319, 0.005314432144165039, 0.005327040195465088, 0.005296063899993896, 0.005305888175964356, 0.005314976215362549, 0.005285696029663086, 0.005277440071105957, 0.0053294081687927245, 0.005281792163848877, 0.00532480001449585, 0.0053034558296203614, 0.005259935855865479, 0.005312704086303711, 0.005298143863677979, 0.005296448230743408, 0.005244832038879394, 0.005254720211029053, 0.005271679878234863, 0.005288032054901123, 0.005255199909210205, 0.00528553581237793, 0.005282112121582031, 0.0052408638000488285, 0.005310463905334473, 0.005287327766418457, 0.005240672111511231, 0.005274623870849609, 0.005236608028411865, 0.005262335777282715, 0.005288832187652588, 0.005270944118499756, 0.005263967990875244, 0.005304480075836181, 0.005433184146881103, 0.005327104091644287, 0.005346528053283691, 0.0053089599609375, 0.005302175998687744, 0.005322271823883056, 0.0052763519287109375, 0.005289023876190186, 0.0052845759391784665, 0.005298240184783936, 0.005320672035217285, 0.0053432960510253905, 0.005283584117889404, 0.005281311988830566, 0.005297183990478516, 0.005299839973449707, 0.0052837438583374025, 0.005299615859985352, 0.0053084478378295896, 0.005291935920715332, 0.005290847778320312, 0.005311935901641846, 0.005292287826538086, 0.005261600017547607, 0.005326848030090332, 0.005271967887878418, 0.005023583889007568, 0.005375743865966797, 0.005296448230743408, 0.0053944320678710935, 0.005288095951080323, 0.005310400009155274, 0.0052715520858764645, 0.0053043198585510255, 0.005269248008728028, 0.005363967895507813, 0.005314559936523438, 0.0053002238273620605, 0.00527129602432251, 0.0053119039535522465, 0.005282656192779541, 0.005326848030090332, 0.005308767795562744, 0.0052978239059448245, 0.005260608196258545, 0.005263264179229736, 0.005284512042999267, 0.005298304080963135, 0.005310624122619629, 0.0052650880813598635, 0.005291200160980224, 0.005311488151550293, 0.005277120113372803, 0.0052798399925231936, 0.005293504238128662, 0.005348351955413819, 0.00530841588973999, 0.0052914881706237794, 0.0052700481414794925, 0.005260479927062988, 0.005267936229705811, 0.0052657599449157716, 0.005306367874145508, 0.005277696132659912, 0.00528323221206665, 0.00528764820098877, 0.005303167819976807, 0.005300288200378418, 0.005295680046081543, 0.00529036808013916, 0.005281568050384522, 0.005279967784881592, 0.005269504070281982, 0.005338880062103271, 0.005267712116241455, 0.005294079780578613, 0.005281023979187012, 0.005267583847045899, 0.005268095970153808, 0.005320703983306885, 0.005231872081756592, 0.0053788161277770995, 0.005284160137176514, 0.005312128067016602, 0.005291456222534179, 0.0052848000526428225, 0.005313375949859619, 0.005272416114807129, 0.005282144069671631, 0.005099711894989013, 0.005310272216796875, 0.0053465600013732914, 0.005282559871673584, 0.00525110387802124, 0.005287903785705567, 0.0052624959945678715, 0.005314720153808594, 0.005284639835357666, 0.005303359985351562, 0.005294943809509277, 0.0052715520858764645, 
0.0053281598091125485, 0.00527023983001709, 0.005306464195251465, 0.005296031951904297, 0.0053023681640625, 0.005250432014465332, 0.005314432144165039, 0.005259679794311523, 0.005284031867980957, 0.005300064086914063, 0.005347104072570801, 0.005316160202026367, 0.005327744007110596, 0.005298175811767578, 0.005254496097564698, 0.005292543888092041, 0.005264959812164307, 0.0052919998168945315, 0.0053002238273620605, 0.005536384105682373, 0.005307616233825684, 0.005350175857543945, 0.005261312007904053, 0.00531660795211792, 0.0052899842262268066, 0.00531660795211792, 0.0052871999740600585, 0.005353504180908203, 0.005317311763763427, 0.005373248100280762, 0.005386943817138672, 0.0053125758171081545, 0.005300000190734864, 0.005341343879699707, 0.005250879764556885, 0.005310656070709229, 0.005282144069671631, 0.00527235221862793, 0.005288608074188232, 0.005298367977142334, 0.005318687915802002, 0.005259263992309571, 0.0053117117881774905, 0.00527023983001709, 0.005318719863891602, 0.0052549118995666505, 0.005368031978607178, 0.005315648078918457, 0.005311456203460694, 0.0053043198585510255, 0.005320864200592041, 0.005034336090087891, 0.005281599998474121, 0.005300640106201172, 0.005264736175537109, 0.005319327831268311, 0.0052856640815734865, 0.00525871992111206, 0.005279295921325684, 0.005319136142730713, 0.0052473278045654295, 0.005314655780792236, 0.005264639854431152, 0.005284671783447266, 0.0052856321334838864, 0.005300479888916016, 0.005283999919891358, 0.005285727977752686, 0.005275648117065429, 0.005269055843353272, 0.005284224033355713, 0.005238560199737549, 0.005328512191772461, 0.005331615924835205, 0.005395904064178467, 0.0052978239059448245, 0.005309343814849854, 0.005251296043395996, 0.005277344226837158, 0.005281919956207275, 0.005319839954376221, 0.005265439987182617, 0.005339776039123535, 0.0052440958023071286, 0.005290175914764404, 0.005267776012420654, 0.005295872211456299, 0.005251071929931641, 0.005241600036621094, 0.0053002238273620605, 0.005349440097808838, 0.0052977919578552244, 0.005343071937561035, 0.005320288181304931, 0.005290463924407959, 0.0053190717697143555, 0.005298175811767578, 0.005275872230529785, 0.005316383838653564, 0.0053302721977233885, 0.005266079902648926, 0.005337088108062744, 0.005462016105651855, 0.00532480001449585, 0.00537395191192627, 0.005384191989898681, 0.00528985595703125, 0.005325056076049804, 0.005271423816680909, 0.005319808006286621, 0.005297056198120117, 0.005303967952728272, 0.005287871837615967, 0.005279359817504883, 0.005049344062805176, 0.005292768001556396, 0.0053517122268676755, 0.005283775806427002, 0.005395711898803711, 0.005388544082641601, 0.005404831886291504, 0.005344927787780762, 0.005369696140289306, 0.005389376163482666, 0.00535536003112793, 0.005502975940704345, 0.005396480083465576, 0.005455008029937744, 0.0053862080574035645, 0.005402815818786621, 0.005347519874572754, 0.0052941122055053715, 0.005305920124053955, 0.005338208198547363, 0.005279359817504883, 0.005334239959716797, 0.005284832000732422, 0.00530614423751831, 0.0052575359344482426, 0.005302175998687744, 0.005326848030090332, 0.005283904075622558, 0.005250847816467285, 0.0053003840446472165, 0.005239999771118164, 0.005391039848327637, 0.005355552196502685, 0.005310207843780518, 0.0052837438583374025, 0.005280191898345947, 0.0053144640922546385, 0.005277791976928711, 0.005320767879486084, 0.005285823822021484, 0.005316512107849121, 0.005305952072143555, 0.005591551780700684, 0.005295199871063232, 0.0053587198257446286, 0.005287871837615967, 
0.0052789759635925295, 0.005298783779144287, 0.00528985595703125, 0.005255296230316162, 0.0053300800323486325, 0.00525603199005127, 0.0052789759635925295, 0.005289023876190186, 0.005231904029846191, 0.005305920124053955, 0.005302591800689697, 0.005311007976531982, 0.005258463859558106, 0.005427968025207519, 0.005289152145385743, 0.005341792106628418, 0.005310719966888428, 0.005097856044769287, 0.005257215976715088, 0.005400576114654541, 0.005295360088348388, 0.005268479824066162, 0.005326591968536377, 0.005258975982666015, 0.005352928161621094, 0.005322815895080567, 0.005303071975708008, 0.005320672035217285, 0.005314559936523438, 0.005270624160766601, 0.005329792022705078, 0.005307616233825684, 0.005323455810546875, 0.005283967971801758, 0.0053309440612792965, 0.0052975997924804685, 0.00535814380645752, 0.005302271842956543, 0.005275648117065429, 0.005293504238128662, 0.005270080089569092, 0.005410816192626953, 0.005326848030090332, 0.005576704025268555, 0.005279776096343994, 0.00531609582901001, 0.00528227186203003, 0.00530185604095459, 0.005284448146820068, 0.005337920188903809, 0.005272192001342773, 0.005446208000183105, 0.005287839889526367, 0.005312416076660156, 0.00528553581237793, 0.005366208076477051, 0.005304224014282226, 0.005322400093078613, 0.005273375988006592, 0.005325376033782959, 0.005289535999298095, 0.005273663997650146, 0.005284224033355713, 0.005343232154846191, 0.0054191360473632814, 0.005334464073181153, 0.0053414077758789065, 0.005267231941223144, 0.005364160060882569, 0.005286975860595703, 0.005252352237701416, 0.005287519931793213, 0.005349023818969727, 0.0053467521667480465, 0.005467199802398682, 0.005287871837615967, 0.005314208030700683, 0.005310848236083985, 0.005326560020446777, 0.005272928237915039]",tokens/s,188.00686628325704,,
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,825.749504,515.76832,0.0,113.246208,113.023488,s,1,9.81488671875,9.81488671875,0.0,9.81488671875,9.81488671875,9.81488671875,9.81488671875,[9.81488671875],,kWh,1.2150628758322075e-05,1.3329846746013897e-06,3.720002976000114e-06,1.720361640892358e-05,,MB,1307.365376,631.11168,0.0,213.909504,180.29568,s,38,0.25692774391174317,0.006761256418730083,0.00014062902310575903,0.00673145604133606,0.0069483616828918465,0.007099545526504517,0.007193917012214661,"[0.007087264060974121, 0.006831967830657959, 0.006732096195220947, 0.006723072052001953, 0.007102047920227051, 0.006626976013183594, 0.006730815887451172, 0.00670304012298584, 0.006770336151123047, 0.006703680038452149, 0.006655968189239502, 0.006737279891967774, 0.006803936004638672, 0.006686944007873535, 0.006622464179992676, 0.006716671943664551, 0.006637951850891113, 0.0070991039276123045, 0.006733664035797119, 0.006672160148620605, 0.006764287948608398, 0.006646368026733398, 0.006684639930725098, 0.006702784061431885, 0.006750559806823731, 0.006738560199737549, 0.00661897611618042, 
0.006737887859344482, 0.006737023830413818, 0.007247871875762939, 0.006732416152954101, 0.006715744018554687, 0.006716479778289795, 0.006733503818511963, 0.006659327983856201, 0.0067760319709777835, 0.006699007987976074, 0.006888832092285156]",tokens/s,37862.7852792015,kWh,1.9459268424165984e-07,2.146016801907225e-08,1.1050064395599912e-07,3.265534962167312e-07,tokens/kWh,783945059.4339821,MB,1321.152512,643.694592,0.0,226.492416,180.29824,s,38,9.961505126953123,0.2621448717619243,0.0036949637071659594,0.2611616516113281,0.26412334899902346,0.26639075164794923,0.27683386230468754,"[0.27063922119140627, 0.28047198486328123, 0.2600816650390625, 0.2598634033203125, 0.2656410217285156, 0.2639632263183594, 0.2602741394042969, 0.2625204772949219, 0.26306265258789063, 0.2605673522949219, 0.26327975463867187, 0.2624209289550781, 0.2604398803710937, 0.261541748046875, 0.26106170654296873, 0.25912890625, 0.25884649658203124, 0.2596551208496094, 0.2597965698242187, 0.2608614501953125, 0.2609620361328125, 0.2628399963378906, 0.2612615966796875, 0.2604556884765625, 0.26199639892578125, 0.2594569091796875, 0.26211767578125, 0.26153713989257815, 0.2613963928222656, 0.26145028686523436, 0.26401309204101564, 0.26090005493164065, 0.2633656005859375, 0.2604726867675781, 0.25970208740234374, 0.2606793212890625, 0.264380615234375, 0.2603998413085937]",tokens/s,240.32512853127858,kWh,7.538726307973407e-06,8.312801868011455e-07,3.0295457277282488e-06,1.1399552222502803e-05,tokens/kWh,5526532.864653887,,s,2394,9.942681949615451,0.004153167063331444,0.00014419287471071582,0.004120895862579346,0.004245424032211303,0.004321116757392883,0.004734249401092546,"[0.003935872077941894, 0.004292992115020752, 0.004222367763519287, 0.004209023952484131, 0.004255167961120606, 0.004280096054077149, 0.004240384101867676, 0.004217984199523926, 0.004162303924560547, 0.004141183853149414, 0.004139008045196534, 0.004177919864654541, 0.004208864212036133, 0.004226848125457764, 0.004261888027191162, 0.004218111991882325, 0.00419916820526123, 0.004295711994171143, 0.004271071910858154, 0.004237311840057373, 0.0041974720954895016, 0.004631616115570068, 0.004310751914978027, 0.00417961597442627, 0.004165472030639649, 0.004311808109283447, 0.004243328094482422, 0.004230847835540771, 0.004151391983032227, 0.004196703910827637, 0.004217951774597168, 0.0042997441291809085, 0.004339615821838379, 0.004265376091003418, 0.004247968196868897, 0.004218111991882325, 0.004209504127502441, 0.004337024211883545, 0.00422873592376709, 0.004219903945922852, 0.004235263824462891, 0.004206624031066895, 0.004462560176849365, 0.004188352108001709, 0.0041736321449279785, 0.00420198392868042, 0.00428275203704834, 0.0043392000198364256, 0.004448895931243897, 0.004397056102752686, 0.00451804780960083, 0.004562719821929932, 0.004681920051574707, 0.004589439868927002, 0.004540351867675782, 0.004515039920806885, 0.004548575878143311, 0.004415520191192627, 0.004337600231170654, 0.004320159912109375, 0.004261919975280762, 0.004249567985534668, 0.004228608131408691, 0.003929984092712402, 0.004415616035461426, 0.004297920227050781, 0.004249792098999023, 0.004188928127288818, 0.0042846078872680665, 0.00419152021408081, 0.004213151931762696, 0.0041751680374145506, 0.004170432090759278, 0.00415667200088501, 0.00419916820526123, 0.004290559768676758, 0.004282368183135986, 0.0042302398681640626, 0.004216991901397705, 0.004184832096099853, 0.0042427520751953125, 0.004452544212341309, 0.004400800228118897, 0.004391935825347901, 0.004403200149536133, 0.004406464099884033, 
0.004395391941070557, 0.004433375835418701, 0.00434281587600708, 0.004382495880126953, 0.004282368183135986, 0.004282368183135986, 0.004335616111755371, 0.004353568077087402, 0.00450812816619873, 0.004495584011077881, 0.005537343978881836, 0.005546207904815674, 0.00562499189376831, 0.0057762241363525394, 0.005769216060638428, 0.0057580161094665525, 0.005245888233184814, 0.004921567916870117, 0.004992800235748291, 0.004413440227508545, 0.004216032028198242, 0.005366559982299805, 0.004251455783843994, 0.004143295764923095, 0.004242976188659668, 0.0042005119323730465, 0.004155263900756836, 0.0041561598777771, 0.004148287773132324, 0.0042991042137146, 0.004159711837768555, 0.004190303802490234, 0.004165696144104004, 0.004152959823608399, 0.0041209602355957034, 0.004106143951416016, 0.0041370558738708495, 0.004113984107971191, 0.0041108160018920896, 0.004177887916564941, 0.003846143960952759, 0.0041576638221740726, 0.004121888160705567, 0.004132991790771485, 0.0041168642044067385, 0.004091648101806641, 0.004086016178131103, 0.0040797438621521, 0.004100063800811768, 0.004089824199676514, 0.004147039890289307, 0.0042046399116516114, 0.0040852479934692385, 0.00425216007232666, 0.004135231971740722, 0.00410591983795166, 0.004113984107971191, 0.0040841598510742184, 0.004118527889251709, 0.004103839874267578, 0.004089375972747803, 0.0040835199356079105, 0.004080416202545166, 0.004101823806762695, 0.0041726078987121586, 0.004124127864837647, 0.004225279808044434, 0.004128928184509277, 0.004071487903594971, 0.004162464141845703, 0.004096799850463867, 0.004074656009674073, 0.00407590389251709, 0.004077824115753173, 0.004073791980743409, 0.0040796160697937015, 0.004089312076568603, 0.00419484806060791, 0.004132991790771485, 0.004102015972137451, 0.0041512961387634275, 0.004188159942626953, 0.0041019201278686525, 0.004134528160095215, 0.004097983837127685, 0.0041478719711303715, 0.004116415977478027, 0.004171840190887451, 0.004251520156860352, 0.004124512195587158, 0.0042007360458374025, 0.004113471984863281, 0.004111296176910401, 0.004093984127044678, 0.004093920230865479, 0.004111455917358398, 0.004112607955932617, 0.004102848052978516, 0.004180191993713379, 0.004148863792419434, 0.0041149120330810545, 0.004189888000488281, 0.0041281599998474125, 0.0037969920635223388, 0.004112576007843018, 0.004091711997985839, 0.004116223812103271, 0.0040924158096313475, 0.004091135978698731, 0.00411084794998169, 0.004150815963745117, 0.004167967796325683, 0.004106656074523925, 0.004131840229034424, 0.004068128108978272, 0.004183807849884033, 0.004133120059967041, 0.004093952178955078, 0.004179359912872315, 0.004112448215484619, 0.00409984016418457, 0.0040878400802612305, 0.004081727981567383, 0.004127423763275146, 0.004083712100982666, 0.004159615993499756, 0.004093760013580322, 0.004130303859710693, 0.004106815814971924, 0.004133952140808105, 0.004078879833221436, 0.0041233282089233395, 0.004092895984649658, 0.004083712100982666, 0.00407747220993042, 0.004094016075134277, 0.004071455955505371, 0.004145120143890381, 0.004101632118225097, 0.004081503868103027, 0.0040926079750061035, 0.00410748815536499, 0.004253664016723633, 0.004111167907714844, 0.004097727775573731, 0.0041160001754760745, 0.004092544078826904, 0.004145311832427979, 0.004094207763671875, 0.004105984210968017, 0.0041512961387634275, 0.0041305599212646486, 0.004110591888427734, 0.0041248960494995116, 0.004171616077423096, 0.004263872146606445, 0.004128608226776123, 0.00410207986831665, 0.004139296054840088, 0.004168767929077149, 0.004172671794891358, 
0.004130815982818604, 0.004150303840637207, 0.004156383991241455, 0.004118656158447266, 0.004144512176513672, 0.0038707840442657473, 0.004215007781982422, 0.004131199836730957, 0.004163904190063477, 0.004173503875732422, 0.004126719951629639, 0.004154560089111328, 0.004111167907714844, 0.00414035177230835, 0.0041080322265625, 0.004112671852111816, 0.004113376140594482, 0.004109792232513428, 0.004122687816619873, 0.004182432174682617, 0.004093696117401123, 0.004143008232116699, 0.004101408004760742, 0.004145984172821045, 0.0040878081321716305, 0.004261343955993652, 0.005149280071258545, 0.0050126399993896485, 0.005165120124816894, 0.005286943912506103, 0.004218560218811035, 0.0042434558868408205, 0.004122144222259522, 0.0041212477684020995, 0.004146656036376953, 0.0041149120330810545, 0.004134304046630859, 0.0041149120330810545, 0.0041147198677062985, 0.004136672019958496, 0.004140192031860351, 0.004135839939117432, 0.00415123176574707, 0.004132287979125977, 0.004178463935852051, 0.004263840198516846, 0.004183487892150879, 0.004113088130950928, 0.004124671936035156, 0.0041454720497131345, 0.004166656017303467, 0.004143104076385498, 0.00413708782196045, 0.0041209602355957034, 0.004087999820709228, 0.004134560108184814, 0.004641119956970215, 0.0040999679565429685, 0.004143231868743896, 0.004098048210144043, 0.004105504035949707, 0.0041287999153137205, 0.004211135864257812, 0.00414131212234497, 0.0041077442169189455, 0.0041651840209960935, 0.00409603214263916, 0.00410044813156128, 0.0037888638973236086, 0.00413267183303833, 0.004104351997375489, 0.004105728149414062, 0.004095967769622803, 0.0040861120223999025, 0.004106592178344726, 0.00410207986831665, 0.004120480060577392, 0.004126719951629639, 0.004124991893768311, 0.004075295925140381, 0.004079520225524902, 0.004089856147766113, 0.004112415790557862, 0.004270143985748291, 0.004108191967010498, 0.004110047817230225, 0.0041145920753479, 0.004126688003540039, 0.004109888076782226, 0.004111135959625244, 0.004093760013580322, 0.0041039037704467774, 0.004098336219787598, 0.004073631763458252, 0.004089695930480957, 0.00414025592803955, 0.004153696060180664, 0.0041845121383667, 0.004139008045196534, 0.004088863849639892, 0.004123616218566895, 0.004370431900024414, 0.005193727970123291, 0.005301407814025879, 0.004147039890289307, 0.0041561598777771, 0.004131135940551758, 0.00410207986831665, 0.004118207931518555, 0.004708032131195069, 0.004667679786682129, 0.004160160064697265, 0.004138207912445068, 0.004159008026123047, 0.004191167831420898, 0.004191967964172363, 0.00415772819519043, 0.0041472001075744626, 0.004193952083587646, 0.004129312038421631, 0.004122432231903076, 0.004108287811279297, 0.004186111927032471, 0.004144383907318115, 0.0041131839752197264, 0.004165599822998047, 0.004349952220916748, 0.0041146240234375, 0.004129759788513184, 0.00409881591796875, 0.00409609603881836, 0.0038224320411682127, 0.004097536087036133, 0.004190720081329345, 0.004122623920440674, 0.004133088111877441, 0.0041244478225708, 0.004110367774963379, 0.0041140799522399905, 0.004090079784393311, 0.0041305599212646486, 0.004090208053588867, 0.00409929609298706, 0.004068031787872315, 0.0040962882041931156, 0.004101088047027588, 0.004151679992675781, 0.004177728176116943, 0.004098656177520752, 0.004094207763671875, 0.0041142401695251464, 0.004108287811279297, 0.0040954241752624515, 0.004119135856628418, 0.004186079978942871, 0.004196352005004883, 0.004202752113342285, 0.004216383934020996, 0.004351871967315674, 0.004194719791412354, 0.004224703788757324, 
0.0041305599212646486, 0.0041374402046203615, 0.004107679843902588, 0.004091872215270996, 0.0040997757911682125, 0.004163871765136719, 0.004092576026916504, 0.004085760116577148, 0.004099487781524658, 0.004088352203369141, 0.004077631950378418, 0.004075520038604737, 0.004179903984069824, 0.004284480094909668, 0.004087232112884522, 0.004071296215057373, 0.0041082239151000975, 0.004094719886779785, 0.004089536190032959, 0.0041025919914245606, 0.004062975883483887, 0.004069503784179688, 0.004286464214324951, 0.004110496044158935, 0.004067168235778808, 0.004155168056488037, 0.0040795841217041015, 0.00411075210571289, 0.004079552173614502, 0.004145055770874023, 0.004246528148651123, 0.004075679779052734, 0.004116511821746826, 0.0038809599876403807, 0.004077760219573975, 0.004091616153717041, 0.0042061758041381835, 0.004164095878601074, 0.004081567764282226, 0.004106143951416016, 0.00409929609298706, 0.0040869760513305665, 0.004093728065490723, 0.0041045122146606446, 0.004079455852508545, 0.004071263790130615, 0.004082880020141602, 0.0040821762084960935, 0.004127039909362793, 0.004128608226776123, 0.004174015998840332, 0.004165599822998047, 0.004291744232177735, 0.004369247913360596, 0.004558847904205322, 0.004427775859832763, 0.004334720134735107, 0.004284832000732422, 0.00420911979675293, 0.0042147841453552244, 0.004245471954345703, 0.004140768051147461, 0.0041365442276000974, 0.0041602239608764646, 0.004128767967224121, 0.00414515209197998, 0.004094016075134277, 0.00410207986831665, 0.004113599777221679, 0.0040965437889099125, 0.004079775810241699, 0.0040935039520263675, 0.004102367877960205, 0.004179711818695068, 0.0042256321907043455, 0.004130815982818604, 0.004109375953674317, 0.004170688152313232, 0.004098048210144043, 0.004124127864837647, 0.004112959861755371, 0.004142240047454834, 0.004117311954498291, 0.004146431922912598, 0.004143712043762207, 0.0041084480285644535, 0.004118495941162109, 0.004122655868530273, 0.004116479873657227, 0.004097695827484131, 0.00419865608215332, 0.004091648101806641, 0.004221280097961425, 0.00413263988494873, 0.004107935905456543, 0.004727424144744873, 0.004240992069244384, 0.004571072101593018, 0.004391392230987549, 0.004509151935577393, 0.0052128958702087405, 0.0044130878448486325, 0.004132415771484375, 0.004143199920654297, 0.00412937593460083, 0.004144480228424072, 0.0041110081672668455, 0.004115744113922119, 0.004148032188415527, 0.004118336200714111, 0.004123744010925293, 0.004132832050323486, 0.004238272190093994, 0.004130208015441895, 0.004156000137329101, 0.004093952178955078, 0.004114463806152344, 0.004074656009674073, 0.004057919979095459, 0.004090911865234375, 0.004081823825836182, 0.004242432117462158, 0.004099487781524658, 0.004072959899902344, 0.0041361918449401855, 0.004099808216094971, 0.004099232196807861, 0.004332448005676269, 0.004131968021392822, 0.004129216194152832, 0.004106560230255127, 0.004090976238250732, 0.00412662410736084, 0.0040908799171447754, 0.004093952178955078, 0.0041207680702209475, 0.004093760013580322, 0.004059072017669678, 0.0041179838180541994, 0.004114880084991455, 0.004135072231292725, 0.004442080020904541, 0.004106272220611573, 0.004222879886627198, 0.004092000007629395, 0.004091040134429931, 0.004124800205230713, 0.004074207782745361, 0.004110335826873779, 0.004089888095855713, 0.00410316801071167, 0.004117472171783447, 0.004097919940948486, 0.004194560050964355, 0.004244671821594238, 0.004104159832000733, 0.004086495876312256, 0.004056128025054931, 0.004062143802642823, 0.00379040002822876, 
0.004130368232727051, 0.0040787200927734375, 0.0040993280410766605, 0.004139008045196534, 0.004127232074737549, 0.0040878081321716305, 0.00415664005279541, 0.004081791877746582, 0.00417244815826416, 0.004083775997161866, 0.004106175899505615, 0.004122560024261475, 0.004109375953674317, 0.004097023963928223, 0.004136127948760986, 0.004095808029174804, 0.00410316801071167, 0.004110335826873779, 0.004192255973815918, 0.004202623844146728, 0.004077631950378418, 0.004140639781951904, 0.004135136127471924, 0.0040878081321716305, 0.004124671936035156, 0.004089983940124511, 0.004102015972137451, 0.004099552154541016, 0.004279007911682129, 0.004113440036773682, 0.004129119873046875, 0.004118912220001221, 0.004144320011138916, 0.004084095954895019, 0.004082143783569336, 0.004100128173828125, 0.00409600019454956, 0.004085760116577148, 0.004132319927215576, 0.004153696060180664, 0.004179168224334717, 0.004247744083404541, 0.004141856193542481, 0.004098048210144043, 0.004686079978942871, 0.00414844799041748, 0.004120255947113037, 0.00409657621383667, 0.004112768173217774, 0.004117824077606201, 0.004092448234558106, 0.004102208137512207, 0.004096191883087158, 0.004085696220397949, 0.004302720069885254, 0.0041512961387634275, 0.004128767967224121, 0.004130815982818604, 0.004100096225738525, 0.004163008213043213, 0.00408838415145874, 0.0040796160697937015, 0.0038957440853118895, 0.004161856174468994, 0.004120031833648682, 0.004254176139831543, 0.004140607833862305, 0.004110367774963379, 0.004186495780944825, 0.004118527889251709, 0.004204383850097656, 0.004140927791595459, 0.004145023822784424, 0.004115039825439453, 0.004134719848632813, 0.004112383842468262, 0.004218080043792725, 0.004131936073303223, 0.00413267183303833, 0.004214655876159668, 0.004216159820556641, 0.00420147180557251, 0.004253344058990478, 0.0042815361022949215, 0.004227231979370117, 0.004541088104248047, 0.004174047946929931, 0.004150047779083252, 0.004121119976043701, 0.004125152111053467, 0.004103199958801269, 0.004110335826873779, 0.00409600019454956, 0.004105440139770508, 0.004083424091339111, 0.004122719764709472, 0.0040852479934692385, 0.004235712051391602, 0.004209695816040039, 0.004206624031066895, 0.00433568000793457, 0.004254144191741943, 0.0041981439590454105, 0.0043424639701843265, 0.004245503902435303, 0.004225024223327637, 0.004351967811584472, 0.004193696022033691, 0.004160128116607666, 0.004172031879425049, 0.004194143772125244, 0.004142943859100342, 0.0041339840888977054, 0.004229119777679443, 0.004130879878997803, 0.004096384048461914, 0.004090688228607178, 0.004111104011535645, 0.004101088047027588, 0.004126880168914795, 0.004138847827911377, 0.004111551761627198, 0.004153279781341552, 0.004170623779296875, 0.0041799678802490236, 0.0038682239055633544, 0.004135359764099121, 0.004103839874267578, 0.004228447914123535, 0.004106400012969971, 0.004111199855804443, 0.004138656139373779, 0.00411027193069458, 0.004096415996551514, 0.004154719829559326, 0.004131231784820557, 0.004163839817047119, 0.004089983940124511, 0.004097919940948486, 0.004123680114746094, 0.004090847969055175, 0.004124671936035156, 0.004242656230926514, 0.004155648231506348, 0.004119071960449219, 0.004085760116577148, 0.004091807842254639, 0.00426636791229248, 0.004132383823394775, 0.004151487827301025, 0.004181439876556396, 0.004410208225250244, 0.004273375988006592, 0.004172512054443359, 0.0041468157768249515, 0.004151391983032227, 0.0041114878654479985, 0.004113344192504883, 0.004114431858062744, 0.004231167793273926, 0.004196512222290039, 
0.004185887813568115, 0.004106304168701172, 0.004247104167938233, 0.004129216194152832, 0.004089856147766113, 0.0041197118759155275, 0.0041068801879882814, 0.004128640174865723, 0.004102496147155762, 0.00414246416091919, 0.004117055892944336, 0.004104256153106689, 0.004222976207733154, 0.004374144077301026, 0.004168064117431641, 0.004169568061828614, 0.0041168642044067385, 0.004128543853759765, 0.00414515209197998, 0.004106048107147217, 0.004116479873657227, 0.004247776031494141, 0.004220895767211914, 0.00424351978302002, 0.004284351825714111, 0.004245312213897705, 0.004319231986999511, 0.003902719974517822, 0.0041495041847229, 0.004143616199493408, 0.004138336181640625, 0.004098720073699951, 0.004122879981994629, 0.004134175777435303, 0.004368864059448242, 0.004134751796722412, 0.0041166400909423825, 0.004091904163360595, 0.004204544067382812, 0.00429804801940918, 0.004158143997192383, 0.0040852479934692385, 0.004107935905456543, 0.004170591831207275, 0.004103360176086426, 0.00410265588760376, 0.004095839977264404, 0.004118815898895263, 0.004132192134857177, 0.00410700798034668, 0.004134399890899658, 0.004176383972167969, 0.004096159934997559, 0.004103328227996826, 0.004153408050537109, 0.004111072063446045, 0.004107423782348633, 0.004330143928527832, 0.004137152194976806, 0.004124671936035156, 0.004092031955718994, 0.004081664085388184, 0.004122208118438721, 0.004089248180389404, 0.004150144100189209, 0.004120672225952148, 0.0041082558631896976, 0.004157375812530517, 0.004093696117401123, 0.004071680068969727, 0.004120895862579346, 0.00411411190032959, 0.004093215942382812, 0.004103040218353272, 0.0040917439460754395, 0.004106239795684814, 0.004077568054199219, 0.004066527843475342, 0.00415721607208252, 0.004077631950378418, 0.004079679965972901, 0.004121471881866455, 0.004122560024261475, 0.004087007999420166, 0.004103295803070069, 0.004240384101867676, 0.004184768199920654, 0.004078911781311035, 0.0040967359542846676, 0.004171103954315186, 0.003898560047149658, 0.004141119956970215, 0.004101151943206787, 0.0041571521759033205, 0.0041073598861694335, 0.0041452798843383785, 0.0041025600433349605, 0.004132832050323486, 0.004143519878387451, 0.004108287811279297, 0.004103231906890869, 0.004133183956146241, 0.004104159832000733, 0.004184768199920654, 0.004087999820709228, 0.004115392208099365, 0.004141503810882568, 0.004079967975616455, 0.0051221117973327636, 0.004354047775268555, 0.004110335826873779, 0.004122911930084228, 0.004111551761627198, 0.004308928012847901, 0.004274784088134765, 0.004204288005828858, 0.004118783950805664, 0.004111743927001953, 0.004100736141204834, 0.004138495922088623, 0.004083456039428711, 0.0041049599647521974, 0.004118688106536865, 0.004097887992858887, 0.004108287811279297, 0.004120384216308594, 0.004085087776184082, 0.004117343902587891, 0.00408076810836792, 0.004180863857269287, 0.0041472001075744626, 0.00422819185256958, 0.004109024047851562, 0.004129280090332031, 0.004120255947113037, 0.0041157760620117185, 0.004108992099761963, 0.004100255966186524, 0.0041420478820800785, 0.0040887041091918945, 0.0041077442169189455, 0.004112927913665771, 0.004155488014221192, 0.00407747220993042, 0.0041140480041503905, 0.0040861120223999025, 0.004114208221435547, 0.004139359951019287, 0.00409116792678833, 0.0041212158203125, 0.004074944019317627, 0.004268320083618164, 0.0041389760971069335, 0.0038563520908355712, 0.004154719829559326, 0.004145088195800781, 0.004113152027130127, 0.004327360153198242, 0.0041288318634033205, 0.004109504222869873, 0.0041025919914245606, 
0.004139391899108886, 0.00411414384841919, 0.0040962882041931156, 0.004149248123168945, 0.0041205759048461915, 0.004130815982818604, 0.0041775679588317875, 0.004102240085601807, 0.004135168075561523, 0.004097951889038086, 0.004220863819122314, 0.004163904190063477, 0.004094111919403076, 0.004134719848632813, 0.00412659215927124, 0.004185855865478516, 0.005164415836334229, 0.004119423866271973, 0.004165631771087646, 0.004200448036193847, 0.004126719951629639, 0.004106239795684814, 0.004116479873657227, 0.004074528217315674, 0.004113120079040527, 0.004114687919616699, 0.004167679786682129, 0.004204415798187256, 0.004130943775177002, 0.004083712100982666, 0.004097760200500488, 0.004088448047637939, 0.0040850238800048825, 0.0041209602355957034, 0.004105279922485352, 0.004112607955932617, 0.004085504055023194, 0.004088768005371094, 0.004081151962280274, 0.004061728000640869, 0.0040711359977722164, 0.004087935924530029, 0.004190368175506592, 0.0041166720390319825, 0.004103295803070069, 0.004076223850250244, 0.0041468157768249515, 0.004086400032043457, 0.004093696117401123, 0.004097824096679688, 0.004085984230041504, 0.004085855960845947, 0.004090112209320068, 0.004136608123779297, 0.004150815963745117, 0.0038645761013031007, 0.004163584232330322, 0.0041576638221740726, 0.004125951766967773, 0.004094272136688232, 0.004101471900939941, 0.004098720073699951, 0.004122943878173828, 0.004085663795471191, 0.004157440185546875, 0.004089600086212159, 0.004079872131347656, 0.004177311897277832, 0.004115295886993408, 0.00409065580368042, 0.004105184078216553, 0.00411414384841919, 0.004108575820922852, 0.004097919940948486, 0.004116767883300781, 0.004109632015228271, 0.0040965437889099125, 0.00408512020111084, 0.004084352016448974, 0.004218239784240723, 0.004094175815582276, 0.004104063987731933, 0.0041018881797790525, 0.004080416202545166, 0.004126719951629639, 0.004095744132995605, 0.004090464115142823, 0.004095647811889648, 0.004114431858062744, 0.004110335826873779, 0.004181344032287598, 0.004116799831390381, 0.0040813441276550295, 0.004104864120483398, 0.004113759994506836, 0.0040843839645385745, 0.004084959983825683, 0.0040887041091918945, 0.004104095935821533, 0.0041287999153137205, 0.004175839900970459, 0.004104447841644287, 0.0040648641586303715, 0.004093183994293213, 0.004102431774139405, 0.004078207969665527, 0.004087967872619629, 0.004080671787261963, 0.004080416202545166, 0.004166975975036621, 0.004110239982604981, 0.0041244797706604, 0.0040867838859558105, 0.004085824012756347, 0.004085152149200439, 0.004110879898071289, 0.004075520038604737, 0.004164671897888184, 0.0038930559158325195, 0.004114528179168701, 0.00410207986831665, 0.004093984127044678, 0.004100575923919678, 0.004118080139160156, 0.004141536235809326, 0.004107935905456543, 0.004115871906280518, 0.004113408088684082, 0.004106143951416016, 0.004098048210144043, 0.0040878081321716305, 0.004071104049682617, 0.004088128089904785, 0.004095808029174804, 0.004087967872619629, 0.0040878400802612305, 0.004112383842468262, 0.004072832107543945, 0.004078112125396729, 0.004078911781311035, 0.0040860800743103024, 0.0040797438621521, 0.0041027522087097165, 0.004208608150482178, 0.00411030387878418, 0.004103936195373535, 0.004098112106323242, 0.004095839977264404, 0.004105567932128906, 0.00410646390914917, 0.004091872215270996, 0.0041006717681884765, 0.004112768173217774, 0.0040997757911682125, 0.004104191780090332, 0.004081664085388184, 0.004093567848205567, 0.004100704193115234, 0.004087232112884522, 0.004075647830963135, 
0.0040432000160217285, 0.004070271968841552, 0.004062111854553223, 0.004093952178955078, 0.0040850238800048825, 0.004151648044586182, 0.004203072071075439, 0.004175680160522461, 0.004134912014007569, 0.004154911994934082, 0.004116960048675537, 0.004089856147766113, 0.0040932798385620115, 0.0041110081672668455, 0.004116479873657227, 0.004120863914489746, 0.004142816066741943, 0.004107295989990234, 0.004092895984649658, 0.004092959880828857, 0.004092895984649658, 0.003932159900665283, 0.004102208137512207, 0.004104127883911133, 0.004118752002716064, 0.004095776081085205, 0.004106239795684814, 0.004101119995117188, 0.004131840229034424, 0.0041015357971191406, 0.004123231887817383, 0.004087264060974121, 0.004089600086212159, 0.00409878396987915, 0.004086880207061767, 0.004084703922271729, 0.0040796160697937015, 0.004087391853332519, 0.0040776958465576175, 0.004071712017059326, 0.004218463897705078, 0.00419212818145752, 0.004106783866882324, 0.004112383842468262, 0.004083839893341065, 0.004103551864624024, 0.004123136043548584, 0.004189888000488281, 0.00421724796295166, 0.004165535926818848, 0.004174880027770996, 0.004155712127685547, 0.0041151041984558104, 0.0041175041198730465, 0.004139679908752442, 0.004083903789520263, 0.004086143970489502, 0.004094111919403076, 0.004191872119903565, 0.004124671936035156, 0.004081791877746582, 0.00407539176940918, 0.004075104236602783, 0.0041016960144042965, 0.004117343902587891, 0.004113791942596435, 0.004105152130126953, 0.004085311889648438, 0.004087039947509766, 0.0040661759376525875, 0.004085760116577148, 0.004074560165405273, 0.004095967769622803, 0.004092895984649658, 0.00406499195098877, 0.004231040000915527, 0.004114175796508789, 0.004065824031829834, 0.004106400012969971, 0.004214752197265625, 0.004098048210144043, 0.004173600196838379, 0.004169951915740966, 0.004176000118255615, 0.0039055359363555906, 0.00424454402923584, 0.0041437759399414065, 0.0041372480392456055, 0.004120160102844239, 0.004098656177520752, 0.0041080322265625, 0.0041365442276000974, 0.004116960048675537, 0.0041270718574523925, 0.004115712165832519, 0.00435148811340332, 0.0041212477684020995, 0.004118783950805664, 0.004216224193572998, 0.00414576005935669, 0.004107679843902588, 0.004094751834869384, 0.004125919818878174, 0.004182496070861816, 0.0040852479934692385, 0.004068031787872315, 0.004128704071044922, 0.004085760116577148, 0.004058176040649414, 0.004078783988952637, 0.00405628776550293, 0.004205344200134277, 0.004078656196594238, 0.004096191883087158, 0.0040821762084960935, 0.004098048210144043, 0.00407587194442749, 0.004066495895385742, 0.00405347204208374, 0.004102143764495849, 0.00406547212600708, 0.004095808029174804, 0.004073472023010254, 0.0041883840560913085, 0.00411625623703003, 0.004093344211578369, 0.004076096057891845, 0.004089888095855713, 0.004087039947509766, 0.0041027522087097165, 0.004074687957763672, 0.004078656196594238, 0.0040950078964233395, 0.004103040218353272, 0.004167263984680176, 0.004139008045196534, 0.004094111919403076, 0.0041142401695251464, 0.004112832069396973, 0.00410643196105957, 0.004097856044769287, 0.004076543807983398, 0.004436160087585449, 0.004090688228607178, 0.004183807849884033, 0.004092160224914551, 0.0041205759048461915, 0.0038256640434265136, 0.0041142401695251464, 0.004130655765533447, 0.004110176086425782, 0.0041082558631896976, 0.004223616123199463, 0.004091711997985839, 0.004096320152282715, 0.004094048023223877, 0.004136799812316894, 0.004108128070831299, 0.004118527889251709, 0.004089375972747803, 0.004094431877136231, 
0.004099679946899414, 0.00413478422164917, 0.004137792110443115, 0.004097536087036133, 0.004087391853332519, 0.004135839939117432, 0.004099808216094971, 0.00410207986831665, 0.004085855960845947, 0.004087391853332519, 0.004120480060577392, 0.004080095767974854, 0.004274015903472901, 0.004102303981781006, 0.004116479873657227, 0.00427785587310791, 0.0041025600433349605, 0.004114304065704346, 0.004141215801239014, 0.004824927806854248, 0.004104479789733887, 0.00411414384841919, 0.004093791961669922, 0.004143392086029053, 0.004111839771270752, 0.004110879898071289, 0.004101280212402344, 0.004105055809020996, 0.0040796160697937015, 0.004083712100982666, 0.00406496000289917, 0.004114816188812256, 0.00406931209564209, 0.0040958719253540036, 0.004548736095428467, 0.004136640071868897, 0.004133152008056641, 0.004316832065582275, 0.004138783931732178, 0.004137407779693603, 0.004153600215911865, 0.004125664234161377, 0.004152128219604492, 0.004073599815368653, 0.0041084480285644535, 0.0040850558280944825, 0.004129280090332031, 0.004083263874053955, 0.004098752021789551, 0.0038351678848266603, 0.004103104114532471, 0.004084832191467285, 0.004135647773742676, 0.004116384029388428, 0.004126783847808838, 0.004092160224914551, 0.004100096225738525, 0.0040850238800048825, 0.004083648204803467, 0.0040793919563293455, 0.004074240207672119, 0.004103807926177978, 0.004111968040466309, 0.0041090879440307615, 0.004081759929656982, 0.004105567932128906, 0.00408019208908081, 0.004089856147766113, 0.0040932798385620115, 0.004090528011322022, 0.004225279808044434, 0.004128511905670166, 0.004142560005187988, 0.0042336640357971194, 0.004124671936035156, 0.004116352081298828, 0.0041108160018920896, 0.0040711679458618165, 0.004126719951629639, 0.004105728149414062, 0.004231679916381836, 0.004108287811279297, 0.004075168132781982, 0.00409225606918335, 0.0040796160697937015, 0.004095647811889648, 0.004083104133605957, 0.004461535930633545, 0.00420249605178833, 0.004108287811279297, 0.004290815830230713, 0.0041810240745544435, 0.004149727821350098, 0.004120800018310547, 0.004141056060791016, 0.004136960029602051, 0.004157599925994873, 0.004314976215362549, 0.004155392169952392, 0.004140096187591553, 0.004123583793640136, 0.00415334415435791, 0.004110335826873779, 0.004114431858062744, 0.004119743824005127, 0.0044757437705993655, 0.0041199040412902834, 0.004094912052154541, 0.004244607925415039, 0.0041489281654357914, 0.004138016223907471, 0.004140895843505859, 0.0038051838874816894, 0.004403103828430175, 0.004257120132446289, 0.0041296639442443845, 0.0041593599319458005, 0.00414515209197998, 0.004122367858886719, 0.0041372160911560055, 0.00410041618347168, 0.004135839939117432, 0.004100895881652832, 0.004094272136688232, 0.004116159915924072, 0.004098048210144043, 0.004124671936035156, 0.0041205759048461915, 0.004234272003173828, 0.0041329278945922854, 0.00450764799118042, 0.004141983985900879, 0.00417900800704956, 0.004168799877166748, 0.004120416164398193, 0.004114431858062744, 0.004136960029602051, 0.004122848033905029, 0.004089632034301758, 0.004132031917572021, 0.004076352119445801, 0.0040991039276123045, 0.004092127799987793, 0.004246143817901611, 0.004266111850738525, 0.004110623836517334, 0.0041123518943786625, 0.004120319843292236, 0.00416153621673584, 0.004128767967224121, 0.004118624210357666, 0.004218783855438233, 0.00427785587310791, 0.004139232158660889, 0.004104288101196289, 0.0041160001754760745, 0.004229695796966553, 0.0041512961387634275, 0.004107295989990234, 0.004127711772918701, 0.004108287811279297, 
0.004110367774963379, 0.00410595178604126, 0.004108543872833252, 0.004126719951629639, 0.004099520206451416, 0.004167327880859375, 0.005360544204711914, 0.0041422080993652344, 0.004260735988616943, 0.004130815982818604, 0.004139008045196534, 0.0041238398551940916, 0.004109248161315918, 0.004110208034515381, 0.003819519996643066, 0.004110335826873779, 0.004134143829345703, 0.004130655765533447, 0.004109216213226318, 0.004108287811279297, 0.004110079765319824, 0.00413708782196045, 0.004184192180633545, 0.004158783912658692, 0.004157919883728028, 0.004167488098144531, 0.004147903919219971, 0.004132575988769531, 0.004119679927825927, 0.004129248142242432, 0.004120863914489746, 0.004144735813140869, 0.004129312038421631, 0.0041205759048461915, 0.00411414384841919, 0.004133152008056641, 0.004099520206451416, 0.004084288120269775, 0.004088895797729492, 0.004089888095855713, 0.0040969281196594235, 0.004138815879821778, 0.004310656070709229, 0.004176447868347168, 0.004103839874267578, 0.0041166400909423825, 0.004126272201538086, 0.0041502399444580075, 0.0041365442276000974, 0.004140160083770752, 0.0041194877624511714, 0.004161791801452637, 0.004142848014831543, 0.004210944175720215, 0.004220799922943115, 0.004138175964355469, 0.004149663925170899, 0.004118815898895263, 0.0041595520973205565, 0.00416319990158081, 0.004167103767395019, 0.004166751861572266, 0.004140704154968262, 0.004141183853149414, 0.004132575988769531, 0.004099423885345459, 0.004129087924957276, 0.004135839939117432, 0.004138720035552978, 0.004300735950469971, 0.004188223838806153, 0.004130815982818604, 0.004169055938720703, 0.004173535823822022, 0.004195263862609863, 0.004134912014007569, 0.00414515209197998, 0.003840735912322998, 0.004220672130584717, 0.004126368045806884, 0.004149856090545655, 0.004109824180603027, 0.004101823806762695, 0.004114783763885498, 0.00409984016418457, 0.004095808029174804, 0.004112544059753418, 0.0041315841674804685, 0.00409935998916626, 0.004087711811065674, 0.00406547212600708, 0.004102687835693359, 0.004097439765930176, 0.004092576026916504, 0.004132415771484375, 0.004108767986297607, 0.004085184097290039, 0.004103040218353272, 0.004109663963317871, 0.004085824012756347, 0.004274240016937256, 0.004167808055877686, 0.004110688209533691, 0.004153088092803955, 0.004107872009277344, 0.0041250882148742675, 0.004101568222045898, 0.004157951831817627, 0.004091328144073487, 0.004116223812103271, 0.004139904022216797, 0.004095359802246094, 0.004096864223480224, 0.004109920024871826, 0.004153120040893555, 0.004148896217346191, 0.004117248058319092, 0.004114431858062744, 0.004110335826873779, 0.0041001601219177244, 0.004094912052154541, 0.00427126407623291, 0.0041485438346862795, 0.004093920230865479, 0.004082240104675293, 0.004136703968048096, 0.004113823890686035, 0.004086815834045411, 0.004095456123352051, 0.0041147518157958985, 0.0041736640930175786, 0.004225120067596435, 0.004215199947357178, 0.0042165122032165525, 0.004298751831054688, 0.00421999979019165, 0.004156320095062256, 0.004134880065917969, 0.004126751899719238, 0.004101344108581543, 0.0037962241172790527, 0.004117472171783447, 0.004112160205841065, 0.004116032123565674, 0.004104640007019043, 0.004118527889251709, 0.004098048210144043, 0.004121823787689209, 0.004104991912841797, 0.004139008045196534, 0.0041428799629211425, 0.004137184143066406, 0.004112383842468262, 0.004231040000915527, 0.004151423931121826, 0.004142111778259278, 0.004257984161376953, 0.004304831981658936, 0.004138912200927734, 0.00416864013671875, 0.004126719951629639, 
0.004143104076385498, 0.004142111778259278, 0.004135839939117432, 0.004225024223327637, 0.004120672225952148, 0.004134047985076905, 0.004168511867523194, 0.004171840190887451, 0.004171296119689942, 0.0041456642150878905, 0.004261856079101563, 0.0042016000747680665, 0.004186592102050781, 0.004323071956634521, 0.004131423950195313, 0.004122623920440674, 0.004173823833465576, 0.004268032073974609, 0.004468192100524902, 0.004169504165649414, 0.004149695873260498, 0.004170048236846924, 0.004113440036773682, 0.004156383991241455, 0.004182015895843506, 0.004107776165008545, 0.004105760097503662, 0.004117055892944336, 0.004250016212463379, 0.00414086389541626, 0.0041224641799926754, 0.004093696117401123, 0.004115039825439453, 0.004108287811279297, 0.004128960132598877, 0.004177184104919434, 0.004089695930480957, 0.00416380786895752, 0.004094111919403076, 0.004105663776397705, 0.004097023963928223, 0.004108160018920898, 0.003808864116668701, 0.004100512027740479, 0.004126368045806884, 0.004085567951202393, 0.004119071960449219, 0.0041242241859436035, 0.004098495960235596, 0.0041511039733886715, 0.0041077442169189455, 0.004095776081085205, 0.004098495960235596, 0.004089600086212159, 0.004119296073913575, 0.004102143764495849, 0.004114463806152344, 0.004245312213897705, 0.004120800018310547, 0.0041224961280822755, 0.004134687900543213, 0.004084000110626221, 0.004225024223327637, 0.004083807945251465, 0.004104383945465088, 0.004101856231689453, 0.004110335826873779, 0.004085760116577148, 0.004071424007415772, 0.0040796160697937015, 0.004093408107757568, 0.0041231679916381835, 0.004295711994171143, 0.004101088047027588, 0.004088031768798828, 0.004132512092590332, 0.0041025600433349605, 0.004116191864013672, 0.004100096225738525, 0.004159679889678955, 0.004119359970092774, 0.004078591823577881, 0.004124351978302002, 0.004074848175048828, 0.004056032180786133, 0.004104191780090332, 0.0040797438621521, 0.00407539176940918, 0.00409600019454956, 0.004145120143890381, 0.00408512020111084, 0.004105120182037353, 0.00410972785949707, 0.004104415893554688, 0.004089119911193847, 0.0040989117622375485, 0.00419817590713501, 0.004085984230041504, 0.0040878081321716305, 0.004090047836303711, 0.004282464027404785, 0.004114336013793946, 0.004126527786254883, 0.004108287811279297, 0.004091231822967529, 0.003973920106887818, 0.00413808012008667, 0.004094912052154541, 0.004080832004547119, 0.004076191902160645, 0.004077536106109619, 0.004104383945465088, 0.004101856231689453, 0.004097983837127685, 0.004120160102844239, 0.00408019208908081, 0.004101344108581543, 0.004092160224914551, 0.004082560062408447, 0.004113247871398926, 0.004078400135040283, 0.004102143764495849, 0.004091487884521484, 0.004096447944641113, 0.004084991931915283, 0.004087679862976075, 0.004086624145507813, 0.004139008045196534, 0.004318719863891602, 0.004317535877227783, 0.004339871883392334, 0.004159488201141357, 0.00417299222946167, 0.0040950078964233395, 0.00408348798751831, 0.004110335826873779, 0.004081759929656982, 0.0040952000617980955, 0.004096255779266357, 0.004092351913452148, 0.00414518404006958, 0.004100063800811768, 0.0040878081321716305, 0.004114431858062744, 0.00414515209197998, 0.0041164479255676265, 0.004085792064666748, 0.004130815982818604, 0.004138847827911377, 0.00413097620010376, 0.00413100814819336, 0.0040869760513305665, 0.004122528076171875, 0.004437952041625976, 0.004353087902069092, 0.0043435201644897465, 0.004304575920104981, 0.004304704189300537, 0.004305408000946045, 0.004362495899200439, 0.004291456222534179, 
0.004268928050994873, 0.004216127872467041, 0.004179711818695068, 0.004209887981414795, 0.004148960113525391, 0.004130815982818604, 0.00417196798324585, 0.0038428800106048585, 0.004118336200714111, 0.004115839958190918, 0.0041576638221740726, 0.004125152111053467, 0.004124671936035156, 0.004116127967834473, 0.004106944084167481, 0.004124320030212402, 0.004120128154754639, 0.004198847770690918, 0.004175871849060059, 0.004197760105133057, 0.004225152015686035, 0.00419481611251831, 0.0041799678802490236, 0.004177919864654541, 0.004132095813751221, 0.004117248058319092, 0.004259840011596679, 0.00413040018081665, 0.00415990400314331, 0.004141056060791016, 0.004143231868743896, 0.004144256114959717, 0.004121344089508056, 0.004098048210144043, 0.0041205759048461915, 0.004089856147766113, 0.004112544059753418, 0.004122655868530273, 0.004091775894165039, 0.004104127883911133, 0.004108287811279297, 0.004092160224914551, 0.0041262078285217284, 0.00409830379486084, 0.004291840076446533, 0.004254655838012695, 0.004192063808441162, 0.004212800025939941, 0.004199903964996338, 0.004200960159301758, 0.004206495761871338, 0.004173888206481934, 0.00416153621673584, 0.004142111778259278, 0.00414188814163208, 0.004144927978515625, 0.004131199836730957, 0.00414243221282959, 0.004124959945678711, 0.0041003842353820805, 0.004106336116790772, 0.004226336002349854, 0.004188704013824463, 0.004112576007843018, 0.0041018881797790525, 0.004100351810455322, 0.004118527889251709, 0.004235263824462891, 0.004116479873657227, 0.004108287811279297, 0.003875679969787598, 0.004173120021820068, 0.004155871868133545, 0.0042147841453552244, 0.004268127918243408, 0.004284192085266114, 0.004320960044860839, 0.0042659521102905275, 0.004223552227020264, 0.004196512222290039, 0.004146048069000244, 0.0041595840454101566, 0.004123424053192139, 0.004132863998413086, 0.004138239860534668, 0.004133632183074951, 0.0041142721176147465, 0.00413097620010376, 0.004128767967224121, 0.00413862419128418, 0.004106624126434326, 0.0041156158447265626, 0.004367199897766114, 0.004105279922485352, 0.0041127362251281736, 0.004126880168914795, 0.004089503765106201, 0.004083775997161866, 0.004116479873657227, 0.004208799839019776, 0.0041682558059692384, 0.0041472001075744626, 0.004321407794952392, 0.004147071838378907, 0.004144480228424072, 0.004344480037689209, 0.004110335826873779, 0.004148255825042724, 0.004113152027130127, 0.004107903957366943, 0.004201056003570557, 0.004136832237243653, 0.004057216167449951, 0.004081247806549072, 0.0040759358406066894, 0.00408732795715332, 0.004069791793823242, 0.004101759910583496, 0.004056735992431641, 0.0040718398094177245, 0.0041025919914245606, 0.004119743824005127, 0.004091839790344238, 0.004094783782958984, 0.004103551864624024, 0.004092000007629395, 0.004094304084777832, 0.004089695930480957, 0.004104544162750244, 0.004065279960632324, 0.00409116792678833, 0.004084512233734131, 0.004284031867980957, 0.003950592041015625, 0.0041892480850219726, 0.004222943782806397, 0.0041420478820800785, 0.004136703968048096, 0.004102208137512207, 0.004101664066314697, 0.004102143764495849, 0.0041084480285644535, 0.004119040012359619, 0.004087456226348877, 0.004088160037994385, 0.004099071979522705, 0.004248703956604004, 0.004124544143676758, 0.004105311870574951, 0.00412553596496582, 0.004069471836090088, 0.0040999999046325685, 0.004092127799987793, 0.004409215927124024, 0.004874527931213379, 0.004099711894989013, 0.004092063903808594, 0.0041307201385498045, 0.004134016036987305, 0.004282303810119629, 0.004139808177947998, 
0.004104351997375489, 0.004122623920440674, 0.004125728130340576, 0.004133855819702149, 0.004098048210144043, 0.004110335826873779, 0.004159264087677002, 0.004102464199066162, 0.00410591983795166, 0.004092351913452148, 0.004096864223480224, 0.004125631809234619, 0.004126719951629639, 0.004118112087249756, 0.004112800121307373, 0.004130815982818604, 0.004114431858062744, 0.004132863998413086, 0.004111839771270752, 0.004120351791381836, 0.004117055892944336, 0.0041166720390319825, 0.004146944046020508, 0.004298336029052734, 0.004135583877563477, 0.004173823833465576, 0.004108287811279297, 0.004124671936035156, 0.004108320236206055, 0.0041389760971069335, 0.004142335891723633, 0.004133632183074951, 0.0041281280517578125, 0.004135744094848633, 0.0041142401695251464, 0.003832832098007202, 0.004188320159912109, 0.00421017599105835, 0.004222815990447998, 0.004254079818725586, 0.004181375980377197, 0.004155424118041992, 0.004188223838806153, 0.0041456961631774905, 0.004152480125427246, 0.004115295886993408, 0.004125919818878174, 0.004126815795898437, 0.004135168075561523, 0.004157887935638428, 0.004244927883148193, 0.004264512062072754, 0.004333568096160889, 0.0043151359558105465, 0.004323008060455322, 0.004329887866973877, 0.004563936233520508, 0.004338143825531006, 0.004294271945953369, 0.004241375923156738, 0.004248064041137695, 0.00418617582321167, 0.004197023868560791, 0.004150944232940674, 0.004149151802062988, 0.0041250882148742675, 0.004131648063659668, 0.004122752189636231, 0.0041151041984558104, 0.004145055770874023, 0.004138720035552978, 0.004106719970703125, 0.004110496044158935, 0.0040950078964233395, 0.004111167907714844, 0.004095136165618896, 0.0043078079223632815, 0.004131135940551758, 0.004132544040679932, 0.004114336013793946, 0.004112544059753418, 0.004087744235992431, 0.004163392066955566, 0.004124927997589112, 0.004126880168914795, 0.004115359783172607, 0.004103040218353272, 0.004105696201324463, 0.005145120143890381, 0.004163584232330322, 0.0041288318634033205, 0.004100287914276123, 0.004140799999237061, 0.0040878081321716305, 0.004104191780090332, 0.004098207950592041, 0.0040991039276123045, 0.004165503978729248, 0.00382694411277771, 0.004089920043945312, 0.004123680114746094, 0.004117472171783447, 0.0041316161155700686, 0.004119616031646728, 0.0040969281196594235, 0.00410422420501709, 0.004100096225738525, 0.004095808029174804, 0.004128960132598877, 0.004100096225738525, 0.004111455917358398, 0.004113088130950928, 0.0041019201278686525, 0.00412656021118164, 0.004141119956970215, 0.004100639820098877, 0.004115712165832519, 0.00412281608581543, 0.004126527786254883, 0.00414131212234497, 0.004108575820922852, 0.004110559940338134, 0.004145088195800781, 0.004149151802062988, 0.004120895862579346, 0.0041183681488037105, 0.004130368232727051, 0.004174272060394287, 0.00413593578338623, 0.004123648166656494, 0.004346975803375244, 0.00411078405380249, 0.004140672206878662, 0.004171807765960694, 0.004242144107818604, 0.00416755199432373, 0.004154880046844483, 0.004172031879425049, 0.004186207771301269, 0.004175936222076416, 0.0042804799079895015, 0.004163743972778321, 0.0041428799629211425, 0.004167903900146484, 0.004153183937072754, 0.004206079959869385, 0.004108928203582764, 0.00409935998916626, 0.004129536151885986, 0.004110496044158935, 0.004094048023223877, 0.0040974078178405765, 0.0041077442169189455, 0.0041316161155700686, 0.004104095935821533, 0.004088352203369141, 0.004108128070831299, 0.004089663982391358, 0.004160704135894776, 0.00409446382522583, 0.004310880184173584, 
0.0038383998870849608, 0.004098048210144043, 0.004093120098114014, 0.004109119892120362, 0.004433216094970703, 0.004141695976257324, 0.004108352184295654, 0.004231167793273926, 0.004148608207702637, 0.004295104026794434, 0.004317376136779785, 0.0040958719253540036, 0.004107999801635742, 0.004131231784820557, 0.004118688106536865, 0.004112224102020264, 0.004116159915924072, 0.004155712127685547, 0.00410211181640625, 0.0042575039863586425, 0.0041454720497131345, 0.004155327796936035, 0.004142399787902832, 0.004129536151885986, 0.004110047817230225, 0.004127007961273193, 0.004120448112487793, 0.004308351993560791, 0.004148255825042724, 0.0041857919692993165, 0.0042557759284973145, 0.004208640098571777, 0.0042631678581237795, 0.004262656211853027, 0.004241759777069092, 0.0042041921615600586, 0.004177696228027343, 0.00416380786895752, 0.004253695964813233, 0.004114655971527099, 0.004112031936645508, 0.004141183853149414, 0.004122623920440674, 0.004146783828735351, 0.004129312038421631, 0.004128320217132568, 0.00412608003616333, 0.004120672225952148, 0.004135776042938233, 0.004165311813354492, 0.004120895862579346, 0.004132863998413086, 0.004370175838470459, 0.004188000202178955, 0.004247968196868897, 0.004235136032104492, 0.004243743896484375, 0.004280159950256348, 0.004233215808868408, 0.004209792137145996, 0.0042007360458374025, 0.004188767910003662, 0.00415558385848999, 0.0038482561111450194, 0.004120831966400147, 0.004111519813537598, 0.004137375831604004, 0.00412716817855835, 0.004162943840026855, 0.004124576091766357, 0.004117216110229492, 0.004089375972747803, 0.004119008064270019, 0.004126143932342529, 0.004125408172607422, 0.004099167823791504, 0.00409881591796875, 0.0040982398986816404, 0.004109888076782226, 0.004167520046234131, 0.004080031871795654, 0.004153632164001465, 0.004089056015014648, 0.004089888095855713, 0.0042644162178039555, 0.004116479873657227, 0.004140096187591553, 0.004109600067138672, 0.004093632221221923, 0.004130847930908203, 0.004113376140594482, 0.004083903789520263, 0.004071680068969727, 0.004104735851287841, 0.004136832237243653, 0.004145376205444336, 0.004102047920227051, 0.00409600019454956, 0.004098048210144043, 0.004091904163360595, 0.004092063903808594, 0.004107872009277344, 0.004112639904022217, 0.004083136081695557, 0.004209375858306884, 0.00417571210861206, 0.004136640071868897, 0.004116799831390381, 0.004118239879608155, 0.0041370558738708495, 0.004118720054626465, 0.004138688087463379, 0.004139328002929687, 0.004112383842468262, 0.004194464206695557, 0.004402112007141113, 0.004144256114959717, 0.004138463973999023, 0.004151648044586182, 0.004126688003540039, 0.004124671936035156, 0.004171008110046386, 0.004135680198669433, 0.004271872043609619, 0.004134304046630859, 0.0040941438674926755, 0.0038338561058044434, 0.0041363520622253415, 0.004102143764495849, 0.004108320236206055, 0.004122591972351074, 0.004118271827697754, 0.004108575820922852, 0.0041454720497131345, 0.004094207763671875, 0.004122591972351074, 0.004101151943206787, 0.0041418237686157225, 0.004100096225738525, 0.004107647895812988, 0.004109024047851562, 0.004141215801239014, 0.0041363840103149415, 0.0041539201736450196, 0.004106239795684814, 0.004124000072479248, 0.004151968002319336, 0.0041160001754760745, 0.0041108160018920896, 0.004105535984039307, 0.004093791961669922, 0.004119391918182373, 0.004091584205627442, 0.004095839977264404, 0.00410262393951416, 0.004093952178955078, 0.004176928043365479, 0.00409494400024414, 0.0041389760971069335, 0.004157472133636475, 0.004302847862243653, 
0.004143167972564697, 0.004170015811920166, 0.004148896217346191, 0.004141215801239014, 0.004128608226776123, 0.004108287811279297, 0.004118495941162109, 0.004100128173828125, 0.004141056060791016, 0.004162975788116455, 0.0041027522087097165, 0.004120448112487793, 0.004147327899932861, 0.00409600019454956, 0.004105696201324463, 0.004101791858673096, 0.004108640193939209, 0.004110879898071289, 0.004104191780090332, 0.004087200164794922, 0.004114560127258301, 0.00406883192062378, 0.004103551864624024, 0.004079232215881348, 0.004080639839172363, 0.004094880104064941, 0.004083295822143554, 0.004057600021362305, 0.003860192060470581, 0.004102431774139405, 0.004116223812103271, 0.004092095851898193, 0.004085375785827637, 0.00411075210571289, 0.004114463806152344, 0.004108287811279297, 0.004183040142059326, 0.004146175861358643, 0.004112383842468262, 0.0040993280410766605, 0.004092671871185303, 0.004116576194763183, 0.004121856212615967, 0.004106368064880371, 0.0041429119110107425, 0.004102880001068115, 0.004100096225738525, 0.0040859198570251465, 0.004085728168487549, 0.004169600009918213, 0.004136256217956543, 0.004091775894165039, 0.004087615966796875, 0.004187039852142334, 0.0041207680702209475, 0.004110239982604981, 0.0040754880905151366, 0.004378464221954346, 0.004136960029602051, 0.004130047798156738, 0.004124991893768311, 0.004125440120697021, 0.004102015972137451, 0.004095680236816406, 0.004128223896026612, 0.004105055809020996, 0.004227071762084961, 0.004142240047454834, 0.00412278413772583, 0.004103007793426513, 0.004144447803497315, 0.004215328216552734, 0.004159327983856201, 0.00412611198425293, 0.004102911949157715, 0.0040999999046325685, 0.004105855941772461, 0.004190400123596192, 0.004186399936676025, 0.004124095916748047, 0.004328000068664551, 0.004112383842468262, 0.004118527889251709, 0.004271903991699219, 0.00411414384841919, 0.004159999847412109, 0.004136415958404541, 0.0041468157768249515, 0.004130815982818604, 0.004105408191680909, 0.0041324481964111325, 0.0038645761013031007, 0.004141056060791016, 0.004125696182250976, 0.004114431858062744, 0.004152416229248047, 0.004695168018341064, 0.004264736175537109, 0.004345856189727783, 0.004581151962280273, 0.004425951957702637, 0.0049500160217285155, 0.004597951889038086, 0.004255871772766113, 0.00421446418762207, 0.004200448036193847, 0.00420627212524414, 0.0042022719383239746, 0.004176544189453125, 0.00414412784576416, 0.0041082558631896976, 0.00411568021774292, 0.0041123518943786625, 0.004144864082336426, 0.004360191822052002, 0.004139008045196534, 0.004205728054046631, 0.004108511924743653, 0.004139328002929687, 0.0041045122146606446, 0.004163584232330322, 0.0041205759048461915, 0.004130815982818604, 0.004103487968444825, 0.004218624114990234, 0.004101151943206787, 0.0041082558631896976, 0.004114208221435547, 0.00412278413772583, 0.004124671936035156, 0.004141056060791016, 0.004097983837127685, 0.004123871803283692, 0.004088863849639892, 0.004116288185119629, 0.004100096225738525, 0.004365791797637939, 0.004112607955932617, 0.0041188478469848635, 0.004106207847595215, 0.004385824203491211, 0.00414412784576416, 0.00414025592803955, 0.004092576026916504, 0.004139423847198486, 0.004104159832000733, 0.004158751964569092, 0.004103775978088379, 0.004213632106781006, 0.004108287811279297, 0.004115903854370117, 0.004106175899505615, 0.004104991912841797, 0.004101984024047852, 0.0038949759006500244, 0.004136032104492188, 0.004111775875091553, 0.004140768051147461, 0.004141695976257324, 0.00410211181640625, 0.0041381120681762695, 
0.004131743907928467, 0.004093952178955078, 0.004080992221832275, 0.0040804800987243655, 0.004133887767791748, 0.004108640193939209, 0.004085375785827637, 0.004096223831176758, 0.00410313606262207, 0.004107935905456543, 0.004107647895812988, 0.004122399806976319, 0.0041480641365051266, 0.0041123518943786625, 0.004132895946502685, 0.004098048210144043, 0.004134528160095215, 0.004116543769836426, 0.004128575801849365, 0.004097824096679688, 0.0041370558738708495, 0.004090496063232422, 0.004101471900939941, 0.004081791877746582, 0.004098944187164307, 0.0041036481857299805, 0.0041576318740844725, 0.004093952178955078, 0.004105567932128906, 0.00431990385055542, 0.0041472001075744626, 0.004184031963348388, 0.00413699197769165, 0.004132863998413086, 0.004208640098571777, 0.004155551910400391, 0.004194143772125244, 0.00414515209197998, 0.004157440185546875, 0.004164608001708985, 0.004205567836761475, 0.0041472001075744626, 0.004134912014007569, 0.004132959842681885, 0.004147103786468506, 0.004118336200714111, 0.004111648082733154, 0.004110527992248535, 0.0041868481636047365, 0.00411356782913208, 0.004098944187164307, 0.004103295803070069, 0.0041108160018920896, 0.004122176170349121, 0.004136799812316894, 0.004123263835906983]",tokens/s,240.78010461680162,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,825.344,515.76832,0.0,113.246208,113.023488,s,1,9.4605458984375,9.4605458984375,0.0,9.4605458984375,9.4605458984375,9.4605458984375,9.4605458984375,[9.4605458984375],,kWh,1.1916416895833967e-05,1.3066823956649972e-06,3.7216696440016406e-06,1.6944768935500605e-05,,MB,1304.121344,631.11168,0.0,213.909504,180.29568,s,38,0.25368854427337645,0.0066760143229835905,7.173184282647193e-05,0.00666155219078064,0.006783660888671874,0.00680408148765564,0.006854984927177429,"[0.006741119861602783, 0.006837728023529052, 0.006690847873687744, 0.0066275839805603025, 0.006865119934082032, 0.006616511821746826, 0.006642303943634033, 0.006595871925354004, 0.006592383861541748, 0.006660672187805175, 0.006624320030212402, 0.0066730880737304685, 0.006571904182434082, 0.006661568164825439, 0.006593215942382813, 0.006679488182067871, 0.0066089601516723636, 0.00678652811050415, 0.00666153621673584, 0.006632448196411133, 0.006659711837768555, 0.006589824199676513, 0.006707168102264404, 0.006639039993286132, 0.006676415920257569, 0.006782432079315185, 0.006643072128295898, 0.006731455802917481, 0.006739136219024658, 0.0067981438636779785, 0.006671103954315185, 0.006738016128540039, 0.006589888095855713, 0.006688831806182861, 0.006664447784423828, 0.006761375904083252, 0.006629119873046875, 
0.006616159915924073]",tokens/s,38346.232889085615,kWh,1.926383165924151e-07,2.1244459178636713e-08,1.0950595485808664e-07,3.2338873062913847e-07,tokens/kWh,791616948.129155,MB,1317.84704,643.694592,0.0,226.492416,180.29824,s,38,9.833551315307616,0.2587776661923057,0.0026704974102336678,0.25807623291015624,0.2620928527832031,0.26296537017822263,0.2674549118041992,"[0.2690152282714844, 0.2647981567382813, 0.258569091796875, 0.25635028076171873, 0.2567657470703125, 0.2624165344238281, 0.2561003112792969, 0.2561700439453125, 0.2566266174316406, 0.2568833923339844, 0.2573973388671875, 0.25804296875, 0.2567245788574219, 0.2581094970703125, 0.2596156311035156, 0.2588731079101563, 0.25854656982421875, 0.26059884643554687, 0.2604513244628906, 0.2558199005126953, 0.2626419372558594, 0.2580408630371094, 0.2567113037109375, 0.25819155883789063, 0.26135006713867187, 0.2619541320800781, 0.2566661682128906, 0.2606456298828125, 0.2580175170898438, 0.2582845153808594, 0.2586575927734375, 0.25801068115234377, 0.260249755859375, 0.2582100524902344, 0.2567503662109375, 0.2576535034179688, 0.25705062866210937, 0.2565898742675781]",tokens/s,243.4522303527645,kWh,7.522293734175191e-06,8.295698474445673e-07,3.0382597212472537e-06,1.139012330286701e-05,tokens/kWh,5531107.813744409,,s,2394,9.814899709939963,0.0040997910233667315,0.00012561016895635033,0.004069648027420044,0.004196352005004883,0.004278148913383483,0.0046196376419067415,"[0.003960832118988037, 0.00427619218826294, 0.00424348783493042, 0.004391039848327637, 0.004257120132446289, 0.004219423770904541, 0.004195648193359375, 0.004813504219055176, 0.00417142391204834, 0.004174272060394287, 0.004192160129547119, 0.004287775993347168, 0.004309088230133057, 0.0042397122383117675, 0.004249887943267822, 0.004200032234191894, 0.004295072078704834, 0.0042016000747680665, 0.004189375877380371, 0.004312352180480957, 0.004611551761627197, 0.004139711856842041, 0.004241375923156738, 0.004224448204040527, 0.004332640171051025, 0.004164671897888184, 0.004137663841247558, 0.004130815982818604, 0.004161056041717529, 0.004254176139831543, 0.004265408039093018, 0.0044224319458007815, 0.004283840179443359, 0.0041496000289916995, 0.004179232120513916, 0.004147935867309571, 0.004146399974822998, 0.004182816028594971, 0.004177919864654541, 0.004143263816833496, 0.004163424015045166, 0.0044011521339416505, 0.004534272193908692, 0.004145023822784424, 0.004185855865478516, 0.004182240009307861, 0.004219327926635742, 0.00425164794921875, 0.004226687908172607, 0.004306687831878662, 0.004348256111145019, 0.004328896045684814, 0.004291423797607422, 0.004301919937133789, 0.004293504238128662, 0.004345600128173828, 0.004349408149719239, 0.004254047870635986, 0.004241600036621094, 0.004222976207733154, 0.00420582389831543, 0.0047926721572875974, 0.004229695796966553, 0.004380671977996826, 0.00466323184967041, 0.004435935974121094, 0.004282623767852783, 0.004247392177581787, 0.004284224033355713, 0.004247647762298584, 0.004272096157073975, 0.0042329921722412105, 0.004175392150878907, 0.004238048076629639, 0.0053433279991149905, 0.0048635520935058595, 0.0042863678932189946, 0.00419484806060791, 0.00420249605178833, 0.004157440185546875, 0.004136928081512451, 0.004357728004455566, 0.004206048011779785, 0.004200607776641846, 0.00416870403289795, 0.004175487995147705, 0.004174015998840332, 0.004192255973815918, 0.004163584232330322, 0.004196127891540527, 0.004118752002716064, 0.004212480068206787, 0.004164095878601074, 0.004460288047790527, 0.00415334415435791, 0.004122623920440674, 
0.004130591869354248, 0.0041270718574523925, 0.004339871883392334, 0.004156415939331055, 0.00418671989440918, 0.004093247890472412, 0.004088640213012695, 0.004102143764495849, 0.004073472023010254, 0.004038559913635254, 0.00406332778930664, 0.004063231945037842, 0.004040703773498535, 0.004048480033874512, 0.0040819840431213375, 0.0040367040634155275, 0.0040776638984680175, 0.0041320638656616214, 0.00416425609588623, 0.004061151981353759, 0.004074783802032471, 0.004063263893127441, 0.00405295991897583, 0.004053120136260986, 0.004039584159851074, 0.004043744087219238, 0.004039455890655517, 0.0040365118980407715, 0.0040425281524658204, 0.004052671909332275, 0.0038135359287261965, 0.0040794239044189455, 0.004063263893127441, 0.004075520038604737, 0.004063231945037842, 0.0040629119873046874, 0.004075263977050781, 0.0040962882041931156, 0.0041003842353820805, 0.0040878081321716305, 0.004068863868713379, 0.004071936130523681, 0.004111423969268799, 0.0041782078742980955, 0.0040698561668396, 0.004111999988555908, 0.004106272220611573, 0.004094431877136231, 0.004110400199890137, 0.004169727802276611, 0.004196063995361328, 0.004193600177764893, 0.0041791038513183595, 0.004174752235412598, 0.004176799774169922, 0.004235263824462891, 0.004171520233154297, 0.004165855884552002, 0.004069407939910888, 0.004102143764495849, 0.004077055931091309, 0.004036992073059082, 0.004040544033050537, 0.004093440055847168, 0.004035359859466553, 0.004029600143432617, 0.0041439681053161625, 0.0041324481964111325, 0.004040575981140136, 0.0041272640228271484, 0.00403769588470459, 0.00423964786529541, 0.004082335948944092, 0.004063231945037842, 0.004102143764495849, 0.004061183929443359, 0.004173151969909668, 0.004096960067749024, 0.004077280044555664, 0.004072735786437989, 0.004055583953857422, 0.0040655040740966795, 0.004063199996948242, 0.004112383842468262, 0.0040586237907409665, 0.004162047863006592, 0.004101823806762695, 0.004090176105499267, 0.004056128025054931, 0.004076479911804199, 0.004061183929443359, 0.004085663795471191, 0.004061279773712158, 0.0037794559001922607, 0.004136960029602051, 0.00407260799407959, 0.0040620799064636235, 0.004063199996948242, 0.004052000045776367, 0.00404585599899292, 0.004118271827697754, 0.004069664001464844, 0.004071328163146972, 0.004040768146514892, 0.004050271987915039, 0.004085375785827637, 0.004058080196380615, 0.004093952178955078, 0.004126719951629639, 0.004062975883483887, 0.004147456169128418, 0.004034560203552246, 0.004085760116577148, 0.004145215988159179, 0.004108543872833252, 0.004038047790527344, 0.004065567970275879, 0.004047071933746338, 0.004056863784790039, 0.004042751789093017, 0.00407750415802002, 0.004163648128509521, 0.004027872085571289, 0.004028480052947998, 0.0040302081108093265, 0.0040516481399536135, 0.004024127960205078, 0.004016160011291504, 0.004024511814117432, 0.004024447917938233, 0.004064320087432861, 0.0040581121444702144, 0.00405676794052124, 0.00409929609298706, 0.004043680191040039, 0.004143104076385498, 0.004029920101165771, 0.004054592132568359, 0.004068319797515869, 0.004077600002288818, 0.004257823944091797, 0.004055295944213867, 0.004013760089874268, 0.004050047874450684, 0.004022240161895752, 0.004039743900299072, 0.004032415866851807, 0.004055103778839111, 0.004140704154968262, 0.004053215980529785, 0.004068863868713379, 0.004024831771850586, 0.004040895938873291, 0.004034463882446289, 0.004018080234527588, 0.004040895938873291, 0.003761023998260498, 0.004064608097076416, 0.004048992156982422, 0.00408019208908081, 0.0041164479255676265, 
0.004063488006591797, 0.004041759967803955, 0.004053760051727295, 0.004052864074707031, 0.0040590081214904785, 0.004050240039825439, 0.004055776119232178, 0.004037888050079346, 0.004051487922668457, 0.004037055969238281, 0.0040430397987365726, 0.0040672321319580075, 0.004048480033874512, 0.004118752002716064, 0.00406063985824585, 0.004176415920257568, 0.0040711679458618165, 0.004055200099945068, 0.004048992156982422, 0.004046847820281982, 0.0040274238586425785, 0.004043744087219238, 0.0040282559394836425, 0.004050335884094238, 0.004089856147766113, 0.0040433921813964845, 0.0041001920700073245, 0.004034592151641846, 0.004029568195343018, 0.004028416156768798, 0.004029088020324707, 0.004028895854949951, 0.004036352157592773, 0.0040570878982543945, 0.004198463916778564, 0.004111839771270752, 0.004061344146728515, 0.004095808029174804, 0.0043729920387268065, 0.004073472023010254, 0.004108287811279297, 0.004063231945037842, 0.004075583934783936, 0.0040672640800476076, 0.004022143840789795, 0.004106368064880371, 0.004028768062591553, 0.004044447898864746, 0.004063488006591797, 0.0040384001731872555, 0.004069471836090088, 0.004165535926818848, 0.004063039779663086, 0.004145311832427979, 0.004089632034301758, 0.0041495041847229, 0.0040570878982543945, 0.004071424007415772, 0.0038140480518341065, 0.004105663776397705, 0.004110911846160889, 0.004082015991210938, 0.004085631847381592, 0.004140704154968262, 0.004178048133850098, 0.0042599039077758786, 0.004190144062042237, 0.004154687881469727, 0.004127423763275146, 0.0040871682167053225, 0.004078207969665527, 0.004062655925750732, 0.004166207790374756, 0.004077631950378418, 0.004087007999420166, 0.004083871841430664, 0.004059072017669678, 0.004067967891693116, 0.004134912014007569, 0.004115456104278564, 0.004076320171356201, 0.004139232158660889, 0.004126719951629639, 0.004149248123168945, 0.004198400020599366, 0.0041470079421997074, 0.004192448139190673, 0.00409600019454956, 0.004081664085388184, 0.004034560203552246, 0.004073344230651856, 0.004091104030609131, 0.004055200099945068, 0.004049376010894775, 0.004089471817016602, 0.00411078405380249, 0.004176095962524414, 0.004200448036193847, 0.004280223846435547, 0.004282464027404785, 0.004217088222503662, 0.0042247681617736815, 0.004150559902191162, 0.004113120079040527, 0.0040982398986816404, 0.004128575801849365, 0.004075520038604737, 0.004052095890045166, 0.004031328201293946, 0.004141088008880615, 0.004062655925750732, 0.00403052806854248, 0.0040594878196716306, 0.00406496000289917, 0.0040432319641113285, 0.004314239978790283, 0.005235231876373291, 0.004392831802368164, 0.005196288108825684, 0.004319200038909912, 0.00406441593170166, 0.0037890880107879638, 0.004078911781311035, 0.004063263893127441, 0.004061567783355713, 0.004052159786224366, 0.0040672321319580075, 0.004052159786224366, 0.004040512084960937, 0.004095903873443604, 0.004062848091125488, 0.004067488193511963, 0.00406108808517456, 0.004048927783966065, 0.004363615989685058, 0.004049856185913086, 0.004073472023010254, 0.004064256191253662, 0.004047743797302246, 0.00402239990234375, 0.004048895835876465, 0.0040439038276672366, 0.004035295963287354, 0.004145311832427979, 0.004020287990570069, 0.004084959983825683, 0.004032447814941407, 0.004055840015411377, 0.004215968132019043, 0.004060319900512695, 0.004042240142822265, 0.004046847820281982, 0.00410643196105957, 0.004046847820281982, 0.004029600143432617, 0.004031263828277588, 0.004099552154541016, 0.004037407875061036, 0.004032320022583008, 0.004046847820281982, 0.004024191856384278, 
0.004040832042694092, 0.004032512187957763, 0.0040382399559020996, 0.004047232151031494, 0.0040325760841369625, 0.004014048099517822, 0.004040703773498535, 0.004034048080444336, 0.004056704044342041, 0.004127840042114258, 0.004032288074493408, 0.004038080215454102, 0.004205120086669922, 0.004067327976226807, 0.00404307222366333, 0.004033311843872071, 0.004031583786010742, 0.004041952133178711, 0.004028543949127197, 0.004036064147949219, 0.004041728019714356, 0.004011744022369384, 0.004084000110626221, 0.003841887950897217, 0.0040406079292297364, 0.0040351681709289555, 0.0040505919456481935, 0.004054463863372802, 0.004049471855163574, 0.004065152168273926, 0.004058432102203369, 0.004051328182220459, 0.004048768043518066, 0.004047391891479492, 0.00409555196762085, 0.004058944225311279, 0.004059199810028076, 0.004125152111053467, 0.004073599815368653, 0.004062848091125488, 0.004034687995910644, 0.004030720233917236, 0.004031551837921143, 0.004053664207458496, 0.004161471843719482, 0.004082015991210938, 0.004041952133178711, 0.004055007934570313, 0.004098656177520752, 0.004036928176879883, 0.004038559913635254, 0.0040503678321838375, 0.004022304058074951, 0.0040330557823181155, 0.0042206401824951174, 0.004031968116760254, 0.004036640167236328, 0.004037343978881836, 0.00404691219329834, 0.004052768230438233, 0.004055263996124267, 0.004110335826873779, 0.004041024208068848, 0.00402400016784668, 0.004173823833465576, 0.004069248199462891, 0.004050848007202149, 0.004044000148773193, 0.004039775848388672, 0.004034463882446289, 0.004060383796691895, 0.004018752098083496, 0.004044127941131592, 0.0040414400100708, 0.004051104068756104, 0.00411353588104248, 0.004053887844085694, 0.0040713601112365725, 0.004054783821105957, 0.0040512638092041016, 0.004061183929443359, 0.004052000045776367, 0.004158432006835937, 0.004090976238250732, 0.004064159870147705, 0.00404307222366333, 0.0037949440479278563, 0.004111839771270752, 0.004048543930053711, 0.00405401611328125, 0.004042111873626709, 0.004044672012329101, 0.004020544052124023, 0.004056543827056885, 0.004042655944824219, 0.004061247825622559, 0.004053599834442138, 0.004066624164581299, 0.004023136138916016, 0.004028575897216797, 0.004124639987945556, 0.00415334415435791, 0.004093823909759521, 0.004038784027099609, 0.004038656234741211, 0.0040503997802734375, 0.0040432319641113285, 0.00408787202835083, 0.0040423359870910645, 0.0040308799743652346, 0.004036191940307617, 0.004026303768157959, 0.004054975986480713, 0.004020256042480469, 0.004079936027526855, 0.0040163202285766605, 0.00414515209197998, 0.004032512187957763, 0.0040234880447387695, 0.004062240123748779, 0.004019999980926513, 0.004026400089263916, 0.004382080078125, 0.004081632137298584, 0.004074304103851318, 0.004091711997985839, 0.004079904079437256, 0.004120287895202637, 0.004074912071228027, 0.0041942081451416015, 0.004060863971710205, 0.004060160160064697, 0.004028575897216797, 0.0040692157745361324, 0.0040202240943908694, 0.004037888050079346, 0.004071712017059326, 0.004049088001251221, 0.004054783821105957, 0.00406166410446167, 0.004054272174835205, 0.004194784164428711, 0.004045343875885009, 0.004079232215881348, 0.004110559940338134, 0.004069344043731689, 0.004081664085388184, 0.004069375991821289, 0.004095776081085205, 0.003821216106414795, 0.0040934720039367675, 0.004077375888824463, 0.004104576110839844, 0.004058944225311279, 0.0040776958465576175, 0.0040800638198852535, 0.004126368045806884, 0.004108384132385254, 0.004078080177307129, 0.004116479873657227, 0.004093567848205567, 
0.004114816188812256, 0.004104063987731933, 0.004093920230865479, 0.004104351997375489, 0.004074912071228027, 0.004142816066741943, 0.00410265588760376, 0.004118080139160156, 0.004078495979309082, 0.00404259204864502, 0.0040653438568115235, 0.004075520038604737, 0.004040319919586182, 0.004079999923706054, 0.004019968032836914, 0.004053247928619385, 0.0041840639114379885, 0.004106175899505615, 0.004097119808197021, 0.0040455999374389645, 0.004051136016845703, 0.004146495819091797, 0.004027071952819824, 0.004042751789093017, 0.004019680023193359, 0.0041147518157958985, 0.004046592235565185, 0.004063712120056152, 0.004027647972106934, 0.004039391994476318, 0.004044832229614258, 0.004130815982818604, 0.004095168113708496, 0.00403769588470459, 0.004021984100341797, 0.004036384105682373, 0.004034815788269043, 0.0040570878982543945, 0.004040287971496582, 0.004036928176879883, 0.00403056001663208, 0.004050432205200195, 0.004178336143493652, 0.004101664066314697, 0.004046720027923584, 0.004078271865844726, 0.0040447998046875, 0.004021312236785889, 0.004066239833831787, 0.004069568157196045, 0.004024127960205078, 0.003909631967544556, 0.004161151885986328, 0.004089888095855713, 0.004122432231903076, 0.004057631969451904, 0.004073472023010254, 0.004150527954101563, 0.004076128005981445, 0.00405724811553955, 0.004075520038604737, 0.004611616134643555, 0.00408019208908081, 0.004048799991607666, 0.004054944038391114, 0.004045919895172119, 0.00407209587097168, 0.004059775829315186, 0.004099264144897461, 0.004051487922668457, 0.004110335826873779, 0.004028416156768798, 0.004095744132995605, 0.0040200319290161135, 0.004202943801879883, 0.004083712100982666, 0.004048895835876465, 0.0041205759048461915, 0.004184127807617188, 0.0040430078506469725, 0.004075200080871582, 0.004029888153076172, 0.004048511981964111, 0.004033088207244873, 0.0041068801879882814, 0.004048543930053711, 0.0040614080429077146, 0.004023392200469971, 0.004059328079223633, 0.004045407772064209, 0.0040696320533752444, 0.004032351970672607, 0.004081056118011474, 0.004039167881011963, 0.004046175956726074, 0.00416534423828125, 0.004063424110412598, 0.0040982398986816404, 0.004074048042297363, 0.004015200138092041, 0.004053919792175293, 0.004062880039215088, 0.004036320209503174, 0.004133503913879394, 0.004038527965545654, 0.004040832042694092, 0.004054175853729248, 0.004066048145294189, 0.004042367935180664, 0.004051360130310058, 0.0040694398880004885, 0.004118783950805664, 0.004065023899078369, 0.004041823863983154, 0.0038662400245666503, 0.004111264228820801, 0.004064127922058105, 0.004092127799987793, 0.004077375888824463, 0.004075615882873535, 0.004088479995727539, 0.004056159973144531, 0.00406822395324707, 0.004105408191680909, 0.00403766393661499, 0.004038496017456055, 0.004163584232330322, 0.004056960105895996, 0.004038784027099609, 0.004055039882659912, 0.004298431873321533, 0.0042417278289794925, 0.004105408191680909, 0.004049856185913086, 0.004053919792175293, 0.004039872169494629, 0.004112160205841065, 0.004032671928405762, 0.004050975799560547, 0.004052800178527832, 0.004019743919372558, 0.004067808151245117, 0.0040154561996459965, 0.004036960124969483, 0.004102464199066162, 0.004028607845306396, 0.004054848194122314, 0.004146783828735351, 0.004060704231262207, 0.004218912124633789, 0.005079360008239746, 0.004061247825622559, 0.004146783828735351, 0.004086912155151367, 0.00406006383895874, 0.004043615818023681, 0.004048895835876465, 0.004054944038391114, 0.004035776138305664, 0.004049824237823486, 0.004038656234741211, 
0.004057055950164795, 0.004160768032073975, 0.004133152008056641, 0.004028160095214844, 0.004030591964721679, 0.00403219223022461, 0.004035520076751709, 0.004065279960632324, 0.004031744003295899, 0.004039072036743164, 0.0040451521873474125, 0.004032512187957763, 0.004042751789093017, 0.0040507521629333494, 0.004158815860748291, 0.004131680011749268, 0.0037748799324035646, 0.0040655360221862795, 0.0040570878982543945, 0.004037951946258545, 0.004051231861114502, 0.0040544958114624025, 0.004053599834442138, 0.004044479846954345, 0.004036960124969483, 0.004036736011505127, 0.004028895854949951, 0.004095712184906006, 0.004137311935424805, 0.004109983921051026, 0.0040553598403930665, 0.004076863765716553, 0.004058720111846924, 0.004039135932922364, 0.0040432319641113285, 0.004030303955078125, 0.004032512187957763, 0.004032512187957763, 0.0040202240943908694, 0.0040878081321716305, 0.004052768230438233, 0.004096223831176758, 0.004040703773498535, 0.004026368141174317, 0.004025375843048096, 0.0040430397987365726, 0.004040863990783692, 0.0040572800636291505, 0.004036543846130371, 0.004106656074523925, 0.0040570878982543945, 0.004046847820281982, 0.004042240142822265, 0.004055391788482666, 0.004097663879394532, 0.004088064193725586, 0.004038464069366455, 0.004054751873016358, 0.00411078405380249, 0.004045119762420654, 0.004047935962677002, 0.004050879955291748, 0.004038879871368408, 0.004022143840789795, 0.004046815872192383, 0.004072383880615234, 0.004077631950378418, 0.004200191974639893, 0.004176064014434815, 0.004124671936035156, 0.0040869760513305665, 0.004158559799194336, 0.004095680236816406, 0.004128736019134521, 0.004163648128509521, 0.004214111804962158, 0.004087679862976075, 0.004112256050109863, 0.00408460807800293, 0.0038735361099243165, 0.004110335826873779, 0.00407539176940918, 0.00409116792678833, 0.004086624145507813, 0.004093952178955078, 0.004175871849060059, 0.004114431858062744, 0.004107679843902588, 0.004138879776000977, 0.004080160140991211, 0.00406982421875, 0.0040952000617980955, 0.0040596799850463865, 0.004139008045196534, 0.004050943851470947, 0.0040796160697937015, 0.004076672077178955, 0.004070271968841552, 0.004085760116577148, 0.0040878081321716305, 0.004055136203765869, 0.004073376178741455, 0.004093952178955078, 0.004196352005004883, 0.0040895681381225586, 0.004079360008239746, 0.004119071960449219, 0.004063519954681396, 0.004050687789916992, 0.004189472198486328, 0.004049568176269531, 0.004032544136047363, 0.00404204797744751, 0.004079648017883301, 0.004059807777404785, 0.004036608219146728, 0.004087103843688965, 0.004067135810852051, 0.004086431980133057, 0.004083936214447022, 0.004268032073974609, 0.004075424194335937, 0.004048768043518066, 0.004069600105285644, 0.004048480033874512, 0.004040736198425293, 0.004053055763244629, 0.004091872215270996, 0.004042912006378174, 0.004054848194122314, 0.004033152103424072, 0.004429376125335694, 0.004047039985656738, 0.004126976013183594, 0.004071455955505371, 0.004054975986480713, 0.0041244478225708, 0.004042399883270264, 0.0040421438217163085, 0.004195263862609863, 0.004165567874908447, 0.0040917439460754395, 0.003825792074203491, 0.004084832191467285, 0.004084512233734131, 0.004114431858062744, 0.004059135913848877, 0.004074848175048828, 0.004053664207458496, 0.0040447998046875, 0.004097856044769287, 0.0041305599212646486, 0.004059584140777588, 0.004040703773498535, 0.004068672180175781, 0.004035264015197754, 0.0040447998046875, 0.004055039882659912, 0.004109888076782226, 0.00403276777267456, 0.004171103954315186, 
0.004073408126831055, 0.004051551818847656, 0.004054975986480713, 0.004039040088653564, 0.004061279773712158, 0.004080575942993164, 0.004207359790802002, 0.004304096221923828, 0.004268832206726074, 0.004313151836395263, 0.004249311923980713, 0.004295104026794434, 0.004196352005004883, 0.004319327831268311, 0.004257760047912598, 0.004256864070892334, 0.004415552139282227, 0.004305280208587646, 0.00426643180847168, 0.004206240177154541, 0.0041454720497131345, 0.004095808029174804, 0.004089471817016602, 0.004084288120269775, 0.0041205759048461915, 0.00405299186706543, 0.0040850558280944825, 0.0040533761978149415, 0.004030144214630127, 0.004055679798126221, 0.004025919914245606, 0.004116928100585938, 0.004140096187591553, 0.004059936046600342, 0.004034719944000244, 0.004081664085388184, 0.0040447998046875, 0.0041512961387634275, 0.004073472023010254, 0.004045087814331055, 0.004058271884918213, 0.0040310401916503905, 0.004026175975799561, 0.004023712158203125, 0.003782655954360962, 0.004055327892303466, 0.004085663795471191, 0.0040663681030273435, 0.0040430078506469725, 0.004150784015655518, 0.004068416118621826, 0.004676896095275879, 0.0040507521629333494, 0.004096608161926269, 0.004067584037780762, 0.00407692813873291, 0.004147808074951172, 0.004116576194763183, 0.004066688060760498, 0.004045375823974609, 0.0040878081321716305, 0.0040795841217041015, 0.00416156816482544, 0.004072959899902344, 0.004081823825836182, 0.00413702392578125, 0.0040381760597229, 0.004155519962310791, 0.004089856147766113, 0.004653696060180664, 0.004083360195159912, 0.004089407920837403, 0.004074272155761719, 0.0041699838638305665, 0.004106112003326416, 0.004169600009918213, 0.004046847820281982, 0.004023871898651123, 0.004112959861755371, 0.0043760638236999515, 0.004286272048950195, 0.004112959861755371, 0.0041402878761291504, 0.0040636157989501955, 0.004108672142028809, 0.004032512187957763, 0.004056735992431641, 0.00410422420501709, 0.004033952236175537, 0.004072288036346436, 0.004044479846954345, 0.004156000137329101, 0.004036064147949219, 0.00404310417175293, 0.004022240161895752, 0.004038591861724853, 0.004118303775787354, 0.004034368038177491, 0.004066976070404052, 0.0040497279167175295, 0.004040800094604492, 0.004019519805908203, 0.004090464115142823, 0.004050303936004638, 0.004127359867095947, 0.004018367767333985, 0.00401907205581665, 0.0038761279582977294, 0.0040085439682006835, 0.004292255878448487, 0.004930016040802002, 0.004070591926574707, 0.004086368083953858, 0.004147424221038819, 0.004472832202911377, 0.004083168029785156, 0.004077568054199219, 0.004084256172180176, 0.004062687873840332, 0.004055583953857422, 0.004032512187957763, 0.004067359924316406, 0.004066751956939697, 0.004052735805511474, 0.004029215812683106, 0.004053088188171386, 0.00408735990524292, 0.00411683177947998, 0.004061183929443359, 0.004091807842254639, 0.004032864093780518, 0.004079232215881348, 0.004034687995910644, 0.004030464172363281, 0.004023776054382324, 0.0040425281524658204, 0.004028416156768798, 0.004031199932098389, 0.00416156816482544, 0.004139296054840088, 0.004079328060150146, 0.004124320030212402, 0.004057184219360351, 0.0040507521629333494, 0.004053440093994141, 0.004084864139556885, 0.004037759780883789, 0.004036191940307617, 0.004040256023406983, 0.004043168067932129, 0.00411027193069458, 0.004043263912200928, 0.0040343041419982914, 0.004054719924926758, 0.004118752002716064, 0.00404259204864502, 0.004128608226776123, 0.004110591888427734, 0.00412278413772583, 0.004116159915924072, 0.0041045122146606446, 
0.0042287039756774905, 0.004112800121307373, 0.004171775817871094, 0.0040531520843505855, 0.004046688079833985, 0.004126719951629639, 0.004055039882659912, 0.004128255844116211, 0.004041215896606445, 0.0037601280212402346, 0.004041120052337647, 0.004047039985656738, 0.004040512084960937, 0.0040152640342712405, 0.004030752182006836, 0.004084288120269775, 0.004058911800384522, 0.0040364799499511715, 0.004144959926605225, 0.004040895938873291, 0.004059519767761231, 0.004060256004333496, 0.0040579838752746586, 0.00404585599899292, 0.004037087917327881, 0.004196000099182129, 0.004049215793609619, 0.004043295860290528, 0.004177152156829834, 0.004092735767364502, 0.00408787202835083, 0.004058271884918213, 0.004149856090545655, 0.004057184219360351, 0.004061215877532959, 0.004067327976226807, 0.004068511962890625, 0.004032703876495361, 0.004076064109802246, 0.004153183937072754, 0.004297215938568116, 0.004362016201019287, 0.004376575946807861, 0.0043089919090271, 0.004398335933685302, 0.004398975849151611, 0.004647808074951172, 0.004288479804992675, 0.0042824001312255855, 0.0043745279312133786, 0.004263199806213379, 0.0042605757713317875, 0.004244736194610596, 0.004193024158477783, 0.004154560089111328, 0.004078144073486328, 0.0041064958572387695, 0.004052000045776367, 0.0041294717788696285, 0.004055456161499023, 0.004058335781097412, 0.004066143989562988, 0.0041773438453674315, 0.004081664085388184, 0.0040636157989501955, 0.004048895835876465, 0.004118144035339355, 0.004059135913848877, 0.004049280166625977, 0.00404915189743042, 0.004054783821105957, 0.004089856147766113, 0.0038743999004364014, 0.004057504177093506, 0.004048031806945801, 0.004069983959197998, 0.004036767959594727, 0.004057184219360351, 0.004059135913848877, 0.0040858879089355465, 0.004051008224487305, 0.0040466561317443845, 0.004038015842437744, 0.004090496063232422, 0.004072671890258789, 0.004063039779663086, 0.004166848182678222, 0.004110112190246582, 0.004077216148376465, 0.004122144222259522, 0.0040759677886962895, 0.004122015953063965, 0.004139264106750488, 0.004249695777893066, 0.004319200038909912, 0.004244128227233887, 0.004354047775268555, 0.004505343914031982, 0.004134592056274414, 0.004321951866149903, 0.0041307201385498045, 0.004159488201141357, 0.004476704120635986, 0.004784351825714112, 0.004120096206665039, 0.004107967853546142, 0.004088895797729492, 0.004190176010131836, 0.004052735805511474, 0.004132863998413086, 0.004059135913848877, 0.004204959869384766, 0.004064799785614014, 0.004097631931304932, 0.004049376010894775, 0.0040811200141906735, 0.004028960227966309, 0.004050943851470947, 0.004067327976226807, 0.004056320190429687, 0.0040653438568115235, 0.004123583793640136, 0.004115712165832519, 0.004129439830780029, 0.004040544033050537, 0.004084896087646484, 0.004293471813201904, 0.004064703941345215, 0.004061247825622559, 0.004028607845306396, 0.004038976192474365, 0.004106239795684814, 0.004067455768585205, 0.0040440959930419925, 0.004069952011108399, 0.003835871934890747, 0.004063871860504151, 0.004061312198638916, 0.004050816059112549, 0.004036767959594727, 0.0040731201171875, 0.004053343772888183, 0.004050943851470947, 0.004036736011505127, 0.004060383796691895, 0.004044672012329101, 0.004053088188171386, 0.004059775829315186, 0.004220064163208008, 0.004027232170104981, 0.004053055763244629, 0.004037856101989746, 0.004041696071624756, 0.004031744003295899, 0.004035136222839355, 0.00403439998626709, 0.004044288158416748, 0.004039328098297119, 0.004024672031402588, 0.004013728141784668, 0.004089920043945312, 
0.004169663906097412, 0.004105472087860108, 0.004039423942565918, 0.004087679862976075, 0.004055168151855469, 0.004042751789093017, 0.004029920101165771, 0.004030111789703369, 0.004023392200469971, 0.004054944038391114, 0.004021632194519043, 0.004043263912200928, 0.0040273919105529785, 0.004021024227142334, 0.0040748162269592285, 0.004045728206634521, 0.004011775970458984, 0.004046688079833985, 0.004020639896392822, 0.00401196813583374, 0.0040018558502197266, 0.004042687892913818, 0.0040059518814086915, 0.004026368141174317, 0.004079679965972901, 0.004216767787933349, 0.00415334415435791, 0.004119967937469483, 0.004161151885986328, 0.004068319797515869, 0.004055200099945068, 0.004044447898864746, 0.004039904117584228, 0.0040869760513305665, 0.0040382719039917, 0.004024767875671386, 0.004038368225097656, 0.003778559923171997, 0.004046304225921631, 0.004098495960235596, 0.004075615882873535, 0.004038015842437744, 0.004019968032836914, 0.004031551837921143, 0.004052256107330322, 0.0040432319641113285, 0.004053055763244629, 0.004063231945037842, 0.004019423961639405, 0.004033311843872071, 0.0040464639663696286, 0.004045184135437012, 0.004034560203552246, 0.004204544067382812, 0.0040401601791381836, 0.00404860782623291, 0.004012351989746094, 0.004049280166625977, 0.004905087947845459, 0.004554751873016357, 0.004816895961761475, 0.005495967864990234, 0.004383647918701172, 0.004200128078460694, 0.0041658878326416016, 0.004157440185546875, 0.00416153621673584, 0.004101952075958252, 0.004063744068145752, 0.004079296112060547, 0.004070655822753906, 0.004076191902160645, 0.004016543865203857, 0.004044479846954345, 0.004182015895843506, 0.0044356160163879395, 0.0041468157768249515, 0.004270815849304199, 0.004126719951629639, 0.004196544170379638, 0.004123936176300049, 0.00409881591796875, 0.0041262397766113285, 0.004096255779266357, 0.004155424118041992, 0.004233535766601563, 0.004247200012207031, 0.004190207958221436, 0.004153503894805908, 0.004124512195587158, 0.0040941438674926755, 0.004068192005157471, 0.004148191928863525, 0.00406883192062378, 0.004079936027526855, 0.0042109122276306156, 0.004116479873657227, 0.004097760200500488, 0.004107583999633789, 0.004091904163360595, 0.003780992031097412, 0.004096223831176758, 0.004071072101593017, 0.004058495998382568, 0.004139552116394043, 0.004105408191680909, 0.0042501440048217775, 0.0040943360328674315, 0.004069248199462891, 0.004067455768585205, 0.004063231945037842, 0.00404857587814331, 0.0040692157745361324, 0.004069248199462891, 0.004110943794250488, 0.004262944221496582, 0.004155744075775147, 0.004205344200134277, 0.004076511859893799, 0.004055935859680176, 0.004062848091125488, 0.004272384166717529, 0.004069727897644043, 0.004089632034301758, 0.004092991828918457, 0.004066048145294189, 0.004047200202941894, 0.004058303833007812, 0.004027040004730225, 0.004182015895843506, 0.004173279762268067, 0.004135456085205078, 0.0040878081321716305, 0.004042463779449463, 0.004071296215057373, 0.004049312114715576, 0.004046495914459229, 0.004048927783966065, 0.0040648322105407714, 0.0040471358299255375, 0.004032991886138916, 0.004050335884094238, 0.004075295925140381, 0.004137375831604004, 0.004164000034332275, 0.004050848007202149, 0.004034656047821045, 0.004053279876708984, 0.004074304103851318, 0.004043136119842529, 0.004046751976013184, 0.0040698561668396, 0.004073503971099853, 0.004038784027099609, 0.004083199977874756, 0.0042091522216796875, 0.004167679786682129, 0.004196191787719726, 0.00407913589477539, 0.004084352016448974, 0.0040746240615844725, 
0.00407155179977417, 0.004058207988739014, 0.0037925119400024415, 0.004081855773925782, 0.004083360195159912, 0.004180831909179688, 0.004059072017669678, 0.004058879852294922, 0.004122623920440674, 0.004051296234130859, 0.004048160076141357, 0.004055615901947022, 0.004071231842041016, 0.004054848194122314, 0.004050496101379394, 0.004092448234558106, 0.004040800094604492, 0.004017375946044922, 0.004041120052337647, 0.004024320125579834, 0.0040614080429077146, 0.00404204797744751, 0.004215104103088379, 0.0040655360221862795, 0.004034687995910644, 0.00405724811553955, 0.004044767856597901, 0.004052256107330322, 0.004109055995941162, 0.004046847820281982, 0.00422815990447998, 0.004092991828918457, 0.004091775894165039, 0.00405724811553955, 0.004046688079833985, 0.004108287811279297, 0.0040581121444702144, 0.00403382396697998, 0.004033311843872071, 0.004248288154602051, 0.0040347838401794435, 0.0040570878982543945, 0.0040564160346984865, 0.0040451521873474125, 0.004034048080444336, 0.00401801586151123, 0.004035039901733399, 0.004032832145690918, 0.004020415782928467, 0.004113791942596435, 0.004026144027709961, 0.004028512001037598, 0.00405785608291626, 0.0040796160697937015, 0.0040430078506469725, 0.004033504009246826, 0.0042005119323730465, 0.004075615882873535, 0.0041006717681884765, 0.00404099178314209, 0.004032288074493408, 0.004048895835876465, 0.004093088150024414, 0.004062047958374023, 0.004183231830596924, 0.0037457919120788574, 0.004102143764495849, 0.004065408229827881, 0.004044384002685547, 0.0040471358299255375, 0.00402345609664917, 0.004031424045562744, 0.00405456018447876, 0.004030367851257324, 0.004112864017486572, 0.004048351764678955, 0.0040596799850463865, 0.004019904136657715, 0.004000063896179199, 0.004032512187957763, 0.004005887985229492, 0.004032512187957763, 0.004032512187957763, 0.0040161280632019045, 0.0040133118629455565, 0.004018720149993897, 0.004016575813293457, 0.004179359912872315, 0.004172128200531006, 0.004095327854156494, 0.004078271865844726, 0.004118080139160156, 0.004113920211791992, 0.0041194877624511714, 0.004103968143463135, 0.004095615863800049, 0.004054719924926758, 0.004069536209106445, 0.004051712036132813, 0.004101984024047852, 0.00446070384979248, 0.004106368064880371, 0.004034431934356689, 0.004091904163360595, 0.0040382080078125, 0.004034495830535888, 0.004071936130523681, 0.004300352096557617, 0.00435859203338623, 0.004093952178955078, 0.004097568035125732, 0.004097695827484131, 0.0041132159233093265, 0.004155392169952392, 0.004220928192138672, 0.004218880176544189, 0.004158527851104737, 0.004175936222076416, 0.0041922879219055175, 0.0041420478820800785, 0.004066815853118896, 0.004061791896820068, 0.004063007831573487, 0.004059135913848877, 0.004048895835876465, 0.00404636812210083, 0.0040348801612854, 0.0041938238143920895, 0.003799936056137085, 0.004064928054809571, 0.00405244779586792, 0.00406499195098877, 0.004059167861938476, 0.004061312198638916, 0.004071455955505371, 0.004037407875061036, 0.004044384002685547, 0.004091487884521484, 0.004068192005157471, 0.004107808113098145, 0.004061471939086914, 0.004171360015869141, 0.004051360130310058, 0.004071072101593017, 0.004038623809814453, 0.004059360027313232, 0.004060832023620606, 0.004135424137115478, 0.004124671936035156, 0.004113984107971191, 0.0041001601219177244, 0.0051142401695251465, 0.00409600019454956, 0.004140448093414306, 0.0040945601463317875, 0.004087264060974121, 0.004083615779876709, 0.004079520225524902, 0.0040960640907287595, 0.004131840229034424, 0.004101600170135498, 
0.004077760219573975, 0.004102143764495849, 0.004542816162109375, 0.004856512069702148, 0.004139776229858398, 0.004167903900146484, 0.004095776081085205, 0.004088128089904785, 0.004063136100769043, 0.004071616172790528, 0.00413267183303833, 0.004141056060791016, 0.004117919921875, 0.00411894416809082, 0.004085951805114746, 0.004091231822967529, 0.004073631763458252, 0.004057600021362305, 0.00414035177230835, 0.0041489281654357914, 0.004340576171875, 0.004234816074371338, 0.004221536159515381, 0.004196352005004883, 0.004190464019775391, 0.004218751907348633, 0.004212607860565185, 0.0041842560768127444, 0.004125728130340576, 0.004084352016448974, 0.0037742719650268554, 0.004060895919799805, 0.0040525121688842775, 0.004103104114532471, 0.0040447998046875, 0.004048223972320557, 0.004047679901123047, 0.004364223957061768, 0.004030399799346924, 0.004050911903381348, 0.00406876802444458, 0.004327904224395752, 0.004204671859741211, 0.004618239879608154, 0.0044316802024841305, 0.00470246410369873, 0.0041818881034851076, 0.004172063827514648, 0.0042638077735900875, 0.004540319919586182, 0.004576863765716553, 0.004638207912445068, 0.004166143894195557, 0.004280799865722656, 0.004086751937866211, 0.004184095859527588, 0.004121535778045654, 0.00409600019454956, 0.004440063953399658, 0.004070464134216308, 0.004273087978363037, 0.004093952178955078, 0.004098368167877197, 0.0040854401588439945, 0.004067359924316406, 0.004070784091949463, 0.004063712120056152, 0.0040685758590698244, 0.004072351932525635, 0.004108191967010498, 0.0040367040634155275, 0.004032800197601319, 0.004011744022369384, 0.004024320125579834, 0.004073472023010254, 0.004032159805297851, 0.004047200202941894, 0.004054848194122314, 0.004051136016845703, 0.004056575775146484, 0.004085728168487549, 0.004151584148406983, 0.004155392169952392, 0.004079808235168457, 0.004040768146514892, 0.004054880142211914, 0.004063392162322998, 0.004244927883148193, 0.004081952095031738, 0.004116767883300781, 0.004046527862548828, 0.004071199893951416, 0.004085599899291992, 0.004055200099945068, 0.0041530561447143555, 0.004059743881225586, 0.004165631771087646, 0.004107327938079834, 0.004041920185089112, 0.0040384001731872555, 0.004044703960418701, 0.004042111873626709, 0.004039328098297119, 0.004048480033874512, 0.004080383777618408, 0.004018911838531494, 0.00403334379196167, 0.004131296157836914, 0.004058847904205323, 0.004067103862762451, 0.004044479846954345, 0.004031007766723633, 0.004245503902435303, 0.004067391872406006, 0.004036543846130371, 0.004030464172363281, 0.004002848148345947, 0.004186079978942871, 0.004038879871368408, 0.004093855857849121, 0.004088352203369141, 0.004132480144500733, 0.0040661759376525875, 0.004071104049682617, 0.004036448001861572, 0.0040553598403930665, 0.00405299186706543, 0.004038656234741211, 0.004104191780090332, 0.004034560203552246, 0.004028416156768798, 0.004032479763031006, 0.004280352115631104, 0.004028543949127197, 0.004081535816192627, 0.004046783924102783, 0.0040325760841369625, 0.00405244779586792, 0.004075424194335937, 0.004194176197052002, 0.004054815769195557, 0.004060224056243896, 0.004046751976013184, 0.004027488231658935, 0.004042975902557373, 0.004050655841827393, 0.004016575813293457, 0.004024864196777344, 0.004073311805725097, 0.004032320022583008, 0.004041056156158448, 0.0040447998046875, 0.004036287784576416, 0.004077600002288818, 0.004032864093780518, 0.004031551837921143, 0.0037592959403991697, 0.004025184154510498, 0.004072864055633545, 0.0040571198463439945, 0.004108575820922852, 
0.004030720233917236, 0.004034048080444336, 0.0040514559745788575, 0.00405299186706543, 0.004047071933746338, 0.004030240058898926, 0.004012288093566895, 0.004034272193908691, 0.004044832229614258, 0.004067488193511963, 0.004081727981567383, 0.004077343940734863, 0.004114431858062744, 0.004044191837310791, 0.004054751873016358, 0.004094848155975342, 0.004081952095031738, 0.004079328060150146, 0.004085855960845947, 0.00406108808517456, 0.004067327976226807, 0.004034560203552246, 0.00404204797744751, 0.004016831874847412, 0.004058559894561767, 0.0040348801612854, 0.004092160224914551, 0.004005343914031983, 0.004017951965332031, 0.004008255958557129, 0.004028863906860351, 0.004222976207733154, 0.004138495922088623, 0.004063744068145752, 0.004059135913848877, 0.004071424007415772, 0.00405017614364624, 0.004030367851257324, 0.004156256198883056, 0.004165631771087646, 0.004143104076385498, 0.004423808097839355, 0.0047870721817016605, 0.006110464096069336, 0.0047470078468322754, 0.004202400207519531, 0.004130911827087402, 0.004126912117004395, 0.004108096122741699, 0.004077760219573975, 0.004118336200714111, 0.0040665922164916995, 0.004102272033691406, 0.004063648223876953, 0.004057375907897949, 0.004103936195373535, 0.00414134407043457, 0.004085631847381592, 0.003792992115020752, 0.004128767967224121, 0.004113696098327637, 0.004141183853149414, 0.004117087841033935, 0.0041512961387634275, 0.004097663879394532, 0.004090240001678467, 0.004064735889434815, 0.004104288101196289, 0.0040455999374389645, 0.004056735992431641, 0.004110047817230225, 0.004073503971099853, 0.004083968162536621, 0.004069087982177734, 0.004075808048248291, 0.004104063987731933, 0.004061312198638916, 0.004119872093200683, 0.004074175834655761, 0.004089856147766113, 0.004056640148162842, 0.004091360092163086, 0.0040457921028137205, 0.004060256004333496, 0.004064320087432861, 0.004081727981567383, 0.004128543853759765, 0.004245759963989258, 0.004055840015411377, 0.0042997441291809085, 0.004144927978515625, 0.004116703987121582, 0.004048895835876465, 0.004063263893127441, 0.004073440074920654, 0.004083968162536621, 0.004050079822540283, 0.00408022403717041, 0.004038015842437744, 0.004075136184692383, 0.0040499200820922855, 0.004081664085388184, 0.0040791997909545895, 0.004073887825012207, 0.004119872093200683, 0.004121280193328857, 0.00405017614364624, 0.004061952114105225, 0.004093408107757568, 0.004081920146942138, 0.0041250882148742675, 0.004068607807159424, 0.00403110408782959, 0.004078879833221436, 0.0040291519165039065, 0.004044159889221192, 0.004067008018493652, 0.0042724800109863285, 0.004091775894165039, 0.004139135837554932, 0.004096672058105468, 0.0038339200019836426, 0.00409600019454956, 0.004177504062652588, 0.004077983856201172, 0.004100096225738525, 0.004070623874664307, 0.004056096076965332, 0.0040993280410766605, 0.0041221117973327635, 0.004066720008850098, 0.004089439868927002, 0.004050623893737793, 0.004142784118652344, 0.004102303981781006, 0.004082143783569336, 0.004115903854370117, 0.004103968143463135, 0.004074272155761719, 0.0041238398551940916, 0.004056064128875733, 0.004082943916320801, 0.00412716817855835, 0.004034687995910644, 0.004083199977874756, 0.004138815879821778, 0.00429091215133667, 0.004149663925170899, 0.004140992164611817, 0.004045951843261719, 0.004089759826660156, 0.00409494400024414, 0.00410316801071167, 0.0040908799171447754, 0.004059072017669678, 0.0041001601219177244, 0.004140895843505859, 0.004098207950592041, 0.004109407901763916, 0.004076064109802246, 0.004104159832000733, 
0.004043168067932129, 0.004061247825622559, 0.00407750415802002, 0.004064928054809571, 0.004042240142822265, 0.004067776203155518, 0.004053408145904541, 0.004124800205230713, 0.004161407947540283, 0.004241151809692383, 0.004118783950805664, 0.004100287914276123, 0.004087615966796875, 0.00409600019454956, 0.004130623817443848, 0.00405295991897583, 0.004075744152069092, 0.004052224159240722, 0.004074592113494873, 0.004040224075317383, 0.004028543949127197, 0.004171775817871094, 0.004036448001861572, 0.0037510080337524415, 0.004047679901123047, 0.004075551986694336, 0.004044864177703857, 0.0040767998695373535, 0.00410041618347168, 0.0040571198463439945, 0.0040730562210083004, 0.004035391807556152, 0.004071424007415772, 0.00436624002456665, 0.0040488319396972654, 0.004048511981964111, 0.004067135810852051, 0.004047584056854248, 0.00406879997253418, 0.004161375999450684, 0.004070112228393555, 0.0040860800743103024, 0.004062816143035889, 0.004263328075408935, 0.004144063949584961, 0.00404249620437622, 0.004065279960632324, 0.004132127761840821, 0.004061728000640869, 0.004069568157196045, 0.00405244779586792, 0.004035103797912598, 0.004079936027526855, 0.0040384321212768555, 0.004054272174835205, 0.00407209587097168, 0.004026368141174317, 0.004038015842437744, 0.004075520038604737, 0.0040433921813964845, 0.004153503894805908, 0.004187104225158692, 0.0041188478469848635, 0.004071072101593017, 0.004062111854553223, 0.0040323839187622074, 0.004089983940124511, 0.004032479763031006, 0.004106272220611573, 0.004073472023010254, 0.0040685758590698244, 0.004085631847381592, 0.004053952217102051, 0.0040300159454345705, 0.004129280090332031, 0.004050528049468994, 0.005169568061828614, 0.004160704135894776, 0.004141759872436523, 0.004130847930908203, 0.00412175989151001, 0.004082528114318848, 0.0041205759048461915, 0.004055136203765869, 0.004083615779876709, 0.004075583934783936, 0.003822335958480835, 0.00408076810836792, 0.004106400012969971, 0.004086592197418213, 0.004073344230651856, 0.004138271808624267, 0.004050943851470947, 0.004077888011932373, 0.004104320049285889, 0.004084288120269775, 0.004364031791687012, 0.004101151943206787, 0.004141056060791016, 0.004086751937866211, 0.004084928035736084, 0.004090496063232422, 0.004095615863800049, 0.004052671909332275, 0.004049471855163574, 0.004071775913238525, 0.004057055950164795, 0.004073631763458252, 0.004036448001861572, 0.004063231945037842, 0.004077568054199219, 0.0040447998046875, 0.0041512961387634275, 0.004083712100982666, 0.00405017614364624, 0.004102367877960205, 0.004086400032043457, 0.0042800002098083495, 0.004218719959259033, 0.004126880168914795, 0.004137184143066406, 0.004083231925964355, 0.004038656234741211, 0.004058976173400879, 0.004082592010498047, 0.0041223359107971196, 0.004122176170349121, 0.00406496000289917, 0.004064223766326904, 0.004081439971923828, 0.004057055950164795, 0.004059167861938476, 0.004118271827697754, 0.004081535816192627, 0.00404694414138794, 0.004059423923492431, 0.004067008018493652, 0.0040486397743225095, 0.004038464069366455, 0.00409881591796875, 0.004091904163360595, 0.004057055950164795, 0.004058464050292969, 0.004080319881439209, 0.004032512187957763, 0.004042272090911865, 0.004233695983886719, 0.004098048210144043, 0.004089759826660156, 0.0038461120128631594, 0.004160031795501709, 0.004077760219573975, 0.004094240188598633, 0.004073535919189453, 0.004063231945037842, 0.004097568035125732, 0.004063648223876953, 0.004091423988342285, 0.004082143783569336, 0.004044159889221192, 0.004075424194335937, 
0.004055295944213867, 0.004096191883087158, 0.004083263874053955, 0.004121312141418457, 0.004049215793609619, 0.004083392143249512, 0.004036191940307617, 0.0040616002082824705, 0.004036096096038819, 0.004073247909545898, 0.00418179178237915, 0.004260799884796143, 0.0042287039756774905, 0.004232704162597656, 0.004420544147491455, 0.004238944053649902, 0.0042928318977355955, 0.004196512222290039, 0.004182240009307861, 0.0041244478225708, 0.004100063800811768, 0.004109856128692627, 0.004092063903808594, 0.004094304084777832, 0.0040796160697937015, 0.004067103862762451, 0.004124735832214356, 0.004094111919403076, 0.004046783924102783, 0.0041608958244323735, 0.004277152061462402, 0.00406006383895874, 0.004065919876098633, 0.004040800094604492, 0.00407692813873291, 0.0041480641365051266, 0.004372416019439697, 0.004191775798797607, 0.004345471858978271, 0.004197247982025146, 0.004220895767211914, 0.004115903854370117, 0.004132895946502685, 0.004177631855010987, 0.004082496166229248, 0.004112383842468262, 0.004069375991821289, 0.0040570878982543945, 0.004081664085388184, 0.004053311824798584, 0.004095327854156494, 0.003794368028640747, 0.004045055866241455, 0.004044832229614258, 0.004127232074737549, 0.004075232028961181, 0.004044447898864746, 0.004091968059539795, 0.004074079990386963, 0.00407257604598999, 0.00410207986831665, 0.0040662078857421876, 0.004095776081085205, 0.004079872131347656, 0.004098048210144043, 0.004124671936035156, 0.004097631931304932, 0.004050528049468994, 0.0041398401260375976, 0.0040590081214904785, 0.004079552173614502, 0.004281919956207275, 0.004087935924530029, 0.004153855800628662, 0.004111584186553955, 0.0041090879440307615, 0.00410368013381958, 0.0041437759399414065, 0.004093344211578369, 0.00411683177947998, 0.004083424091339111, 0.0041753602027893065, 0.0040797438621521, 0.004113152027130127, 0.00408080005645752, 0.0040273919105529785, 0.004043935775756836, 0.004045023918151856, 0.004030943870544434, 0.004042272090911865, 0.004283167839050293, 0.004089536190032959, 0.004072735786437989, 0.004070112228393555, 0.004059135913848877, 0.004133952140808105, 0.004020832061767578, 0.004118879795074463, 0.004077568054199219, 0.004055039882659912, 0.004067391872406006, 0.004052256107330322, 0.004045472145080566, 0.00420249605178833, 0.004051231861114502, 0.0040711359977722164, 0.004102143764495849, 0.0042147841453552244, 0.004132287979125977, 0.004262464046478271, 0.0040460162162780765, 0.004108543872833252, 0.0040412797927856445, 0.0040590400695800785, 0.003784768104553223, 0.0040919361114501954, 0.004051775932312012, 0.004036191940307617, 0.004055776119232178, 0.004054719924926758, 0.004203936100006103, 0.004113152027130127, 0.004036448001861572, 0.00415334415435791, 0.004046304225921631, 0.004053855895996094, 0.0040629119873046874, 0.004026368141174317, 0.004028416156768798, 0.004085760116577148, 0.004062687873840332, 0.0040796160697937015, 0.004172416210174561, 0.004108191967010498, 0.004050079822540283, 0.004043136119842529, 0.004042816162109375, 0.004067647933959961, 0.004038368225097656, 0.004064896106719971, 0.004063712120056152, 0.0041244478225708, 0.004068160057067871, 0.004033984184265137, 0.004034912109375, 0.004063136100769043, 0.004036608219146728, 0.004380159854888916, 0.0040821762084960935, 0.004058527946472168, 0.004108767986297607, 0.004086239814758301, 0.0040935039520263675, 0.004095744132995605, 0.004059360027313232, 0.004047232151031494, 0.004052735805511474, 0.004061183929443359, 0.004018176078796387, 0.004067327976226807, 0.004114528179168701, 
0.00406928014755249, 0.0040202240943908694, 0.004036608219146728, 0.004202208042144775, 0.0040430397987365726, 0.00403439998626709, 0.004073472023010254, 0.004020351886749268, 0.0040878400802612305, 0.004034624099731445, 0.004032095909118652, 0.004047200202941894, 0.004036608219146728, 0.004073535919189453, 0.004065216064453125, 0.004009376049041748, 0.003789247989654541, 0.004050079822540283, 0.004098048210144043, 0.00409609603881836, 0.0040599040985107426, 0.004069375991821289, 0.004061183929443359, 0.004077343940734863, 0.004118144035339355, 0.004057184219360351, 0.004194303989410401, 0.004051487922668457, 0.004040671825408936, 0.004071424007415772, 0.004056831836700439, 0.004108543872833252, 0.004126976013183594, 0.004129600048065185, 0.004127679824829101, 0.004069568157196045, 0.004038464069366455, 0.0040665922164916995, 0.004114880084991455, 0.004028704166412354, 0.004043935775756836, 0.004039807796478272, 0.004095712184906006, 0.004030464172363281, 0.004229119777679443, 0.004080863952636718, 0.004088479995727539, 0.004132991790771485, 0.004142367839813232, 0.004069983959197998, 0.004106368064880371, 0.004130815982818604, 0.004040512084960937, 0.004020415782928467, 0.0041677761077880855, 0.004091872215270996, 0.004038591861724853, 0.004072864055633545, 0.004062016010284424, 0.004069151878356933, 0.004056831836700439, 0.00408403205871582, 0.004069151878356933, 0.004035871982574463, 0.0040412797927856445, 0.004106239795684814, 0.004112703800201416, 0.004044223785400391, 0.004094079971313476, 0.004065279960632324, 0.004260287761688232, 0.004134912014007569, 0.004066463947296143, 0.004064095973968506, 0.004071424007415772, 0.004198495864868164, 0.004044640064239502, 0.004034527778625488, 0.004124767780303955, 0.0037654719352722167, 0.00406009578704834, 0.004051040172576905, 0.004062304019927979, 0.00404259204864502, 0.0040516481399536135, 0.004091519832611084, 0.004045472145080566, 0.004061183929443359, 0.0040767998695373535, 0.004061952114105225, 0.004111584186553955, 0.004197152137756348, 0.0040653438568115235, 0.0040795841217041015, 0.00408684778213501, 0.004080543994903565, 0.004081664085388184, 0.004067423820495605, 0.00406931209564209, 0.004039904117584228, 0.004026624202728271, 0.004235072135925293, 0.004068031787872315, 0.004177631855010987, 0.004109824180603027, 0.0040496959686279295, 0.004072544097900391, 0.004315616130828858, 0.004115007877349854, 0.0040505919456481935, 0.004038911819458008, 0.00402953577041626, 0.0040273919105529785, 0.004153312206268311, 0.004048799991607666, 0.004036608219146728, 0.004061183929443359, 0.004110335826873779, 0.004065279960632324, 0.004040703773498535, 0.004029632091522217, 0.00401196813583374, 0.0042024641036987305, 0.004082496166229248, 0.004057184219360351, 0.004056479930877686, 0.004055647850036621, 0.004112383842468262, 0.004065279960632324, 0.004055039882659912, 0.0041365761756896975, 0.00402675199508667, 0.004059135913848877, 0.00405676794052124, 0.004064799785614014, 0.00406009578704834, 0.004023359775543213, 0.004035103797912598, 0.0040381760597229, 0.004025055885314942, 0.004036128044128418, 0.004166111946105957, 0.003848191976547241, 0.004065279960632324, 0.0040570878982543945, 0.004050911903381348, 0.0040284481048583985, 0.0040319361686706546, 0.004057663917541504, 0.004061183929443359, 0.004098207950592041, 0.004071263790130615, 0.004078911781311035, 0.004260543823242187, 0.004046847820281982, 0.004054592132568359, 0.004108736038208008, 0.004046847820281982, 0.004063231945037842, 0.0040791997909545895, 0.004103871822357177, 
0.00410697603225708, 0.004034048080444336, 0.004080128192901611, 0.004046847820281982, 0.004046175956726074, 0.004041376113891601, 0.004094240188598633, 0.00404860782623291, 0.004104191780090332, 0.00401203203201294, 0.004046847820281982, 0.004034560203552246, 0.0040202240943908694, 0.004046847820281982, 0.004081664085388184, 0.004024511814117432, 0.004282048225402832, 0.004054431915283203, 0.0041233601570129395, 0.0040260801315307616, 0.004065919876098633, 0.004032159805297851, 0.004116479873657227, 0.004295743942260742, 0.004103104114532471, 0.004036223888397217, 0.004051328182220459, 0.004034560203552246, 0.004072800159454346, 0.004030399799346924, 0.0040475201606750486, 0.004061247825622559, 0.004060192108154297, 0.004059616088867187, 0.00404256010055542, 0.004121280193328857, 0.004054272174835205, 0.004050015926361084, 0.0040731201171875, 0.004028416156768798, 0.004028416156768798, 0.004026432037353516, 0.004028351783752442, 0.004028192043304443]",tokens/s,243.91487134356524,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1901.580288,2770.20672,0.0,2367.684608,2328.95488,s,1,10.46066796875,10.46066796875,0.0,10.46066796875,10.46066796875,10.46066796875,10.46066796875,[10.46066796875],,kWh,5.517073100000497e-05,6.0781905122529655e-06,1.7701403050001535e-05,7.895032456225947e-05,,MB,1999.241216,2847.801344,0.0,2430.599168,2386.812928,s,10,0.7637565841674805,0.07637565841674805,0.00010971755525195534,0.07637790298461913,0.07644877014160156,0.0765452507019043,0.07662243515014648,"[0.07664173126220702, 0.07622160339355469, 0.07631350708007813, 0.07639052581787109, 0.0764046401977539, 0.07625241851806641, 0.07633174133300781, 0.07642733001708985, 0.07640780639648438, 0.07636528015136719]",tokens/s,3351.853264598017,kWh,2.2785984820413296e-06,2.512863487842243e-07,1.511325498465108e-06,4.041210329290662e-06,tokens/kWh,63347358.62286452,MB,2004.918272,2877.161472,0.0,2459.959296,2398.931968,s,10,29.079150878906248,2.9079150878906246,0.010816753622299707,2.9095372314453125,2.9202213623046878,2.9211350952148436,2.9218660815429685,"[2.89596044921875, 2.901680908203125, 2.922048828125, 2.91255908203125, 2.89134716796875, 2.894736328125, 2.920018310546875, 2.9172177734375, 2.917066650390625, 2.906515380859375]",tokens/s,21.665006747394276,kWh,8.36754062104587e-05,9.229299223197705e-06,3.970509636353491e-05,0.0001326098017971913,tokens/kWh,475078.00438726204,,s,630,29.077490089416546,0.04615474617367699,0.0006891342515722406,0.046031391143798825,0.046577865219116214,0.046901227951049805,0.04831680328369141,"[0.049148223876953126, 0.046387168884277345, 0.04576310348510742, 0.04580767822265625, 0.04560822296142578, 0.04990224075317383, 0.04598374557495117, 0.04566425704956055, 0.0458012809753418, 0.04595516967773437, 0.04613504028320312, 0.04617001724243164, 0.04608412933349609, 0.046543262481689454, 0.04588748931884765, 0.04561907196044922, 
0.046004352569580076, 0.04571750259399414, 0.04614553451538086, 0.04670003128051758, 0.04640204620361328, 0.0466063346862793, 0.046619743347167966, 0.04600924682617188, 0.046016128540039065, 0.04576396942138672, 0.045654655456542965, 0.045656448364257814, 0.04585062408447266, 0.04561270523071289, 0.04565641784667969, 0.04556342315673828, 0.04552751922607422, 0.045625343322753906, 0.045939743041992186, 0.04549731063842773, 0.04544220733642578, 0.04543331146240234, 0.045438720703125, 0.045746112823486326, 0.04569187164306641, 0.04552675247192383, 0.04544230270385742, 0.04563225555419922, 0.04594627380371094, 0.04615433502197266, 0.04619776153564453, 0.04575120162963867, 0.04582924652099609, 0.04559779357910156, 0.045528671264648435, 0.045695262908935545, 0.046071807861328126, 0.04581276702880859, 0.04578134536743164, 0.04572412872314453, 0.045672351837158204, 0.04595849609375, 0.04606246566772461, 0.045784961700439455, 0.045680801391601564, 0.045649696350097656, 0.04551916885375976, 0.046671966552734374, 0.04590121459960937, 0.045800033569335936, 0.046341567993164065, 0.04613382339477539, 0.04585910415649414, 0.0458298568725586, 0.04591107177734375, 0.04581679916381836, 0.04580556869506836, 0.04599808120727539, 0.045866943359375, 0.04568870544433594, 0.04561324691772461, 0.04588544082641602, 0.04561715316772461, 0.04729446411132812, 0.04554547119140625, 0.045731231689453124, 0.04561318588256836, 0.04556233596801758, 0.04574835205078125, 0.0459076156616211, 0.045938911437988283, 0.04556307220458984, 0.04588601684570313, 0.045770912170410155, 0.04598332977294922, 0.0457525749206543, 0.04571100616455078, 0.04554198455810547, 0.04558848190307617, 0.04575641632080078, 0.04572159957885742, 0.04599398422241211, 0.04635443115234375, 0.04581932830810547, 0.04570783996582031, 0.04626800155639649, 0.04593027114868164, 0.04612979125976562, 0.04796124649047852, 0.04753084945678711, 0.046854145050048826, 0.04632947158813477, 0.046354110717773435, 0.0462825927734375, 0.046193504333496095, 0.04662886428833008, 0.046268417358398435, 0.04602880096435547, 0.046292991638183595, 0.04611276626586914, 0.046003742218017576, 0.046088672637939455, 0.04613731384277344, 0.046015743255615235, 0.04585551834106445, 0.04592832183837891, 0.046238014221191406, 0.046241600036621096, 0.046448638916015625, 0.04615107345581055, 0.046715679168701174, 0.046358177185058594, 0.04639350509643555, 0.04645808029174805, 0.046373600006103514, 0.04618016052246094, 0.0465494384765625, 0.04608969497680664, 0.04614608001708984, 0.04616755294799805, 0.04625254440307617, 0.04612444686889648, 0.04609641647338867, 0.04602470397949219, 0.04654880142211914, 0.04983270263671875, 0.046519905090332034, 0.04643183898925781, 0.0462487678527832, 0.04657766342163086, 0.04655452728271484, 0.04639516830444336, 0.04635526275634766, 0.04634822463989258, 0.04680857467651367, 0.04647283172607422, 0.046262496948242186, 0.04624252700805664, 0.04649964904785156, 0.04666796875, 0.0462295036315918, 0.04636262512207031, 0.04679894256591797, 0.04695030212402344, 0.04648502349853516, 0.04626480102539063, 0.04669235229492188, 0.04645478439331055, 0.046192638397216795, 0.046421470642089846, 0.046408287048339845, 0.04602259063720703, 0.04595507049560547, 0.04632985687255859, 0.04601830291748047, 0.04609049606323242, 0.04629913711547851, 0.04629094314575195, 0.04624118423461914, 0.04597615814208984, 0.046179359436035156, 0.04626531219482422, 0.04585472106933594, 0.04756070327758789, 0.04636073684692383, 0.04623507308959961, 0.04587356948852539, 0.046296096801757815, 
0.0463595199584961, 0.0460530891418457, 0.045946144104003904, 0.04574310302734375, 0.0459769287109375, 0.04662489700317383, 0.045857345581054684, 0.04587519836425781, 0.04620259094238281, 0.046199073791503904, 0.04581785583496094, 0.04595014572143555, 0.04694236755371094, 0.04651068878173828, 0.04578310394287109, 0.04560620880126953, 0.04556367874145508, 0.04599260711669922, 0.045658592224121095, 0.04585859298706055, 0.04590748977661133, 0.04576831817626953, 0.046029022216796875, 0.04593689727783203, 0.04624339294433594, 0.04572198486328125, 0.04600608062744141, 0.04632342529296875, 0.04689369583129883, 0.04594921493530273, 0.046115840911865234, 0.045835262298583986, 0.045725536346435544, 0.0542344970703125, 0.04649679946899414, 0.0459832649230957, 0.04587961578369141, 0.045864608764648436, 0.04574028778076172, 0.04599008178710937, 0.04628572845458984, 0.04636336135864258, 0.046090431213378906, 0.046483455657958986, 0.046349342346191404, 0.04776240158081055, 0.046458881378173826, 0.047830463409423825, 0.04669292831420899, 0.04621823883056641, 0.04596591949462891, 0.045875614166259765, 0.04644659042358398, 0.046069217681884767, 0.04586323165893555, 0.04561689758300781, 0.04573641586303711, 0.04575564956665039, 0.046372638702392575, 0.04593967819213867, 0.04578406524658203, 0.045679134368896486, 0.0456954574584961, 0.04596736145019531, 0.045678398132324216, 0.04576480102539063, 0.045693950653076174, 0.04686297607421875, 0.04594841766357422, 0.046004222869873046, 0.04599766540527344, 0.04568467330932617, 0.0462591667175293, 0.04604844665527344, 0.04599833679199219, 0.0458430061340332, 0.045700126647949216, 0.04554671859741211, 0.04558179092407227, 0.046104095458984376, 0.045805694580078125, 0.045758560180664064, 0.045920417785644534, 0.045967742919921874, 0.045878623962402346, 0.04611753463745117, 0.04589382553100586, 0.04585059356689453, 0.04567836761474609, 0.04550662231445313, 0.045748222351074216, 0.04577648162841797, 0.045597087860107424, 0.04554342269897461, 0.04567180633544922, 0.04565670394897461, 0.046047233581542966, 0.04560275268554687, 0.045719615936279295, 0.04580364990234375, 0.045584255218505856, 0.04570111846923828, 0.046028575897216796, 0.04569247817993164, 0.045635936737060546, 0.04576803207397461, 0.045618335723876954, 0.04573171234130859, 0.04574614334106445, 0.0458639030456543, 0.045502944946289064, 0.04560073471069336, 0.04587782287597656, 0.04605033493041992, 0.0457529296875, 0.045642208099365235, 0.04583987045288086, 0.045787551879882815, 0.04588854217529297, 0.04577347183227539, 0.04567071914672852, 0.04571305465698242, 0.046497695922851565, 0.046336448669433594, 0.04678860855102539, 0.046327808380126956, 0.04602265548706055, 0.04679411315917969, 0.04644895935058594, 0.04646124649047852, 0.046763423919677735, 0.046215232849121095, 0.04620083236694336, 0.046473217010498044, 0.04648755264282227, 0.04701388931274414, 0.046542430877685545, 0.0459958381652832, 0.04614924621582031, 0.04601084899902344, 0.047272449493408204, 0.047058334350585936, 0.04645334243774414, 0.045997215270996095, 0.04605219268798828, 0.046609569549560546, 0.04603171157836914, 0.045948928833007815, 0.04571052932739258, 0.045673183441162106, 0.04602889633178711, 0.04581792068481445, 0.04562527847290039, 0.045799072265625, 0.045846527099609374, 0.04566255950927734, 0.04578508758544922, 0.045541374206542966, 0.046177440643310544, 0.04600064086914062, 0.046039390563964847, 0.0458158073425293, 0.045705024719238284, 0.045648193359375, 0.045543296813964844, 0.04560486221313476, 0.04579647827148438, 
0.0458658561706543, 0.04566835021972656, 0.047841537475585935, 0.04586227035522461, 0.04571993637084961, 0.04559609603881836, 0.045804031372070314, 0.04558601760864258, 0.04559475326538086, 0.04585862350463867, 0.045728065490722655, 0.04577507019042969, 0.04561715316772461, 0.045641727447509765, 0.045819358825683595, 0.04579587173461914, 0.04560425567626953, 0.04584883117675781, 0.04558883285522461, 0.045557758331298825, 0.045666622161865233, 0.045893310546875, 0.04568064117431641, 0.04612822341918945, 0.04560540771484375, 0.04549587249755859, 0.04540703964233399, 0.046090686798095706, 0.04567363357543945, 0.0457696647644043, 0.04561510467529297, 0.045709022521972655, 0.045623584747314455, 0.04567244720458984, 0.04576361465454101, 0.046063838958740236, 0.045574783325195316, 0.04582976150512695, 0.045648384094238284, 0.04773846435546875, 0.046063678741455075, 0.04605168151855469, 0.04578303909301758, 0.04638924789428711, 0.046357505798339846, 0.046004737854003906, 0.045994495391845705, 0.04613324737548828, 0.04646092987060547, 0.04651443099975586, 0.046063358306884766, 0.04649574279785156, 0.045967041015625, 0.0466736946105957, 0.046252513885498045, 0.04655110549926758, 0.046274559020996094, 0.04618854522705078, 0.04628899383544922, 0.04604937744140625, 0.04631094360351563, 0.046003742218017576, 0.0459920654296875, 0.04599785614013672, 0.04622975921630859, 0.046491615295410155, 0.04678867340087891, 0.04640387344360351, 0.046207134246826174, 0.04647686386108398, 0.04611539077758789, 0.04662300872802734, 0.046332801818847656, 0.04745273590087891, 0.046000415802001954, 0.04586700820922852, 0.047915008544921874, 0.0474521598815918, 0.049995742797851565, 0.04634012985229492, 0.046333953857421874, 0.046236961364746094, 0.04644454574584961, 0.04624873733520508, 0.046034881591796875, 0.047120094299316406, 0.04653219223022461, 0.04649027252197266, 0.046827552795410156, 0.047255550384521484, 0.047906558990478514, 0.0473623046875, 0.04696054458618164, 0.04625241470336914, 0.04653875350952148, 0.04622335815429687, 0.04611686325073242, 0.046104576110839846, 0.04645478439331055, 0.04639539337158203, 0.046128768920898434, 0.046321342468261716, 0.046293697357177734, 0.04622297668457031, 0.046452129364013675, 0.046099422454833984, 0.046107646942138675, 0.04596761703491211, 0.04652300643920899, 0.046507904052734375, 0.04610835266113281, 0.04598432159423828, 0.04643996810913086, 0.04618675231933594, 0.046639328002929685, 0.046063617706298826, 0.04595667266845703, 0.046061344146728515, 0.04612745666503906, 0.046717247009277346, 0.04656947326660156, 0.047050750732421875, 0.045963264465332034, 0.04612035369873047, 0.0464615364074707, 0.04667801666259765, 0.046031070709228517, 0.04625539016723633, 0.04599244689941406, 0.04586086273193359, 0.046526561737060546, 0.04615472030639649, 0.046023616790771486, 0.04605535888671875, 0.04618451309204102, 0.046534656524658206, 0.04628684616088867, 0.04621068954467773, 0.04595526504516602, 0.045819263458251956, 0.04602758407592773, 0.04637696075439453, 0.04620217514038086, 0.04586156845092773, 0.045780990600585936, 0.04609228897094726, 0.045963264465332034, 0.04636000061035156, 0.04638163375854492, 0.04760985565185547, 0.0464486083984375, 0.04588111877441406, 0.04610259246826172, 0.046317310333251954, 0.046101856231689456, 0.0460164794921875, 0.04660115051269531, 0.04605747222900391, 0.0458342399597168, 0.04597145462036133, 0.04584447860717773, 0.046319232940673825, 0.04609686279296875, 0.045856288909912106, 0.0458520622253418, 0.04599824142456055, 0.046239936828613284, 
0.04612502288818359, 0.04605980682373047, 0.04629865646362305, 0.046785377502441404, 0.04645478439331055, 0.04621311950683594, 0.04599193572998047, 0.04626367950439453, 0.04627724838256836, 0.04597139358520508, 0.046153247833251955, 0.046145889282226564, 0.045819934844970704, 0.04612112045288086, 0.045666305541992185, 0.045758464813232425, 0.04606771087646484, 0.046061569213867185, 0.04603868865966797, 0.04585424041748047, 0.04595526504516602, 0.0459076156616211, 0.04638832092285156, 0.04597481536865235, 0.04657968139648438, 0.045756767272949216, 0.0459769287109375, 0.045980609893798825, 0.04650598526000976, 0.04623574447631836, 0.04621097564697266, 0.04662451171875, 0.04638880157470703, 0.046236255645751956, 0.04594921493530273, 0.04640134429931641, 0.04605500793457031, 0.0463282241821289, 0.04621657562255859, 0.04653343963623047, 0.04645663833618164, 0.04627017593383789, 0.046057056427001954, 0.04607878494262695, 0.04575423812866211, 0.04626822280883789, 0.04593068695068359, 0.04828688049316406, 0.05434249496459961, 0.04629497528076172, 0.04629212951660156, 0.0463614387512207, 0.04690739059448242, 0.04597555160522461, 0.04573513412475586, 0.04583913421630859, 0.04571254348754883, 0.04622012710571289, 0.04594483184814453, 0.04602675247192383, 0.04612220764160156, 0.0462262077331543, 0.04593459320068359, 0.04632995223999024, 0.046289920806884766, 0.04634307098388672, 0.04602262496948242, 0.04581481552124023, 0.04584534454345703, 0.04617379379272461, 0.0460252799987793, 0.045987838745117186, 0.046383102416992186, 0.04674150466918945, 0.04647225570678711, 0.04618950271606445, 0.046358528137207033, 0.04652640151977539, 0.046061344146728515, 0.04589596939086914, 0.046243839263916016, 0.04650188827514649, 0.0459117431640625, 0.04832902526855469, 0.046214847564697265, 0.046230945587158206, 0.04586393737792969, 0.04645059204101563, 0.04598374557495117, 0.046057758331298826, 0.045973217010498044, 0.0458158073425293, 0.045965312957763675, 0.045862945556640625, 0.04617417526245117, 0.046317344665527345, 0.04581193542480469, 0.04602470397949219, 0.04610995101928711, 0.04578326416015625, 0.04597200012207031, 0.04623360061645508, 0.04606268692016602, 0.04612089538574219, 0.04587401580810547, 0.046126625061035154, 0.045873729705810544, 0.04587728118896484, 0.045875263214111325, 0.04617398452758789, 0.0457504653930664, 0.045735424041748046]",tokens/s,21.66624416559614,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1898.635264,2770.20672,0.0,2367.684608,2328.95488,s,1,10.68803515625,10.68803515625,0.0,10.68803515625,10.68803515625,10.68803515625,10.68803515625,[10.68803515625],,kWh,5.7005303250006514e-05,6.28092268150032e-06,1.892612625200435e-05,8.221235218351119e-05,,MB,2039.267328,2847.801344,0.0,2430.599168,2386.812928,s,10,0.7647531051635743,0.07647531051635742,0.00010733638561097944,0.07650809860229492,0.07659334030151367,0.0766134693145752,0.07662957252502442,"[0.07652518463134765, 0.07643536376953125, 0.07626335906982422, 0.07652301025390625, 0.07663359832763672, 0.07633609771728515, 0.07642121887207032, 0.07649318695068359, 0.0765888671875, 0.07653321838378906]",tokens/s,3347.485590728576,kWh,2.2844664299479954e-06,2.519366203714402e-07,1.5225272596875198e-06,4.058930310006955e-06,tokens/kWh,63070804.48483022,MB,2040.459264,2877.161472,0.0,2459.959296,2398.931968,s,10,26.883248046875,2.6883248046875,0.009725434383678993,2.6911368408203127,2.6995618652343754,2.6996403076171878,2.699703061523438,"[2.69954443359375, 2.693630615234375, 2.69225732421875, 2.687345703125, 2.684614013671875, 2.69971875, 2.693890380859375, 2.672791259765625, 2.669439208984375, 2.690016357421875]",tokens/s,23.43466827005055,kWh,7.829285721505293e-05,8.635617708625796e-06,3.7935782084712085e-05,0.0001248642570083908,tokens/kWh,504547.9107425148,,s,630,26.88162311935425,0.042669243046594044,0.000697824155558709,0.042534318923950196,0.04310775489807129,0.04339898052215576,0.04554624248504639,"[0.04367769622802734, 0.043268096923828124, 0.042951774597167966, 0.04302121734619141, 0.04262911987304688, 0.04230255889892578, 0.042674560546875, 0.0431475830078125, 0.04230780792236328, 0.04269385528564453, 0.04258486557006836, 0.04278460693359375, 0.04285660934448242, 0.04268422317504883, 0.042621120452880856, 0.04256703948974609, 0.042371776580810545, 0.0425615348815918, 0.04278220748901367, 0.043106689453125, 0.0424420166015625, 0.042504032135009764, 0.04265369415283203, 0.043751808166503904, 0.04254470443725586, 0.043084800720214846, 0.042700801849365234, 0.04276019287109375, 0.04254719924926758, 0.042829822540283204, 0.04309811019897461, 0.04349747085571289, 0.04253833770751953, 0.04260224151611328, 0.042740577697753905, 0.04303059387207031, 0.04287590408325195, 0.04278374481201172, 0.042741153717041014, 0.04268092727661133, 0.04253497695922852, 0.04278182220458984, 0.04272825622558594, 0.04261824035644531, 0.042705535888671875, 0.04270489501953125, 0.04291769790649414, 0.04282124710083008, 0.042999553680419925, 0.043037502288818356, 0.042552959442138674, 0.043003551483154295, 0.04237385559082031, 0.0424796142578125, 0.04322233581542969, 0.04256985473632813, 0.04253139114379883, 0.04292931365966797, 0.042644287109375, 0.04639664077758789, 0.043243873596191404, 0.042848735809326174, 0.04272537612915039, 0.04314112091064453, 0.04252822494506836, 0.043063358306884764, 0.04285897445678711, 0.04248524856567383, 0.042538944244384765, 0.042407966613769534, 0.04266652679443359, 0.04277248001098633, 0.04271104049682617, 0.04244889450073242, 0.04263727951049805, 0.04298076629638672, 0.043635231018066406, 0.0429835205078125, 0.04284201431274414, 0.04273775863647461, 0.04250211334228516, 0.04270083236694336, 0.04239270401000977, 0.0423900146484375, 0.042359169006347654, 0.04237855911254883, 0.04222739028930664, 0.04237823867797851, 0.042393760681152345, 0.042305152893066404, 0.04254025650024414, 0.04268134307861328, 0.04267007827758789, 0.04355289459228515, 
0.04339260864257812, 0.04241641616821289, 0.042883071899414066, 0.04228505706787109, 0.04268255996704102, 0.04337667083740234, 0.04257702255249023, 0.042594974517822265, 0.04225228881835937, 0.04271897506713867, 0.042555583953857425, 0.04281769561767578, 0.04244675064086914, 0.04237644958496094, 0.04244473648071289, 0.04245996856689453, 0.042541217803955075, 0.04217177581787109, 0.04234451293945313, 0.042242462158203126, 0.04244070434570312, 0.043169792175292966, 0.04267571258544922, 0.04297983932495117, 0.04304870223999024, 0.04254723358154297, 0.04273299026489258, 0.04257628631591797, 0.0485871353149414, 0.043227134704589845, 0.042517696380615234, 0.04245996856689453, 0.0432088623046875, 0.042950401306152346, 0.042573856353759765, 0.04331827163696289, 0.04251148986816406, 0.04245491027832031, 0.04271308898925781, 0.04253702545166015, 0.04480198287963867, 0.04352000045776367, 0.0427081298828125, 0.04235513687133789, 0.04279497528076172, 0.04260895919799805, 0.042524799346923825, 0.04226012802124023, 0.04225878524780274, 0.042426368713378904, 0.042379264831542966, 0.042657791137695314, 0.04253452682495117, 0.04259993743896484, 0.04249008178710938, 0.04254377746582031, 0.042850303649902347, 0.04263859176635742, 0.0423284797668457, 0.04245743942260742, 0.042297054290771484, 0.042369312286376956, 0.042401695251464845, 0.04249200057983398, 0.04259328079223633, 0.04253769683837891, 0.04242457580566406, 0.04257379150390625, 0.04279507064819336, 0.04281113433837891, 0.04289766311645508, 0.04298342514038086, 0.04252390289306641, 0.042474239349365235, 0.042278911590576174, 0.04244179153442383, 0.04237788772583008, 0.04237052917480469, 0.04235696029663086, 0.04254576110839844, 0.042897407531738284, 0.04249942398071289, 0.042697376251220706, 0.04485292816162109, 0.043245887756347655, 0.043879806518554686, 0.0425928955078125, 0.04290560150146484, 0.04286614227294922, 0.043022174835205075, 0.04272115325927734, 0.04323958587646484, 0.04255219268798828, 0.042726558685302736, 0.04284070587158203, 0.04299164962768555, 0.04528835296630859, 0.04308780670166015, 0.042754112243652345, 0.04243804931640625, 0.042549854278564454, 0.042393856048583985, 0.042536705017089844, 0.04362857437133789, 0.04248934555053711, 0.042367744445800784, 0.042530529022216795, 0.042567680358886716, 0.042874881744384766, 0.04235862350463867, 0.04244496154785156, 0.04242220687866211, 0.042348606109619144, 0.04253401565551758, 0.042424736022949217, 0.04229987335205078, 0.042534912109375, 0.04268230438232422, 0.04285446548461914, 0.04229276657104492, 0.04245142364501953, 0.0425082893371582, 0.0426391372680664, 0.04306880187988281, 0.04298620986938476, 0.043012222290039065, 0.04291584014892578, 0.04256358337402344, 0.04248166275024414, 0.042405025482177734, 0.04226319885253906, 0.04223161697387695, 0.04228476715087891, 0.04229391860961914, 0.04228505706787109, 0.04254515075683594, 0.04316729736328125, 0.04266643142700195, 0.04255481719970703, 0.04251910400390625, 0.04243251037597656, 0.042842113494873046, 0.04290089416503906, 0.042816097259521485, 0.042483039855957035, 0.04227942276000977, 0.04245724868774414, 0.042579071044921875, 0.04244713592529297, 0.04262972640991211, 0.043146270751953125, 0.04308886337280273, 0.042858238220214846, 0.04282598495483399, 0.042614112854003905, 0.04280963134765625, 0.04228953552246094, 0.042148895263671875, 0.04347663879394531, 0.04286703872680664, 0.04247500610351562, 0.042373790740966796, 0.042367008209228514, 0.04303238296508789, 0.04290969467163086, 0.04248476791381836, 0.04229833602905273, 
0.04257756805419922, 0.04234447860717774, 0.04233798217773437, 0.0423164176940918, 0.042348480224609374, 0.04265785598754883, 0.04247107315063477, 0.04230998229980469, 0.04227052688598633, 0.04230368041992188, 0.04236220932006836, 0.04233801651000976, 0.04222048187255859, 0.042390846252441404, 0.043160255432128904, 0.04307465744018555, 0.04273859024047852, 0.04249190521240234, 0.04257763290405273, 0.04272304153442383, 0.042450592041015624, 0.043404193878173826, 0.042379264831542966, 0.04247049713134766, 0.04229955291748047, 0.04235340881347656, 0.04231103897094726, 0.042328704833984376, 0.04253833770751953, 0.04277110290527344, 0.04250998306274414, 0.042484062194824215, 0.042549312591552736, 0.042342334747314456, 0.042355873107910155, 0.042490142822265625, 0.04260230255126953, 0.042608638763427735, 0.042533153533935546, 0.04259888076782226, 0.042247840881347656, 0.042275169372558596, 0.04247737503051758, 0.04276185607910156, 0.04266656112670898, 0.043243518829345705, 0.04296844863891602, 0.04296480178833008, 0.042522945404052735, 0.04244483184814453, 0.042469375610351565, 0.04410940933227539, 0.043325790405273436, 0.04327801513671875, 0.04344623947143555, 0.04283599853515625, 0.042786750793457035, 0.04259356689453125, 0.042490657806396485, 0.04246323013305664, 0.042727134704589845, 0.04246156692504883, 0.042304798126220705, 0.04242287826538086, 0.04231510543823242, 0.04255926513671875, 0.04300483322143555, 0.042600318908691405, 0.04263510513305664, 0.04333737564086914, 0.042805599212646483, 0.04244508743286133, 0.04239769744873047, 0.042534912109375, 0.042534912109375, 0.04243046569824219, 0.042434558868408204, 0.04252262496948242, 0.04236883163452149, 0.042200927734375, 0.04245318222045898, 0.04246707153320312, 0.042618881225585936, 0.043262367248535154, 0.042813438415527344, 0.0426577262878418, 0.042754112243652345, 0.04283612823486328, 0.043508991241455075, 0.0525645751953125, 0.04282777786254883, 0.04282572937011719, 0.04273551940917969, 0.042646751403808594, 0.042754432678222654, 0.04264191818237305, 0.04291584014892578, 0.04284620666503906, 0.04279827117919922, 0.042428993225097654, 0.042434337615966794, 0.042287582397460936, 0.045797374725341795, 0.04333916854858398, 0.042363456726074215, 0.042415233612060545, 0.04239247894287109, 0.04259619140625, 0.0422476806640625, 0.04228915023803711, 0.04254089736938477, 0.04265875244140625, 0.04288499069213867, 0.04269811248779297, 0.042471424102783206, 0.04270512008666992, 0.042422367095947267, 0.042469375610351565, 0.04314521789550781, 0.04304399871826172, 0.04247564697265625, 0.04230831909179687, 0.042691680908203126, 0.04288399887084961, 0.04272684860229492, 0.0427628173828125, 0.04258201599121094, 0.04233334350585938, 0.042550113677978514, 0.042240001678466796, 0.04210483169555664, 0.04213145446777344, 0.042270721435546874, 0.04242540740966797, 0.04234336090087891, 0.042040992736816406, 0.04216048049926758, 0.04237311935424805, 0.04247552108764648, 0.04228214263916016, 0.042249214172363284, 0.04648739242553711, 0.0437716178894043, 0.045441184997558594, 0.04287648010253906, 0.042369598388671874, 0.04236288070678711, 0.04218675231933594, 0.04237516784667969, 0.04227686309814453, 0.04220723342895508, 0.042899070739746095, 0.04318003082275391, 0.04235615921020508, 0.04322835159301758, 0.04272835159301758, 0.042824382781982424, 0.04375183868408203, 0.04740480041503906, 0.04370403289794922, 0.042852638244628906, 0.042591808319091796, 0.04342367935180664, 0.04238956832885742, 0.04216432189941406, 0.04250790405273437, 0.04262575912475586, 
0.04252467346191406, 0.04250435256958008, 0.04213948822021484, 0.042264575958251956, 0.04224739074707031, 0.042339103698730465, 0.042178558349609374, 0.04223347091674805, 0.04276700973510742, 0.04294831848144531, 0.04238041687011719, 0.042474369049072265, 0.04267331314086914, 0.04269043350219726, 0.04229830551147461, 0.04242639923095703, 0.04233420944213867, 0.04230688095092774, 0.04242812728881836, 0.04222665786743164, 0.04283801651000976, 0.04255244827270508, 0.04316864013671875, 0.04228505706787109, 0.042213375091552735, 0.042315582275390624, 0.04227091217041016, 0.04250124740600586, 0.042242942810058595, 0.04258201599121094, 0.04284415817260742, 0.04230348968505859, 0.042221569061279295, 0.0420860481262207, 0.04222608184814453, 0.04212319946289062, 0.042095840454101564, 0.042085151672363284, 0.0421662712097168, 0.0429117431640625, 0.042872833251953124, 0.04261404800415039, 0.042756832122802735, 0.042412033081054686, 0.04243558502197266, 0.04242124938964844, 0.0423295669555664, 0.04244313430786133, 0.042270881652832035, 0.042246143341064454, 0.042240001678466796, 0.04218675231933594, 0.042160064697265624, 0.043259456634521486, 0.04271155166625976, 0.04244617462158203, 0.0423389778137207, 0.04224409484863281, 0.042443840026855466, 0.04221427154541016, 0.042168384552001954, 0.04224204635620117, 0.04266169738769531, 0.04215980911254883, 0.04208486557006836, 0.04232134246826172, 0.04225286483764648, 0.042237377166748045, 0.04255596923828125, 0.04265548706054687, 0.04237542343139648, 0.04263116836547851, 0.043035999298095706, 0.042635936737060544, 0.04253094482421875, 0.04229926300048828, 0.0428337287902832, 0.04247763061523437, 0.042254463195800784, 0.042387454986572266, 0.04242227172851563, 0.04231887817382812, 0.042343391418457034, 0.04280924987792969, 0.04283318328857422, 0.042584896087646484, 0.042212543487548826, 0.042361663818359374, 0.04214169692993164, 0.042372127532958985, 0.04231676864624023, 0.04263520050048828, 0.04242969512939453, 0.042147838592529296, 0.04207519912719727, 0.042229503631591794, 0.04213721466064453, 0.04217187118530273, 0.04216073608398437, 0.0429543342590332, 0.042482398986816404, 0.04228623962402344, 0.04215280151367187, 0.042679649353027344, 0.042240673065185544, 0.04235059356689453, 0.0422022705078125, 0.04229536056518555, 0.042259231567382816, 0.042158176422119144, 0.042608543395996096, 0.04232806396484375, 0.04213056182861328, 0.04245363235473633, 0.04238313674926758, 0.042425918579101565, 0.042324897766113284, 0.042190593719482423, 0.04251878356933594, 0.04262879943847656, 0.042493408203125, 0.04249686431884766, 0.04280448150634766, 0.042305374145507814, 0.04226326370239258, 0.04210435104370117, 0.04247824096679687, 0.042180641174316406, 0.04214371109008789, 0.04215008163452148, 0.04208620834350586, 0.042411998748779295, 0.04225846481323242, 0.04214169692993164, 0.04240582275390625, 0.042083614349365236, 0.04209743881225586, 0.04253411102294922, 0.04311734390258789, 0.04254339218139649, 0.042213375091552735, 0.04220908737182617, 0.04220332717895508, 0.04222915267944336, 0.04240873718261719, 0.042530624389648435, 0.042880382537841794, 0.04261529541015625, 0.04300198364257812, 0.04266393661499023, 0.04493100738525391, 0.04322723388671875, 0.042510078430175784, 0.0424409294128418, 0.043032768249511716, 0.042528865814208984, 0.042299102783203125, 0.042315582275390624, 0.042430656433105465, 0.04252284622192383, 0.042632991790771485, 0.042966751098632815, 0.04306086349487305, 0.045589153289794924, 0.04264755249023437, 0.04290332794189453, 0.04266985702514649, 
0.04378374481201172, 0.04327718353271484, 0.04293123245239258, 0.04274256134033203, 0.04289759826660156, 0.042575614929199215, 0.042494209289550784, 0.0423768310546875, 0.042418560028076174, 0.04245219039916992, 0.0424681282043457, 0.042248191833496096, 0.04263705444335938, 0.042409408569335935, 0.04219372940063477, 0.04287209701538086, 0.04356560134887695, 0.04241632080078125, 0.04257177734375, 0.044025856018066405, 0.043210750579833986, 0.042137374877929686, 0.042479137420654296, 0.04234297561645508, 0.04259187316894531, 0.04255583953857422, 0.04236089706420899, 0.04227199935913086, 0.04276505661010742, 0.04223526382446289, 0.04221196746826172, 0.04211916732788086, 0.0422031364440918, 0.0425728645324707, 0.042241024017333983]",tokens/s,23.436084837690185,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 1047, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 890, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 366, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1900.93312,2770.20672,0.0,2367.684608,2328.95488,s,1,10.4542060546875,10.4542060546875,0.0,10.4542060546875,10.4542060546875,10.4542060546875,10.4542060546875,[10.4542060546875],,kWh,5.590069485000603e-05,6.159026360145332e-06,1.7947514358001432e-05,8.000723556815279e-05,,MB,2007.728128,2847.801344,0.0,2430.599168,2386.812928,s,10,0.7663075256347657,0.07663075256347655,0.0001503826511966106,0.07660469055175781,0.07680296783447266,0.07687882843017578,0.07693951690673828,"[0.07659235382080078, 0.07652227020263672, 0.07661702728271484, 0.07650579071044922, 0.07641667175292968, 0.07652531433105468, 0.07673347473144532, 0.0767861099243164, 0.0769546890258789, 0.07665382385253906]",tokens/s,3340.695366236214,kWh,2.2905331362630104e-06,2.5245069747244634e-07,1.5243718792187246e-06,4.0673557129541814e-06,tokens/kWh,62940155.24254783,MB,2009.116672,2877.161472,0.0,2459.959296,2398.931968,s,10,27.068677001953127,2.7068677001953128,0.009065913600233311,2.7085740966796874,2.7162970458984375,2.717575524902344,2.7185983081054688,"[2.706833984375, 2.69775244140625, 2.7073486328125, 2.68984716796875, 2.7159951171875, 2.71885400390625, 2.716012939453125, 2.695991455078125, 2.709799560546875, 2.71024169921875]",tokens/s,23.27413341828796,kWh,7.882388409957049e-05,8.694355700714422e-06,3.806554868158138e-05,0.00012558378848186627,tokens/kWh,501657.1068732881,,s,630,27.067008949279767,0.042963506268698076,0.0005615963649066973,0.04285103988647461,0.043364228057861325,0.04357035903930664,0.045099620475769056,"[0.0430571517944336, 0.042798847198486326, 0.047116321563720705, 0.043133377075195316, 0.04278803253173828, 0.04272745513916015, 0.04275068664550781, 0.04268835067749024, 0.043240833282470706, 0.043413665771484374, 0.04299612808227539, 0.043181793212890625, 0.043333984375, 0.0431566390991211, 0.04294940948486328, 0.04303673553466797, 0.04304671859741211, 0.04285852813720703, 0.0426104621887207, 0.043102592468261716, 0.04293427276611328, 0.04280473709106445, 0.042840576171875, 0.042847518920898435, 0.04305583953857422, 0.04312473678588867, 0.0430623664855957, 0.04319939041137695, 0.04276019287109375, 0.04264550399780274, 0.04257583999633789, 0.04294784164428711, 0.04261750411987305, 0.04263148880004883, 0.04234985733032227, 0.04259689712524414, 0.042831615447998045, 0.04295862579345703, 0.04259395217895508, 0.04258028793334961, 0.04311641693115234, 0.042977920532226564, 0.04267007827758789, 0.04288111877441406, 0.04279644775390625, 0.04278025436401367, 0.04264438247680664, 0.04254105758666992, 0.04289878463745117, 0.042635936737060544, 0.04269055938720703, 0.04276633453369141, 0.042835422515869144, 0.042634880065917966, 0.04263951873779297, 0.042834686279296874, 0.04281148910522461, 0.04282767868041992, 0.04306124877929687, 
0.04521779251098633, 0.043025409698486325, 0.042823806762695316, 0.04262566375732422, 0.04308992004394531, 0.04259209442138672, 0.043120254516601564, 0.04289590454101563, 0.04265107345581055, 0.042579551696777344, 0.04254537582397461, 0.04279782485961914, 0.04261068725585938, 0.042842113494873046, 0.04262092971801758, 0.042850303649902347, 0.042759902954101564, 0.04257820892333984, 0.042675487518310545, 0.042646240234375, 0.042522335052490236, 0.042874622344970706, 0.04472646331787109, 0.04307001495361328, 0.04267385482788086, 0.04244070434570312, 0.04366140747070312, 0.042702560424804685, 0.04249219131469727, 0.04253900909423828, 0.04254473495483398, 0.04269295883178711, 0.04252691268920898, 0.04265891265869141, 0.04279171371459961, 0.042439807891845704, 0.04288924789428711, 0.04256240081787109, 0.04303257751464844, 0.04268646240234375, 0.04245094299316406, 0.042502143859863284, 0.04252595138549805, 0.04251315307617187, 0.04278214263916016, 0.04280758285522461, 0.04264511871337891, 0.042651615142822265, 0.042684192657470706, 0.042587039947509765, 0.042997760772705076, 0.04258816146850586, 0.04259430313110352, 0.04263955307006836, 0.042796958923339845, 0.04285865783691406, 0.04254899215698242, 0.042684288024902345, 0.042543231964111326, 0.043081470489501957, 0.04260480117797852, 0.0425799674987793, 0.04267929458618164, 0.042752193450927733, 0.04395500946044922, 0.0462295036315918, 0.04292940902709961, 0.043009761810302735, 0.042783424377441405, 0.042823680877685545, 0.04279318237304688, 0.04334499359130859, 0.04267897415161133, 0.04257926559448242, 0.042660415649414064, 0.042936481475830075, 0.04298339080810547, 0.044181503295898435, 0.044348960876464845, 0.04327990341186523, 0.04286764907836914, 0.043253761291503906, 0.04274380874633789, 0.04281865692138672, 0.04308623886108399, 0.04292182540893555, 0.04294492721557617, 0.042573280334472656, 0.04280393600463867, 0.04323702239990234, 0.042915233612060545, 0.04271820831298828, 0.04271104049682617, 0.042648670196533206, 0.042762561798095705, 0.04281139373779297, 0.04263967895507813, 0.04323971176147461, 0.042668033599853515, 0.043399166107177735, 0.04412179183959961, 0.043468639373779296, 0.04333820724487305, 0.04336598587036133, 0.04343996810913086, 0.043854175567626955, 0.043387294769287106, 0.04340233612060547, 0.04315846252441406, 0.043209568023681644, 0.043065887451171875, 0.04267663955688476, 0.04257382583618164, 0.042592254638671875, 0.04254105758666992, 0.04300595092773438, 0.04260800170898438, 0.042623264312744144, 0.04251171112060547, 0.04238643264770508, 0.04244275283813476, 0.0427204475402832, 0.0427545280456543, 0.04293600082397461, 0.042621601104736326, 0.04255683135986328, 0.04305097579956055, 0.04315814590454101, 0.0428864631652832, 0.04255609512329102, 0.042584415435791015, 0.04283785629272461, 0.04272675323486328, 0.042959041595458984, 0.04302912139892578, 0.042710399627685545, 0.042601089477539066, 0.042782718658447266, 0.04302140808105469, 0.043024318695068356, 0.042904544830322265, 0.04270431900024414, 0.042775104522705075, 0.04248960113525391, 0.04290496063232422, 0.04278524780273438, 0.04249203109741211, 0.04274396896362305, 0.04264502334594727, 0.04272598266601563, 0.0425533447265625, 0.04261628723144531, 0.042676544189453124, 0.04257519912719727, 0.042625408172607425, 0.04269107055664063, 0.04269670486450195, 0.042947872161865235, 0.04327289581298828, 0.04280070495605469, 0.04263564682006836, 0.042608734130859374, 0.042676223754882815, 0.04293983840942383, 0.04254163360595703, 0.042510337829589843, 
0.042420223236083986, 0.04248524856567383, 0.042723838806152346, 0.04257382583618164, 0.042641311645507815, 0.04277167892456055, 0.04258291244506836, 0.04243251037597656, 0.04247347259521484, 0.04249132919311523, 0.04248598480224609, 0.04283631896972656, 0.042571617126464845, 0.04257183837890625, 0.04250576019287109, 0.04282220840454101, 0.04292179107666016, 0.04273535919189453, 0.04261299133300781, 0.04249350357055664, 0.04249663925170898, 0.042600448608398435, 0.04270668792724609, 0.04280118560791016, 0.042522113800048826, 0.04281974411010742, 0.0427710075378418, 0.04351359939575195, 0.04282207870483398, 0.04319027328491211, 0.043055103302001956, 0.043152542114257814, 0.04323168182373047, 0.043159488677978516, 0.04313955307006836, 0.04321027374267578, 0.0434183349609375, 0.04306467056274414, 0.043008415222167966, 0.0429854736328125, 0.04296908950805664, 0.043318302154541015, 0.04317078399658203, 0.04305100631713867, 0.042890625, 0.042842750549316404, 0.04273356628417969, 0.04293222427368164, 0.04295193481445313, 0.04319308853149414, 0.043015201568603514, 0.043094879150390626, 0.04302656173706055, 0.04293203353881836, 0.04285254287719727, 0.04296089553833008, 0.04318822479248047, 0.04292403030395508, 0.04308992004394531, 0.043415550231933595, 0.04327622222900391, 0.044810302734375, 0.0428084487915039, 0.04289215850830078, 0.04274176025390625, 0.042829822540283204, 0.04267827224731445, 0.042776321411132814, 0.042907966613769534, 0.043052993774414065, 0.043050209045410154, 0.042801822662353516, 0.04258214569091797, 0.04275356674194336, 0.04307574462890625, 0.042932193756103514, 0.043023872375488284, 0.04284425735473633, 0.04277923202514648, 0.04294672012329102, 0.04296908950805664, 0.04333353424072266, 0.04301785659790039, 0.04333795166015625, 0.04474496078491211, 0.04321392059326172, 0.043366527557373045, 0.04321273422241211, 0.044190113067626956, 0.04334595108032226, 0.04348012924194336, 0.04299977493286133, 0.04300284957885742, 0.043079681396484375, 0.043237377166748046, 0.04348681640625, 0.043206401824951175, 0.043135265350341796, 0.04301046371459961, 0.04291376113891601, 0.04284560012817383, 0.042812255859375, 0.043259937286376955, 0.0428765754699707, 0.04323881530761719, 0.043075614929199216, 0.04295932769775391, 0.042891265869140625, 0.042805408477783205, 0.04303257751464844, 0.04268412780761719, 0.04280348968505859, 0.042749088287353514, 0.04271615982055664, 0.04280249786376953, 0.042621471405029296, 0.04352614212036133, 0.044355583190917966, 0.04398284912109375, 0.04317520141601563, 0.04289603042602539, 0.04288211059570313, 0.04276332855224609, 0.0429988784790039, 0.04460425567626953, 0.043402305603027345, 0.043168704986572264, 0.04287385559082031, 0.043254783630371094, 0.04273766326904297, 0.04309811019897461, 0.04333977508544922, 0.04318310546875, 0.043230369567871095, 0.04305289459228516, 0.04311449432373047, 0.04276838302612305, 0.04317593765258789, 0.04382089614868164, 0.045514911651611326, 0.04333513641357422, 0.04333622360229492, 0.043014144897460936, 0.04342153549194336, 0.04300815963745117, 0.043007488250732424, 0.042866783142089845, 0.04289168167114258, 0.04275190353393555, 0.043151615142822265, 0.04296646499633789, 0.04339731216430664, 0.04288560104370117, 0.043507713317871094, 0.0433070068359375, 0.042987518310546875, 0.04304102325439453, 0.04290063858032227, 0.04346441650390625, 0.043006847381591794, 0.04309196853637695, 0.04313087844848633, 0.04287692642211914, 0.04289510345458984, 0.04280780792236328, 0.042962017059326174, 0.04292879867553711, 0.0429455680847168, 
0.0428771858215332, 0.04304313659667969, 0.04372102355957031, 0.043063392639160154, 0.04337254333496094, 0.04297727966308594, 0.0429117431640625, 0.04349679946899414, 0.043038848876953126, 0.04328297424316406, 0.04289945602416992, 0.04289523315429687, 0.04281766510009766, 0.042778656005859374, 0.04296860885620117, 0.04276684951782227, 0.04274169540405273, 0.042660896301269534, 0.04258710479736328, 0.04274176025390625, 0.04261641693115235, 0.04259471893310547, 0.04256358337402344, 0.042790912628173826, 0.042845409393310545, 0.04420915222167969, 0.043254592895507815, 0.04411673736572266, 0.04357747268676758, 0.04333513641357422, 0.043317855834960936, 0.04303260803222656, 0.04370022583007813, 0.043415550231933595, 0.04334169769287109, 0.04351193618774414, 0.04335164642333984, 0.0436550064086914, 0.04347552108764648, 0.0433004150390625, 0.042928031921386715, 0.043074081420898434, 0.043671550750732424, 0.04290764617919922, 0.042771457672119144, 0.0427606086730957, 0.04308438491821289, 0.04312678527832031, 0.04352457427978516, 0.043747329711914064, 0.04293427276611328, 0.04277657699584961, 0.0426987190246582, 0.04307523345947266, 0.04301820755004883, 0.04289567947387695, 0.042612224578857424, 0.042508544921875, 0.042374622344970705, 0.04272796630859375, 0.04268272018432617, 0.04344627380371094, 0.042436607360839845, 0.04281875228881836, 0.042378047943115234, 0.042438495635986326, 0.04258176040649414, 0.04252918243408203, 0.04280319976806641, 0.04255078506469727, 0.04269696044921875, 0.04265804672241211, 0.04263459014892578, 0.04273628616333008, 0.04277657699584961, 0.04301379013061524, 0.04257622528076172, 0.04307465744018555, 0.04290447998046875, 0.04277376174926758, 0.04259097671508789, 0.04263494491577149, 0.04283212661743164, 0.04279897689819336, 0.04266617584228516, 0.042781761169433594, 0.04290860748291016, 0.04323497772216797, 0.042944862365722654, 0.04297689437866211, 0.04300543975830078, 0.04310720062255859, 0.04280319976806641, 0.04261478424072265, 0.04263724899291992, 0.04259833526611328, 0.04337676620483399, 0.04280319976806641, 0.04279081726074219, 0.04283536148071289, 0.042988224029541014, 0.042616992950439456, 0.042428062438964846, 0.042712543487548826, 0.042574558258056644, 0.04268646240234375, 0.042499870300292966, 0.04251670455932617, 0.04283596801757812, 0.04289344024658203, 0.04272659301757813, 0.04331315231323242, 0.04297318267822266, 0.04301824188232422, 0.04292403030395508, 0.042659839630126956, 0.04336025619506836, 0.043055103302001956, 0.04271308898925781, 0.047193695068359375, 0.043364032745361325, 0.04324016189575195, 0.042669345855712894, 0.04257987213134766, 0.042672958374023434, 0.043104255676269534, 0.04330223846435547, 0.04316451263427734, 0.04305276870727539, 0.043055137634277346, 0.04307769775390625, 0.042889217376708984, 0.042883071899414066, 0.04301619338989258, 0.042883071899414066, 0.04257791900634766, 0.042786846160888674, 0.042466655731201175, 0.04265987014770508, 0.04259392166137695, 0.04269327926635742, 0.04672134399414062, 0.0431976318359375, 0.04341980743408203, 0.042770942687988284, 0.04313103866577148, 0.04278681564331055, 0.04281148910522461, 0.04267113494873047, 0.04293926239013672, 0.042853694915771484, 0.042611392974853515, 0.04273766326904297, 0.04304838562011719, 0.04272185516357422, 0.0427204475402832, 0.042685249328613284, 0.04316534423828125, 0.04259244918823242, 0.042614944458007814, 0.042881023406982424, 0.04263731384277344, 0.04273766326904297, 0.042635265350341796, 0.04327222442626953, 0.04302435302734375, 0.042624927520751955, 
0.043106399536132815, 0.04287667083740234, 0.042754302978515624, 0.04263529586791992, 0.04253692626953125, 0.04261273574829102, 0.04314918518066406, 0.04297574234008789, 0.04245708847045898, 0.043423744201660154, 0.04257177734375, 0.04236083221435547, 0.04230464172363281, 0.04230438232421875, 0.049539070129394534, 0.044281856536865234, 0.04298956680297852, 0.04263644790649414, 0.042697566986083985, 0.0429035530090332, 0.042385406494140625, 0.042987518310546875, 0.04278659057617187, 0.04253494262695313, 0.04240198516845703, 0.04328857421875, 0.04268851089477539, 0.04275775909423828, 0.04412195205688477, 0.04473705673217773, 0.04351996612548828, 0.04285177612304687, 0.042831615447998045, 0.04250067138671875, 0.04327977752685547, 0.04264230346679688, 0.042485759735107424, 0.04256524658203125, 0.04308819198608398, 0.04274950408935547, 0.04299622344970703, 0.04264044952392578, 0.042952991485595705, 0.042871646881103516, 0.04308768081665039, 0.04271513748168945, 0.04262857437133789, 0.04329308700561523, 0.04286787033081055, 0.04283855819702148, 0.04262137603759766, 0.042718624114990236, 0.042871391296386716, 0.04291107177734375, 0.04268860626220703, 0.04278534317016602, 0.043216670989990234, 0.04283788681030273, 0.04283193588256836, 0.04279939270019531, 0.04273667144775391, 0.042922847747802736, 0.04333484649658203, 0.04281235122680664, 0.0430030403137207, 0.04356166458129883, 0.04280499267578125, 0.04321664047241211, 0.0428878402709961, 0.04298137664794922]",tokens/s,23.275567728245914,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1895.424,2770.20672,0.0,2367.684608,2328.95488,s,1,10.69037109375,10.69037109375,0.0,10.69037109375,10.69037109375,10.69037109375,10.69037109375,[10.69037109375],,kWh,5.610426208333383e-05,6.181408435999716e-06,1.7931681011999073e-05,8.021735153133263e-05,,MB,2012.151808,2847.801344,0.0,2430.599168,2386.812928,s,10,0.763185691833496,0.07631856918334962,0.00017412190636533826,0.07629755020141601,0.07647814331054688,0.07658756713867187,0.07667510620117188,"[0.07669699096679687, 0.07634966278076172, 0.07603718566894531, 0.07624543762207031, 0.07637200164794922, 0.0762110366821289, 0.07645382690429688, 0.07620345306396484, 0.0761890869140625, 0.07642700958251954]",tokens/s,3354.360580122765,kWh,2.277683190213189e-06,2.5118983056306275e-07,1.5115042238449956e-06,4.040377244621247e-06,tokens/kWh,63360420.203534216,MB,2016.710656,2877.161472,0.0,2459.959296,2398.931968,s,10,29.75636254882813,2.975636254882813,0.005760456546906441,2.974993408203125,2.9826527587890626,2.9833654418945312,2.9839355883789063,"[2.9672412109375, 2.984078125, 2.982494384765625, 2.974836181640625, 2.971183837890625, 2.978723388671875, 2.971577392578125, 2.9685986328125, 2.975150634765625, 
2.982478759765625]",tokens/s,21.171942604416575,kWh,8.666195005145351e-05,9.558756871039272e-06,4.065075172815463e-05,0.0001368714586506474,tokens/kWh,460285.8815204276,,s,630,29.754756286621085,0.04722977188352555,0.00057492816144639,0.04712243270874024,0.04762779541015625,0.0479372932434082,0.04929599617004395,"[0.04719980621337891, 0.047352256774902346, 0.04720851135253906, 0.04758726501464844, 0.047709598541259765, 0.0473155517578125, 0.047159072875976565, 0.04670399856567383, 0.046653438568115234, 0.04681539154052734, 0.04715999984741211, 0.04681878280639649, 0.04677711868286133, 0.04671622467041016, 0.046827392578125, 0.04676182556152344, 0.04719484710693359, 0.04716953659057617, 0.04717488098144531, 0.047617950439453126, 0.04706825637817383, 0.04700691223144531, 0.046887039184570316, 0.04665935897827148, 0.04709852981567383, 0.047339553833007815, 0.04718592071533203, 0.047152606964111325, 0.04799686431884766, 0.04728278350830078, 0.046944255828857424, 0.04725964736938477, 0.04732928085327148, 0.047009792327880856, 0.047462398529052735, 0.04685548782348633, 0.04721939086914063, 0.047218303680419925, 0.046710208892822264, 0.04715980911254883, 0.04715974426269531, 0.047067134857177735, 0.0468969612121582, 0.047132129669189456, 0.046959327697753905, 0.04683366394042969, 0.04681932830810547, 0.04681727981567383, 0.04686771011352539, 0.047237888336181644, 0.046772064208984374, 0.04676009750366211, 0.04670022583007812, 0.04702032089233398, 0.046704544067382815, 0.04678799819946289, 0.046975711822509765, 0.046884864807128904, 0.04684185409545898, 0.046855712890625, 0.04689148712158203, 0.049479679107666014, 0.047828704833984374, 0.04762771224975586, 0.047162368774414064, 0.04686950302124023, 0.047599998474121094, 0.04716812896728516, 0.046884864807128904, 0.047072830200195315, 0.04717158508300781, 0.04703871917724609, 0.047218879699707034, 0.04741072082519531, 0.047015968322753905, 0.046874176025390624, 0.04687936019897461, 0.04732035064697265, 0.047206912994384766, 0.04731132888793945, 0.0473513298034668, 0.05114310455322266, 0.04760374450683594, 0.047241214752197266, 0.048352832794189456, 0.04722118377685547, 0.047093761444091796, 0.04695027160644531, 0.04700128173828125, 0.04685254287719726, 0.047108097076416014, 0.04714495849609375, 0.0470302734375, 0.04696883010864258, 0.047642623901367184, 0.04715311813354492, 0.04699728012084961, 0.04775551986694336, 0.04794144058227539, 0.047894718170166016, 0.047499198913574216, 0.04746041488647461, 0.04728601455688477, 0.05089510345458984, 0.04767334365844727, 0.04761804962158203, 0.047091712951660154, 0.047008094787597654, 0.046927295684814456, 0.04762617492675781, 0.047187744140625, 0.047018497467041016, 0.04771206283569336, 0.04707551956176758, 0.04685369491577149, 0.0474587516784668, 0.047351806640625, 0.0467322883605957, 0.04688816070556641, 0.047285888671875, 0.0469587516784668, 0.04720025634765625, 0.04718956756591797, 0.04711468887329102, 0.04726988983154297, 0.047255104064941406, 0.04724009704589844, 0.04725491333007813, 0.04713862228393555, 0.0472215690612793, 0.047777793884277345, 0.048261119842529294, 0.04700143814086914, 0.04676784133911133, 0.047067169189453126, 0.047290592193603515, 0.046782657623291014, 0.0467597770690918, 0.046923934936523436, 0.04719820785522461, 0.04715929412841797, 0.04689920043945312, 0.04693196868896484, 0.046986976623535154, 0.0467883186340332, 0.0468158073425293, 0.04709971237182617, 0.046739646911621094, 0.04705279922485352, 0.04686383819580078, 0.04693161773681641, 0.047113086700439455, 
0.04709785461425781, 0.0469073600769043, 0.04683164978027344, 0.047308544158935546, 0.04734182357788086, 0.04718182373046875, 0.04708534240722656, 0.0471325454711914, 0.04753238296508789, 0.047070335388183594, 0.047262592315673826, 0.04717158508300781, 0.04738457489013672, 0.04742367935180664, 0.04713452911376953, 0.04689241409301758, 0.047026817321777346, 0.05535728073120117, 0.0477344970703125, 0.046956993103027346, 0.04690534210205078, 0.04741120147705078, 0.04725964736938477, 0.04716304016113281, 0.047083873748779294, 0.047349536895751956, 0.04733340835571289, 0.04725167846679688, 0.04727190399169922, 0.04827545547485351, 0.04768972778320312, 0.049070079803466796, 0.04784288024902344, 0.047277889251708984, 0.047274112701416016, 0.04696275329589844, 0.047012287139892577, 0.04762854385375977, 0.04759116744995117, 0.04736524963378906, 0.047712512969970707, 0.04714083099365234, 0.046932640075683596, 0.04700735855102539, 0.04703680038452149, 0.04736819076538086, 0.04699094390869141, 0.046659999847412106, 0.04709312057495117, 0.04700223922729492, 0.04688022232055664, 0.04677667236328125, 0.046954689025878904, 0.04719411087036133, 0.04698457717895508, 0.04688159942626953, 0.0469870719909668, 0.046720287322998044, 0.046908126831054685, 0.046970176696777347, 0.04686918258666992, 0.0469749755859375, 0.047347713470458984, 0.047432735443115236, 0.04742675018310547, 0.048312095642089846, 0.04754841613769531, 0.04740857696533203, 0.04723110580444336, 0.04701968002319336, 0.04712015914916992, 0.04699852752685547, 0.04700902557373047, 0.04680575942993164, 0.04706099319458008, 0.0471162223815918, 0.046870590209960934, 0.046862335205078126, 0.04696198272705078, 0.047258304595947265, 0.04708761596679688, 0.047004928588867186, 0.04714358520507812, 0.047812255859375, 0.047884735107421875, 0.04760710525512695, 0.04722963333129883, 0.047331329345703124, 0.047644672393798826, 0.04739276885986328, 0.04713872146606445, 0.04742768096923828, 0.04691558456420898, 0.0476868782043457, 0.047312801361083984, 0.04770006561279297, 0.047569023132324216, 0.04726646423339844, 0.04741494369506836, 0.047679550170898435, 0.04742643356323242, 0.04749334335327148, 0.04729219055175781, 0.04739276885986328, 0.04695574569702148, 0.04692457580566406, 0.04772774505615234, 0.0469145278930664, 0.04720137786865235, 0.04695123291015625, 0.047048702239990234, 0.047058944702148435, 0.04712451171875, 0.0471387825012207, 0.04708966445922851, 0.04710604858398437, 0.046911392211914066, 0.047167583465576174, 0.04696268844604492, 0.04708323287963867, 0.04715311813354492, 0.047122016906738284, 0.046892959594726565, 0.046954334259033205, 0.04700656127929687, 0.0470959358215332, 0.04726988983154297, 0.047519744873046874, 0.04681523132324219, 0.047007038116455076, 0.04747062301635742, 0.047239425659179685, 0.04738908767700195, 0.0470118408203125, 0.04692377471923828, 0.04719001770019531, 0.04685004806518555, 0.046927871704101565, 0.04678451156616211, 0.04669007873535156, 0.04716566467285156, 0.04696268844604492, 0.04686463928222656, 0.04698700714111328, 0.04731084823608398, 0.047086849212646484, 0.047376609802246096, 0.04757049560546875, 0.047657470703125, 0.04706553649902344, 0.046878753662109376, 0.047001598358154296, 0.046712158203125, 0.04723779296875, 0.04717158508300781, 0.046936065673828124, 0.046976577758789065, 0.04706758499145508, 0.049012737274169924, 0.047472640991210936, 0.04744502258300781, 0.04746543884277344, 0.04731417465209961, 0.04728409576416016, 0.04707465744018555, 0.046922527313232425, 0.04749311828613281, 
0.04684771347045898, 0.047058368682861326, 0.046747871398925785, 0.04702272033691406, 0.047564193725585936, 0.047486881256103515, 0.047145503997802735, 0.04745564651489258, 0.04729056167602539, 0.047212257385253906, 0.0475076789855957, 0.04725721740722656, 0.04741836929321289, 0.04823654556274414, 0.04748287963867188, 0.04731625747680664, 0.047016670227050784, 0.04713190460205078, 0.04805299377441406, 0.04841471862792969, 0.04720790481567383, 0.047370784759521486, 0.047503360748291014, 0.047333377838134766, 0.047034366607666016, 0.04698931121826172, 0.04701385498046875, 0.0468296012878418, 0.0470750732421875, 0.047031742095947265, 0.047104190826416016, 0.047218879699707034, 0.04690524673461914, 0.046936031341552734, 0.04683980941772461, 0.04697760009765625, 0.047889984130859375, 0.04700934219360352, 0.047096832275390625, 0.04716134262084961, 0.047210369110107425, 0.04715520095825195, 0.047271934509277344, 0.04730265426635742, 0.04717363357543945, 0.04714495849609375, 0.04679679870605469, 0.046870529174804686, 0.047484352111816404, 0.04695833587646484, 0.04710892868041992, 0.04694015884399414, 0.04692697525024414, 0.049355648040771485, 0.04723212814331055, 0.04737459182739258, 0.04805286407470703, 0.048016990661621094, 0.04721500778198242, 0.0473076171875, 0.04719411087036133, 0.047093761444091796, 0.04717158508300781, 0.047324447631835936, 0.04704905700683594, 0.04719244766235352, 0.047090782165527346, 0.04714384078979492, 0.04830822372436523, 0.04885094451904297, 0.04804595184326172, 0.04738880157470703, 0.047241214752197266, 0.04720844650268555, 0.04715734481811523, 0.04796099090576172, 0.04712720108032226, 0.04717164611816406, 0.04732262420654297, 0.04698556900024414, 0.04686614227294922, 0.04706195068359375, 0.046991134643554686, 0.04705644989013672, 0.04683411026000977, 0.04695564651489258, 0.04691238403320312, 0.04681932830810547, 0.04721664047241211, 0.04681872177124023, 0.04676668930053711, 0.04700332641601562, 0.046927616119384764, 0.04740153503417969, 0.047304126739501955, 0.04747731018066406, 0.04711609649658203, 0.04695811080932617, 0.046908065795898436, 0.04814438247680664, 0.04711990356445313, 0.046772705078125, 0.04671855926513672, 0.046800865173339846, 0.04701433563232422, 0.0470384635925293, 0.0469134407043457, 0.04673273468017578, 0.046707359313964844, 0.04722265625, 0.04689478302001953, 0.04698918533325196, 0.046983329772949216, 0.04712284851074219, 0.04726528167724609, 0.047433406829833984, 0.047131454467773434, 0.04715724945068359, 0.04710512161254883, 0.047043743133544924, 0.04719555282592774, 0.04718179321289063, 0.04761190414428711, 0.04724732971191406, 0.04704668807983398, 0.04794367980957031, 0.04719206237792969, 0.046772224426269535, 0.04674745559692383, 0.0471003532409668, 0.04716454315185547, 0.0469736328125, 0.046692287445068356, 0.04678860855102539, 0.04761126327514648, 0.04706118392944336, 0.04693673706054687, 0.046856990814208986, 0.04685420989990234, 0.0468653450012207, 0.04690095901489258, 0.046981536865234375, 0.04723904037475586, 0.046902687072753906, 0.04699587249755859, 0.047276222229003906, 0.04741056060791016, 0.046943904876708985, 0.047008735656738285, 0.04717772674560547, 0.047290367126464845, 0.04695379257202149, 0.047038143157958984, 0.04687478256225586, 0.047403873443603514, 0.04770115280151367, 0.047430496215820316, 0.04738662338256836, 0.04736163330078125, 0.047298080444335935, 0.0472334098815918, 0.04690790557861328, 0.047075328826904295, 0.04689254379272461, 0.04690396881103515, 0.04701577758789063, 0.0470118408203125, 0.04718131256103516, 
0.04736000061035156, 0.04735846328735351, 0.0472320327758789, 0.0470140151977539, 0.047079582214355466, 0.047045310974121096, 0.04680233764648437, 0.04756492614746094, 0.04695702362060547, 0.046984992980957034, 0.046944255828857424, 0.04728649520874023, 0.0474071044921875, 0.047417503356933594, 0.04682940673828125, 0.04698134231567383, 0.046908641815185545, 0.047607391357421876, 0.047435680389404294, 0.04743385696411133, 0.0469202880859375, 0.04693609619140625, 0.04704022216796875, 0.04690563201904297, 0.04675174331665039, 0.04694630432128906, 0.046854400634765626, 0.04715187072753906, 0.04692070388793945, 0.04866463851928711, 0.04724729537963867, 0.047364097595214844, 0.04769356918334961, 0.04780467224121094, 0.04738457489013672, 0.047230976104736325, 0.04701513671875, 0.04710070419311523, 0.04730879974365235, 0.046927040100097656, 0.0470208625793457, 0.046921215057373046, 0.0467720947265625, 0.04677046585083008, 0.04720880126953125, 0.04681872177124023, 0.0470206413269043, 0.0466835823059082, 0.04682310485839844, 0.047182174682617185, 0.04711996841430664, 0.04679776000976563, 0.04685823822021484, 0.04740300750732422, 0.047085567474365236, 0.047340576171875, 0.04718451309204102, 0.04752182388305664, 0.04906396865844727, 0.047347328186035154, 0.04748764801025391, 0.047476638793945314, 0.047214111328125, 0.0470401611328125, 0.047541152954101565, 0.047767551422119144, 0.04730470275878906, 0.047405055999755856, 0.04703337478637695, 0.04693251037597656, 0.047043006896972654, 0.047560256958007814, 0.0470175666809082, 0.046984031677246095, 0.04696473693847656, 0.04723283386230469, 0.04802560043334961, 0.0470714225769043, 0.047108097076416014, 0.04719001770019531, 0.04748700714111328, 0.04804662322998047, 0.04688649749755859, 0.046946720123291014, 0.047121761322021484, 0.047096481323242186, 0.04690265655517578, 0.04689385604858398, 0.046894943237304684, 0.047156639099121093, 0.046776927947998044, 0.04688816070556641, 0.046813983917236325, 0.04667136001586914, 0.05112473678588867, 0.04914995193481445, 0.04736739349365234, 0.04718467330932617, 0.04700716781616211, 0.04705062484741211, 0.04710879898071289, 0.047040382385253904, 0.047790206909179685, 0.047179550170898435, 0.04741286468505859, 0.047024734497070314, 0.04676179122924805, 0.04798073577880859, 0.05032467269897461, 0.04793222427368164, 0.047718399047851565, 0.0473023681640625, 0.047436065673828125, 0.04736332702636719, 0.047096576690673825, 0.047515583038330075, 0.04721465682983399, 0.04750950241088867, 0.047177120208740236, 0.047032001495361325, 0.04736854553222656, 0.04734374237060547, 0.047050846099853515, 0.04705916976928711, 0.047057022094726564, 0.047083518981933595, 0.04688076782226563, 0.04707727813720703, 0.04687401580810547, 0.04777356719970703, 0.04686726379394531, 0.04703955078125, 0.046932830810546874, 0.047011936187744144, 0.04767548751831055, 0.047245216369628903, 0.04749523162841797, 0.046885822296142576, 0.04715184020996094, 0.047624481201171874, 0.04718182373046875, 0.047065086364746093, 0.04719001770019531]",tokens/s,21.173085537362397,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 1047, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 890, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 366, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3854.815232,4533.911552,0.0,4131.38944,4012.475392,s,1,10.4359755859375,10.4359755859375,0.0,10.4359755859375,10.4359755859375,10.4359755859375,10.4359755859375,[10.4359755859375],,kWh,9.456019279581747e-05,1.0418479603681079e-05,3.0113079646010288e-05,0.00013509175204550882,,MB,2172.612608,4733.140992,0.0,4315.938816,4233.462784,s,10,1.79479150390625,0.179479150390625,0.00012085025752500717,0.17947809600830078,0.1796345962524414,0.17968389053344727,0.17972332595825197,"[0.17951036071777343, 0.17949586486816407, 0.1793469696044922, 0.17932780456542968, 0.17943836975097657, 0.1794603271484375, 0.17973318481445313, 0.17935116577148438, 0.17950381469726562, 0.17962364196777345]",tokens/s,1426.3495199460897,kWh,5.309990883485185e-06,5.855978730670862e-07,3.5183715015636847e-06,9.413960258115956e-06,tokens/kWh,27193656.333878983,MB,2182.836224,4837.998592,0.0,4420.796416,4335.693312,s,10,28.3771533203125,2.8377153320312503,0.015283578343084556,2.8350107421875,2.8584761474609377,2.859589392089844,2.860479987792969,"[2.816822265625, 2.82559912109375, 2.827145751953125, 2.82000390625, 2.828824951171875, 2.846341552734375, 2.852287841796875, 2.86070263671875, 2.841196533203125, 2.858228759765625]",tokens/s,22.20095838679643,kWh,8.276242480068366e-05,9.128718705613379e-06,4.7673502785236645e-05,0.0001395646462915337,tokens/kWh,451403.71629933134,,s,630,28.37420579528808,0.04503842189728268,0.0006235716879006908,0.04490500831604004,0.04555915260314941,0.04598231906890869,0.04810931335449219,"[0.04497708892822266, 0.04481363296508789, 0.044593921661376955, 0.04465862274169922, 0.0445621452331543, 0.0452303695678711, 0.044542049407958986, 0.04468112182617188, 0.04456857681274414, 0.04515542221069336, 0.044802974700927735, 0.044728321075439455, 0.04453376007080078, 0.04452880096435547, 0.0444567985534668, 0.0445296630859375, 0.044539905548095705, 0.04454969787597656, 0.04512979125976563, 0.04503385543823242, 0.044580833435058594, 0.04438224029541016, 0.04447225570678711, 0.044351551055908205, 0.04451532745361328, 0.044627967834472655, 0.044404289245605466, 0.04470191955566406, 0.04522975921630859, 0.0444699821472168, 0.04473436737060547, 0.04480092620849609, 0.044727935791015624, 0.04463004684448242, 0.04467043304443359, 0.04492566299438477, 0.04451548767089844, 0.04474265670776367, 0.0445148811340332, 0.044426944732666014, 0.044929790496826175, 0.04509868621826172, 0.045019199371337894, 0.0447421760559082, 0.04443392181396484, 0.04480400085449219, 0.04478780746459961, 0.04480022430419922, 0.04463411331176758, 0.04477951812744141, 0.045367294311523435, 0.044677120208740234, 0.044865535736083983, 0.045072383880615234, 
0.04507235336303711, 0.04488399887084961, 0.04477724838256836, 0.044548320770263675, 0.04435968017578125, 0.044316287994384765, 0.044386688232421874, 0.044557823181152346, 0.04461209487915039, 0.046035457611083984, 0.04565932846069336, 0.04680992126464844, 0.04461891174316406, 0.04442607879638672, 0.04520345687866211, 0.044650142669677734, 0.04655260848999023, 0.045214527130126955, 0.04538284683227539, 0.04501001739501953, 0.04501679992675781, 0.04525260925292969, 0.045465057373046874, 0.044853759765625, 0.044576801300048825, 0.04452761459350586, 0.045059070587158204, 0.044587230682373045, 0.0449769287109375, 0.04470163345336914, 0.04449811172485352, 0.04460985565185547, 0.04455072021484375, 0.04459929656982422, 0.04450825500488281, 0.044399520874023435, 0.0444846076965332, 0.04521136093139649, 0.04439478302001953, 0.04451532745361328, 0.044636032104492185, 0.04469772720336914, 0.04466175842285156, 0.04465055847167969, 0.0450302734375, 0.04460259246826172, 0.045153118133544924, 0.0452751350402832, 0.044482494354248045, 0.045088191986083985, 0.044749439239501955, 0.045649921417236325, 0.04458607864379883, 0.044649375915527346, 0.04477951812744141, 0.04471603012084961, 0.04451532745361328, 0.04460748672485351, 0.044483840942382814, 0.04514214324951172, 0.044498783111572265, 0.044641056060791016, 0.04456982421875, 0.04467792129516602, 0.0444846076965332, 0.04443337631225586, 0.04445187377929687, 0.04483686447143555, 0.04475904083251953, 0.04449689483642578, 0.0444846076965332, 0.04449280166625977, 0.04484348678588867, 0.044832767486572264, 0.044650081634521485, 0.044599712371826174, 0.0452191047668457, 0.04574886322021485, 0.04502864074707031, 0.04496876907348633, 0.044891647338867184, 0.04478579330444336, 0.04444812774658203, 0.044660736083984375, 0.04446831893920898, 0.044510528564453124, 0.04843376159667969, 0.04485283279418945, 0.044730785369873044, 0.04474879837036133, 0.04461929702758789, 0.04465507125854492, 0.04492675018310547, 0.044626079559326175, 0.04455574417114258, 0.0445384635925293, 0.04491468811035156, 0.044693023681640624, 0.04451561737060547, 0.04463167953491211, 0.04508134460449219, 0.04484076690673828, 0.04526489639282227, 0.04542259216308594, 0.04501446533203125, 0.045042240142822265, 0.04467302322387695, 0.044662784576416016, 0.04713631820678711, 0.04685049438476562, 0.04500275039672851, 0.04456857681274414, 0.04474451065063476, 0.044847297668457034, 0.044797279357910155, 0.044730945587158205, 0.04449689483642578, 0.04442873764038086, 0.044524192810058594, 0.044439552307128906, 0.04446822357177734, 0.044365825653076174, 0.045096736907958984, 0.044505313873291014, 0.044472320556640625, 0.04434915161132812, 0.0446929931640625, 0.04449359893798828, 0.04453180694580078, 0.044554145812988284, 0.04487686538696289, 0.04462278366088867, 0.04472371292114258, 0.04468377685546875, 0.044772575378417966, 0.04473712158203125, 0.04471635055541992, 0.04475699234008789, 0.0445002555847168, 0.04497891235351562, 0.04460134506225586, 0.04501504135131836, 0.044609535217285154, 0.0445807991027832, 0.044733665466308595, 0.04444623947143555, 0.044415294647216795, 0.04454393768310547, 0.04862508773803711, 0.045136032104492185, 0.04461820983886719, 0.044883968353271485, 0.04469091033935547, 0.044509727478027346, 0.04508224105834961, 0.0445382080078125, 0.04470991897583008, 0.04456243133544922, 0.04478566360473633, 0.044967937469482425, 0.04472598266601562, 0.04444188690185547, 0.0444846076965332, 0.04526694488525391, 0.044488479614257816, 0.04468304061889648, 0.04460793685913086, 
0.044660736083984375, 0.044488704681396485, 0.04461977767944336, 0.04437740707397461, 0.04447711944580078, 0.044437503814697264, 0.04446822357177734, 0.044733631134033204, 0.04477420806884766, 0.044407936096191404, 0.04452851104736328, 0.04465459060668946, 0.04449875259399414, 0.04452985763549805, 0.044646400451660156, 0.045055999755859374, 0.04487945556640625, 0.04465910339355469, 0.04523622512817383, 0.044773216247558596, 0.04463017654418945, 0.044490623474121097, 0.04484688186645508, 0.04456687927246094, 0.04490387344360352, 0.045115966796875, 0.04502262496948242, 0.04481289672851563, 0.04452252960205078, 0.04462076950073242, 0.0448256950378418, 0.04489059066772461, 0.044639583587646484, 0.044785888671875, 0.044746814727783205, 0.0446324462890625, 0.044676513671875, 0.04523273468017578, 0.04465663909912109, 0.0445412483215332, 0.04465913772583008, 0.04433686447143555, 0.04497795104980469, 0.04459596633911133, 0.04458870315551758, 0.0442778549194336, 0.044456192016601566, 0.044358783721923825, 0.044485504150390626, 0.04452143859863281, 0.04621478271484375, 0.04448614501953125, 0.044585182189941404, 0.04457952117919922, 0.04447641754150391, 0.04711740875244141, 0.045980575561523435, 0.044627967834472655, 0.044693504333496094, 0.04475904083251953, 0.04513792037963867, 0.04520140838623047, 0.045037025451660155, 0.04512793731689453, 0.04464668655395508, 0.04474879837036133, 0.04472012710571289, 0.04456243133544922, 0.044758464813232424, 0.04479971313476563, 0.04444655990600586, 0.04479715347290039, 0.04491548919677734, 0.04754431915283203, 0.044727935791015624, 0.04470345687866211, 0.04453590393066406, 0.044640766143798825, 0.04485945510864258, 0.04493107223510742, 0.04493721771240235, 0.04513792037963867, 0.04513382339477539, 0.04503497695922851, 0.045021728515625, 0.04500630569458008, 0.04495209503173828, 0.044998561859130856, 0.04466902542114258, 0.04468940734863281, 0.04512675094604492, 0.04478841781616211, 0.045794784545898436, 0.04482534408569336, 0.04555583953857422, 0.045032032012939455, 0.0449986572265625, 0.045113121032714844, 0.04495382308959961, 0.044957695007324217, 0.04494131088256836, 0.04483203125, 0.04492275238037109, 0.04487801742553711, 0.04464831924438477, 0.04480284881591797, 0.04516179275512695, 0.04487443161010742, 0.0460552978515625, 0.04474643325805664, 0.04468137741088867, 0.04480438232421875, 0.04465795135498047, 0.045517536163330076, 0.04521955108642578, 0.04517507171630859, 0.044781150817871096, 0.04490895843505859, 0.04479312133789062, 0.04482940673828125, 0.04519110488891601, 0.04513324737548828, 0.04509900665283203, 0.04475353622436523, 0.04478966522216797, 0.044665985107421875, 0.04475564956665039, 0.04476716613769531, 0.04482902526855469, 0.04500582504272461, 0.049463550567626954, 0.04520832061767578, 0.044799999237060545, 0.04478345489501953, 0.044841121673583985, 0.04472217559814453, 0.0449536018371582, 0.045328384399414064, 0.045700641632080076, 0.045262367248535156, 0.04500368118286133, 0.045037601470947264, 0.045217281341552736, 0.04521187210083008, 0.04507468795776367, 0.045416481018066404, 0.04563763046264648, 0.04509286499023438, 0.04564787292480469, 0.04504931259155273, 0.0450032958984375, 0.045230079650878906, 0.048365150451660156, 0.0460865592956543, 0.04511520004272461, 0.044992702484130856, 0.044974079132080076, 0.04632297515869141, 0.0450546875, 0.04460879898071289, 0.04464905548095703, 0.04457279968261719, 0.04537139129638672, 0.04503756713867187, 0.0451932144165039, 0.04528236770629883, 0.04527779388427734, 0.04512803268432617, 
0.04522598266601562, 0.04538777542114258, 0.045352958679199216, 0.04525235366821289, 0.045887744903564454, 0.04546355056762695, 0.045305694580078125, 0.04491484832763672, 0.04487366485595703, 0.045004257202148436, 0.04529801559448242, 0.04553254318237305, 0.04485823822021484, 0.04456857681274414, 0.04506623840332031, 0.0446033935546875, 0.045428737640380856, 0.04464432144165039, 0.04459113693237305, 0.04451123046875, 0.04478976058959961, 0.04492287826538086, 0.04468867111206055, 0.048122592926025394, 0.045666305541992185, 0.04567561721801758, 0.04573084640502929, 0.04531801605224609, 0.04527452850341797, 0.04556246566772461, 0.04534886550903321, 0.04578508758544922, 0.04524031829833984, 0.04561715316772461, 0.045554721832275394, 0.045378528594970706, 0.045362590789794925, 0.045532958984375, 0.045280063629150394, 0.04532137680053711, 0.04518998336791992, 0.0452006721496582, 0.04560079956054688, 0.045277088165283204, 0.045257503509521485, 0.045276161193847655, 0.045265918731689454, 0.045418495178222655, 0.04512153625488281, 0.04512768173217773, 0.04533967971801758, 0.0454389762878418, 0.04614044952392578, 0.045547679901123045, 0.045530849456787106, 0.04532467269897461, 0.04523772811889649, 0.04607241439819336, 0.04518489456176758, 0.04563203048706055, 0.04552294540405273, 0.045312000274658204, 0.04542259216308594, 0.04536524963378906, 0.04544233703613281, 0.04670086288452149, 0.04528323364257812, 0.045156352996826174, 0.04562790298461914, 0.04564582443237305, 0.04523212814331055, 0.04509600067138672, 0.04530428695678711, 0.045416126251220705, 0.04544793701171875, 0.045797409057617186, 0.045681758880615236, 0.04538566589355469, 0.04516758346557617, 0.04586844635009766, 0.04527369689941406, 0.04502937698364258, 0.045499935150146484, 0.04534473419189453, 0.04543948745727539, 0.045206687927246095, 0.045240894317626956, 0.044947742462158206, 0.04468262481689453, 0.0445522575378418, 0.04484972763061523, 0.046693504333496096, 0.04521798324584961, 0.045099712371826174, 0.04518409729003906, 0.045085216522216795, 0.045402206420898435, 0.04792758560180664, 0.045475135803222655, 0.04517110443115235, 0.045372833251953126, 0.046134143829345706, 0.04537120056152344, 0.045246654510498044, 0.045461727142333985, 0.045157184600830076, 0.04534985733032226, 0.045440383911132816, 0.04518278503417969, 0.045438945770263674, 0.04512444686889648, 0.04497612762451172, 0.04484540939331055, 0.04486681747436523, 0.04457907104492188, 0.04559299087524414, 0.04534092712402344, 0.04540415954589844, 0.04525433731079102, 0.045934913635253906, 0.04515020751953125, 0.04506787109375, 0.04531398391723633, 0.045908447265625, 0.0452751350402832, 0.045346561431884765, 0.045261054992675784, 0.0460873908996582, 0.045257503509521485, 0.045055553436279296, 0.04505644989013672, 0.04522111892700195, 0.04514041519165039, 0.04571372985839844, 0.044614688873291015, 0.0447784652709961, 0.044799999237060545, 0.04475699234008789, 0.04452761459350586, 0.04477337646484375, 0.045104576110839845, 0.04537401580810547, 0.04555878448486328, 0.04482118225097656, 0.044773696899414066, 0.04485232162475586, 0.04531670379638672, 0.04461996841430664, 0.044742782592773436, 0.04499660873413086, 0.0446484489440918, 0.04463411331176758, 0.0445665283203125, 0.04468735885620117, 0.045079967498779294, 0.0444749755859375, 0.044560382843017575, 0.04460486221313477, 0.044550209045410155, 0.04476128005981445, 0.0448474235534668, 0.04513977432250976, 0.045383102416992185, 0.045475841522216794, 0.04522854232788086, 0.04480419158935547, 0.044566238403320316, 
0.0445731201171875, 0.04458700942993164, 0.04466659164428711, 0.04470198440551758, 0.04871372985839844, 0.04511043167114258, 0.04513840103149414, 0.04456390380859375, 0.04545836639404297, 0.04523622512817383, 0.04524227142333984, 0.0457391357421875, 0.04548067092895508, 0.045369342803955076, 0.0452567024230957, 0.04640972900390625, 0.04534886550903321, 0.04522393417358399, 0.04598374557495117, 0.045385726928710936, 0.04529878234863281, 0.04501187133789063, 0.04807680130004883, 0.04564787292480469, 0.04516563034057617, 0.04491360092163086, 0.044969249725341796, 0.04530863952636719, 0.04500844955444336, 0.045042110443115235, 0.044844192504882814, 0.044872543334960935, 0.04480409622192383, 0.044988414764404294, 0.04466624069213867, 0.04486822509765625, 0.045178878784179685, 0.04637900924682617, 0.04541843032836914, 0.04497177505493164, 0.04482489776611328, 0.04494540786743164, 0.04496502304077148, 0.044958560943603516, 0.04487081527709961, 0.044888927459716794, 0.044843006134033206, 0.04480963134765625, 0.04744252777099609, 0.04689100646972656, 0.04490614318847656, 0.045171039581298825, 0.04506371307373047, 0.04500323104858398, 0.0449249267578125, 0.045096958160400394, 0.04522118377685547, 0.044860095977783204, 0.04479532623291015, 0.04562771224975586, 0.04523571014404297, 0.045499137878417965, 0.0456511344909668, 0.04535110473632813, 0.04515084838867187, 0.0481976318359375, 0.04552214431762695, 0.04590409469604492, 0.04523884963989258, 0.04663679885864258, 0.044990718841552736, 0.04548387145996094, 0.04468892669677734, 0.04464704132080078]",tokens/s,22.203264632154738,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,825.131008,545.128448,0.0,159.383552,143.673856,s,1,7.73799560546875,7.73799560546875,0.0,7.73799560546875,7.73799560546875,7.73799560546875,7.73799560546875,[7.73799560546875],,kWh,1.1002869616701598e-05,1.2060808126243883e-06,2.7222243999813767e-06,1.4931174829307362e-05,,MB,1342.889984,616.431616,0.0,199.22944,186.684928,s,19,0.19152707195281982,0.010080372208043148,0.0003365190027857135,0.010015935897827148,0.010246790313720703,0.01085495672225952,0.011141666240692138,"[0.010030431747436524, 0.00982265567779541, 0.009850144386291504, 0.010815135955810547, 0.010030752182006835, 0.00987548828125, 0.009902912139892578, 0.010098560333251953, 0.010015935897827148, 0.011213343620300293, 0.009973952293395996, 0.010052191734313964, 0.009920448303222655, 0.00991385555267334, 0.010054495811462403, 0.010104703903198241, 0.00993280029296875, 0.009895584106445313, 
0.010023679733276367]",tokens/s,25395.88764348772,kWh,2.955258249409259e-07,3.259125785066744e-08,1.2309529640685493e-07,4.512123791984483e-07,tokens/kWh,567360320.3324533,MB,1382.739968,618.528768,0.0,201.326592,186.687488,s,19,10.012183837890625,0.5269570440995066,0.002872720266168862,0.526202880859375,0.5314283447265625,0.5317847961425781,0.5338151135253907,"[0.5235718383789062, 0.5343226928710938, 0.5236062622070312, 0.5251556396484375, 0.5257523803710937, 0.5269797973632813, 0.5251487426757813, 0.527787353515625, 0.5288425903320313, 0.5261953125, 0.526202880859375, 0.5254588623046875, 0.524680419921875, 0.5315028076171875, 0.5314097290039063, 0.526810791015625, 0.5282140502929688, 0.522894775390625, 0.5276469116210938]",tokens/s,119.55433693396753,kWh,1.5136497833391946e-05,1.6692905834529984e-06,5.240432386225572e-06,2.2046220803070518e-05,tokens/kWh,2857632.632946577,,s,1197,10.000772610664368,0.008354864336394627,0.00020488175669350434,0.008317055702209473,0.008443858909606933,0.00855712013244629,0.008912132034301756,"[0.008239199638366699, 0.008333215713500977, 0.008282112121582032, 0.00831612777709961, 0.008248096466064453, 0.008289695739746094, 0.008296575546264649, 0.008328767776489257, 0.008323840141296387, 0.008331007957458496, 0.008292287826538085, 0.008323552131652832, 0.008284159660339355, 0.008256671905517578, 0.00825430393218994, 0.008281503677368164, 0.008247584342956542, 0.00830844783782959, 0.008462143898010253, 0.00846553611755371, 0.008406911849975586, 0.00825654411315918, 0.008388799667358398, 0.00832960033416748, 0.008289504051208495, 0.00829759979248047, 0.00827785587310791, 0.008286208152770995, 0.008256896018981934, 0.00827235221862793, 0.008529600143432617, 0.008298208236694336, 0.008300767898559571, 0.008275775909423829, 0.008272255897521973, 0.008282015800476075, 0.008272255897521973, 0.00830412769317627, 0.0083088960647583, 0.008292256355285644, 0.008294015884399415, 0.008275967597961426, 0.00832806396484375, 0.008275967597961426, 0.00826598358154297, 0.008283167839050294, 0.008278752326965332, 0.008271967887878418, 0.008245152473449707, 0.008271903991699218, 0.00825545597076416, 0.00832102394104004, 0.00827187156677246, 0.008272159576416015, 0.008331263542175293, 0.008306400299072265, 0.00829644775390625, 0.008281087875366211, 0.008287232398986816, 0.008355839729309082, 0.008272928237915039, 0.008276960372924805, 0.008272992134094239, 0.008324511528015137, 0.008297056198120116, 0.008524127960205078, 0.008515168190002441, 0.009025440216064454, 0.008854016304016114, 0.008383935928344726, 0.008359807968139649, 0.008319071769714356, 0.008319328308105469, 0.008345600128173827, 0.008324864387512208, 0.00837388801574707, 0.008384896278381347, 0.008392767906188965, 0.00824726390838623, 0.008300479888916015, 0.008274111747741699, 0.008325119972229005, 0.008262751579284668, 0.008296511650085448, 0.00833017635345459, 0.008430496215820312, 0.00825382423400879, 0.008257856369018554, 0.0082291841506958, 0.008378368377685547, 0.00825500774383545, 0.008264512062072753, 0.008279711723327636, 0.008316927909851075, 0.00827017593383789, 0.008299903869628906, 0.008327360153198243, 0.008306303977966308, 0.008384991645812988, 0.00991436767578125, 0.010825728416442871, 0.013303423881530762, 0.008366463661193847, 0.008300607681274413, 0.00832262420654297, 0.008280096054077148, 0.008269951820373536, 0.008241375923156739, 0.00825334358215332, 0.00833459186553955, 0.008233823776245118, 0.008296768188476562, 0.008341343879699707, 0.008232416152954102, 0.008236639976501465, 
0.00826483154296875, 0.008260543823242188, 0.00831772804260254, 0.008257247924804688, 0.008265952110290527, 0.008259424209594727, 0.008279359817504883, 0.008252320289611816, 0.008246975898742676, 0.008261856079101562, 0.008263903617858887, 0.008281567573547362, 0.008300992012023925, 0.00840726375579834, 0.00829964828491211, 0.008298368453979492, 0.008332032203674317, 0.00833529567718506, 0.008270432472229004, 0.00831004810333252, 0.008376864433288575, 0.008348608016967773, 0.008262623786926269, 0.008259167671203613, 0.008263327598571778, 0.008389375686645507, 0.008262975692749024, 0.00825823974609375, 0.00828451156616211, 0.008296223640441894, 0.008294272422790527, 0.008353695869445801, 0.008296544075012208, 0.008271712303161621, 0.008269984245300293, 0.008254688262939453, 0.00827023983001709, 0.008353216171264648, 0.00824345588684082, 0.008256447792053223, 0.008236800193786621, 0.00827939224243164, 0.00823532772064209, 0.008303232192993164, 0.008371968269348145, 0.00828822422027588, 0.008245247840881348, 0.008242752075195313, 0.008251839637756348, 0.00835590362548828, 0.008320351600646973, 0.008277824401855469, 0.008265600204467774, 0.00825830364227295, 0.00822640037536621, 0.00823977565765381, 0.008247200012207032, 0.0082774076461792, 0.008323679924011231, 0.008666175842285156, 0.008276415824890136, 0.008371999740600586, 0.008261471748352052, 0.008307583808898926, 0.008259136199951172, 0.008348320007324219, 0.0082674560546875, 0.008244928359985351, 0.008234911918640137, 0.008324607849121094, 0.008288576126098632, 0.008370176315307617, 0.008532095909118652, 0.00831663990020752, 0.00829644775390625, 0.008331263542175293, 0.008364224433898925, 0.008316160202026367, 0.008301088333129883, 0.008374496459960938, 0.008300352096557616, 0.00832921600341797, 0.008280351638793945, 0.008304448127746581, 0.008409088134765624, 0.008287296295166016, 0.008433823585510253, 0.008466719627380371, 0.008294943809509278, 0.008280223846435546, 0.00836787223815918, 0.008284223556518555, 0.008275679588317871, 0.008282719612121582, 0.008287839889526367, 0.008300543785095215, 0.00829856014251709, 0.00830844783782959, 0.008329119682312012, 0.008328864097595215, 0.008350367546081544, 0.008383520126342773, 0.008346207618713379, 0.008339872360229492, 0.008303647994995117, 0.008313920021057129, 0.008316255569458008, 0.008313376426696778, 0.00831488037109375, 0.008371328353881836, 0.008274687767028808, 0.008261568069458007, 0.008287455558776856, 0.008257568359375, 0.00828003215789795, 0.00826483154296875, 0.008287232398986816, 0.008292511940002441, 0.008296480178833008, 0.00826249599456787, 0.008277888298034669, 0.008296064376831055, 0.008296863555908203, 0.008396736145019531, 0.008285599708557129, 0.008747584342956543, 0.008295552253723144, 0.00827286434173584, 0.00829430389404297, 0.008299903869628906, 0.008338047981262206, 0.008347776412963867, 0.008340576171875, 0.008327967643737793, 0.008368127822875977, 0.008371999740600586, 0.008419551849365235, 0.008461343765258788, 0.008419424057006837, 0.008343680381774903, 0.008452863693237304, 0.00829148769378662, 0.00830726432800293, 0.008312479972839355, 0.008307168006896973, 0.008302751541137696, 0.008304927825927734, 0.008324031829833984, 0.00832310390472412, 0.00845315170288086, 0.008277215957641602, 0.00827888011932373, 0.008307776451110839, 0.008282143592834473, 0.008284735679626464, 0.0083374080657959, 0.008356160163879394, 0.008400575637817383, 0.008351743698120117, 0.00832307243347168, 0.008296607971191406, 0.008365407943725586, 0.008388447761535644, 
0.008298720359802246, 0.008331647872924805, 0.00828012752532959, 0.00827347183227539, 0.008265727996826172, 0.008308287620544433, 0.008301440238952637, 0.008275967597961426, 0.008321056365966797, 0.00828752040863037, 0.008282719612121582, 0.008318400382995606, 0.008297087669372559, 0.008382335662841797, 0.008328800201416015, 0.008332192420959473, 0.00832703971862793, 0.008421343803405762, 0.00837724781036377, 0.008320992469787597, 0.008336031913757324, 0.00831443214416504, 0.00829529571533203, 0.008615615844726563, 0.008449312210083007, 0.008383008003234863, 0.0082990083694458, 0.008285344123840332, 0.00827187156677246, 0.008308992385864258, 0.008370783805847168, 0.008328639984130859, 0.008303263664245605, 0.00835968017578125, 0.00836137580871582, 0.008383071899414063, 0.00829148769378662, 0.008301568031311036, 0.008301568031311036, 0.008326944351196288, 0.008300767898559571, 0.00828774356842041, 0.008570688247680664, 0.00835472011566162, 0.00833459186553955, 0.008438048362731933, 0.008358143806457519, 0.008294400215148925, 0.008384511947631837, 0.008334431648254394, 0.008313440322875976, 0.008288576126098632, 0.008273088455200195, 0.008323328018188476, 0.008319295883178712, 0.00831494426727295, 0.008421567916870118, 0.00858844757080078, 0.0083439359664917, 0.00834607982635498, 0.00832307243347168, 0.008421376228332519, 0.008294528007507324, 0.00843273639678955, 0.0083189115524292, 0.00828707218170166, 0.00829856014251709, 0.008303936004638673, 0.008286304473876953, 0.008288800239562988, 0.008297504425048828, 0.00835260772705078, 0.008301983833312989, 0.008298720359802246, 0.00828876781463623, 0.00841932773590088, 0.008343903541564942, 0.008326208114624024, 0.008294464111328125, 0.008320575714111328, 0.008281056404113769, 0.008300543785095215, 0.008320063591003417, 0.008293184280395507, 0.008388544082641601, 0.008300736427307128, 0.008331487655639649, 0.008382240295410156, 0.008302592277526855, 0.00832271957397461, 0.008974687576293945, 0.008323264122009278, 0.008275327682495117, 0.008382207870483398, 0.008331199645996094, 0.008399456024169923, 0.008910816192626953, 0.008325311660766601, 0.008304863929748536, 0.008329024314880371, 0.008241567611694336, 0.008296319961547852, 0.008423551559448243, 0.008301664352416992, 0.008360447883605958, 0.008294912338256836, 0.008281888008117676, 0.00828006362915039, 0.008318976402282715, 0.008298591613769531, 0.008325183868408203, 0.008286175727844238, 0.00829759979248047, 0.008301471710205078, 0.008388383865356445, 0.008308799743652343, 0.008371392250061034, 0.008346752166748047, 0.008294079780578613, 0.008275039672851562, 0.008299136161804199, 0.008298784255981446, 0.008317055702209473, 0.00834934425354004, 0.008294624328613281, 0.00826972770690918, 0.008310879707336426, 0.00829974365234375, 0.008315679550170898, 0.008304032325744629, 0.008273823738098145, 0.008301440238952637, 0.00831868839263916, 0.00827184009552002, 0.008414624214172363, 0.008394623756408691, 0.008297311782836913, 0.008349184036254884, 0.008331775665283203, 0.008352864265441894, 0.008424351692199707, 0.008396160125732422, 0.008365983963012696, 0.008305567741394043, 0.008286016464233398, 0.008320992469787597, 0.008365568161010742, 0.008300383567810059, 0.008309632301330567, 0.00830025577545166, 0.008345376014709473, 0.008337087631225586, 0.008410816192626952, 0.008291263580322266, 0.00831612777709961, 0.008328160285949708, 0.008296256065368652, 0.008289952278137207, 0.008296832084655762, 0.008345024108886719, 0.008290783882141113, 0.00860371208190918, 0.008312000274658202, 
0.008287263870239257, 0.00836460781097412, 0.008411552429199219, 0.008359935760498047, 0.008331263542175293, 0.008341024398803711, 0.00836451244354248, 0.008350720405578613, 0.008337920188903808, 0.008541695594787598, 0.008453472137451172, 0.00860739231109619, 0.008474464416503907, 0.008480544090270997, 0.008442239761352539, 0.00843603229522705, 0.008488960266113281, 0.00844489574432373, 0.008469216346740723, 0.008415040016174316, 0.00836457633972168, 0.008369248390197754, 0.008386464118957519, 0.008344032287597657, 0.008335583686828614, 0.00836524772644043, 0.008316864013671875, 0.00833619213104248, 0.008341631889343261, 0.008342464447021485, 0.008322015762329101, 0.008381888389587403, 0.008387136459350585, 0.008372223854064942, 0.008396544456481934, 0.008396608352661133, 0.008344256401062011, 0.008346879959106445, 0.008331423759460449, 0.008314847946166992, 0.008268159866333008, 0.008284159660339355, 0.008502752304077148, 0.00831494426727295, 0.008329440116882324, 0.008317184448242188, 0.008333151817321777, 0.008368576049804687, 0.008285920143127441, 0.008348671913146973, 0.008296799659729005, 0.00837932777404785, 0.008332096099853515, 0.008345696449279785, 0.008368736267089843, 0.008366592407226562, 0.008338144302368164, 0.008391200065612793, 0.008319231986999511, 0.008370400428771973, 0.008290176391601562, 0.008259712219238281, 0.008298272132873536, 0.008308287620544433, 0.008292736053466797, 0.00829360008239746, 0.008307807922363282, 0.008310463905334473, 0.008300543785095215, 0.00830463981628418, 0.008389760017395019, 0.008317824363708495, 0.008297568321228027, 0.008368736267089843, 0.008408736228942872, 0.00833788776397705, 0.008327712059020996, 0.008334752082824706, 0.008317248344421387, 0.008344639778137207, 0.008348480224609375, 0.008327232360839844, 0.008414815902709961, 0.00838492774963379, 0.008361439704895019, 0.008432160377502442, 0.008371583938598633, 0.009284223556518555, 0.008418368339538575, 0.008422335624694825, 0.008558591842651368, 0.008460288047790527, 0.00841113567352295, 0.008568832397460938, 0.008466431617736817, 0.008374272346496582, 0.008407039642333984, 0.008434752464294433, 0.008385087966918945, 0.008411520004272461, 0.008366080284118652, 0.008449119567871094, 0.008342432022094727, 0.0083372802734375, 0.008492287635803222, 0.008332159996032715, 0.008457920074462891, 0.008324799537658691, 0.008339360237121582, 0.008294207572937012, 0.008328096389770508, 0.008312447547912598, 0.00830412769317627, 0.00836083221435547, 0.008284064292907714, 0.008562047958374024, 0.008340543746948242, 0.008482751846313476, 0.008301312446594239, 0.008330207824707031, 0.008484992027282715, 0.008349568367004394, 0.00832921600341797, 0.008337535858154298, 0.008292223930358887, 0.008316800117492676, 0.00860979175567627, 0.008361984252929687, 0.008769536018371582, 0.00841932773590088, 0.008404288291931152, 0.008350208282470703, 0.008290495872497559, 0.008316287994384766, 0.008292991638183594, 0.008308927536010742, 0.00832310390472412, 0.00831056022644043, 0.008439519882202148, 0.008300127983093262, 0.008301376342773437, 0.008319135665893554, 0.008311615943908692, 0.00834447956085205, 0.008363103866577149, 0.008280415534973145, 0.008311360359191895, 0.008308735847473145, 0.008376319885253907, 0.00830463981628418, 0.008296575546264649, 0.008351615905761719, 0.008267552375793456, 0.008284576416015625, 0.008273728370666504, 0.008299551963806152, 0.00830953598022461, 0.008275168418884277, 0.008346400260925292, 0.008302783966064453, 0.00832921600341797, 0.008292351722717285, 
0.008296159744262696, 0.008347647666931152, 0.008354080200195313, 0.00829030418395996, 0.008316032409667969, 0.008301216125488281, 0.008300224304199218, 0.008303135871887206, 0.008320447921752929, 0.00840556812286377, 0.00833737564086914, 0.008352095603942872, 0.008459199905395507, 0.008407808303833008, 0.008366080284118652, 0.008372223854064942, 0.00834489631652832, 0.00833625602722168, 0.00834108829498291, 0.008314175605773926, 0.008359135627746582, 0.008337087631225586, 0.008386912345886231, 0.008310688018798829, 0.00834774398803711, 0.008321791648864747, 0.008329919815063477, 0.008344544410705566, 0.008311967849731445, 0.008309151649475098, 0.008315296173095703, 0.00832921600341797, 0.008318976402282715, 0.008320223808288575, 0.008417759895324707, 0.008307040214538574, 0.008335328102111817, 0.00832646369934082, 0.008344256401062011, 0.00836787223815918, 0.008404928207397461, 0.00835750389099121, 0.008331040382385253, 0.008311776161193848, 0.008308320045471192, 0.008372032165527343, 0.008357728004455566, 0.008353919982910156, 0.00834934425354004, 0.0083090238571167, 0.008288928031921387, 0.008292320251464844, 0.008292351722717285, 0.008318976402282715, 0.008341343879699707, 0.008304351806640626, 0.00831935977935791, 0.008284223556518555, 0.008349023818969726, 0.008323679924011231, 0.008363103866577149, 0.008709088325500488, 0.008364031791687012, 0.00840441608428955, 0.008333888053894043, 0.008437376022338867, 0.008345024108886719, 0.008434623718261718, 0.00834540843963623, 0.00834988784790039, 0.008548352241516113, 0.008372223854064942, 0.008314656257629395, 0.008370400428771973, 0.008320128440856934, 0.00827683162689209, 0.008278047561645508, 0.008296511650085448, 0.00827564811706543, 0.008311039924621582, 0.008335359573364258, 0.008283807754516601, 0.008279999732971192, 0.008309151649475098, 0.008292160034179687, 0.008327136039733887, 0.008395296096801758, 0.008340224266052246, 0.008334272384643555, 0.008345952033996582, 0.00833459186553955, 0.008311296463012695, 0.008361248016357422, 0.008303071975708008, 0.008320799827575684, 0.008319456100463868, 0.008363776206970215, 0.008333567619323731, 0.008455840110778809, 0.008396608352661133, 0.008394847869873047, 0.00840771198272705, 0.008402144432067871, 0.0084071683883667, 0.008368512153625489, 0.00839891242980957, 0.008356127738952637, 0.008322560310363769, 0.008315296173095703, 0.008341312408447266, 0.008288191795349122, 0.008347552299499511, 0.008308223724365234, 0.008288928031921387, 0.0082774076461792, 0.008295007705688476, 0.008273920059204102, 0.008267744064331055, 0.008316960334777832, 0.00838361644744873, 0.008289248466491698, 0.008290207862854004, 0.00828006362915039, 0.008290592193603516, 0.008264863967895508, 0.008354496002197266, 0.008312704086303711, 0.00831283187866211, 0.008345600128173827, 0.00834886360168457, 0.008340479850769043, 0.00852563190460205, 0.008324640274047852, 0.0083374080657959, 0.008352224349975585, 0.008331423759460449, 0.008312671661376954, 0.008296256065368652, 0.00847702407836914, 0.008308575630187989, 0.008286496162414551, 0.008281472206115723, 0.008286463737487792, 0.008328736305236817, 0.008332127571105958, 0.008307807922363282, 0.008300992012023925, 0.008288288116455078, 0.008286368370056153, 0.008286399841308594, 0.008285504341125488, 0.00833132839202881, 0.008317472457885743, 0.00826905632019043, 0.008300800323486329, 0.008274432182312011, 0.008333312034606934, 0.008277088165283204, 0.008270912170410157, 0.008281855583190919, 0.008283616065979004, 0.008264320373535156, 0.008292160034179687, 
0.00835807991027832, 0.008323007583618165, 0.008347935676574707, 0.008265503883361816, 0.00825712013244629, 0.00826204776763916, 0.008269280433654784, 0.008301024436950683, 0.008289728164672851, 0.008292799949645996, 0.008329407691955566, 0.008324416160583496, 0.008335136413574218, 0.008350624084472656, 0.008421567916870118, 0.008396608352661133, 0.00829206371307373, 0.008287615776062011, 0.008284704208374023, 0.008307071685791015, 0.00833897590637207, 0.008259231567382813, 0.008259936332702637, 0.008243680000305176, 0.008270048141479492, 0.008517439842224122, 0.00834982395172119, 0.008288096427917481, 0.008341504096984862, 0.008257535934448243, 0.008304703712463378, 0.008253376007080078, 0.008281536102294922, 0.008364352226257324, 0.008282048225402832, 0.008326751708984375, 0.00862281608581543, 0.008543744087219238, 0.008272735595703126, 0.008590496063232422, 0.008339296340942383, 0.008313504219055176, 0.008421183586120606, 0.008305983543395996, 0.008320159912109375, 0.00830025577545166, 0.008362336158752441, 0.008301823616027833, 0.008257599830627441, 0.008259936332702637, 0.008269472122192383, 0.0082640962600708, 0.008270048141479492, 0.008258144378662109, 0.00824351978302002, 0.0082510404586792, 0.00825551986694336, 0.00827132797241211, 0.008327712059020996, 0.008293791770935059, 0.008286815643310547, 0.008274080276489258, 0.008296287536621094, 0.008273920059204102, 0.008274208068847657, 0.008338239669799805, 0.00827280044555664, 0.008294400215148925, 0.008253503799438476, 0.008281920433044434, 0.008276224136352539, 0.008375807762145996, 0.008320863723754884, 0.008343263626098633, 0.0083689603805542, 0.008376319885253907, 0.008359935760498047, 0.00832688045501709, 0.008417568206787109, 0.008382464408874512, 0.008431072235107423, 0.008515775680541992, 0.008569120407104492, 0.008533120155334473, 0.008461248397827149, 0.00843769645690918, 0.008368191719055175, 0.008330400466918946, 0.008334176063537597, 0.008359135627746582, 0.008297311782836913, 0.008382335662841797, 0.00834771156311035, 0.008374272346496582, 0.008450079917907714, 0.008521696090698243, 0.008445952415466309, 0.00841932773590088, 0.008492223739624024, 0.008492959976196288, 0.00859008026123047, 0.008648799896240235, 0.008674688339233398, 0.0086627197265625, 0.008692607879638672, 0.008570655822753907, 0.008606047630310058, 0.008751104354858399, 0.008888319969177246, 0.00885747241973877, 0.008732799530029298, 0.008572928428649903, 0.00847276782989502, 0.008453951835632325, 0.00840499210357666, 0.00845206356048584, 0.008423328399658203, 0.008363007545471191, 0.008363200187683105, 0.00836083221435547, 0.008343487739562989, 0.008358943939208984, 0.008425791740417481, 0.008304351806640626, 0.008293312072753906, 0.008311871528625489, 0.008297311782836913, 0.008665184020996093, 0.008632479667663574, 0.008830816268920898, 0.00894371223449707, 0.00841750431060791, 0.008435551643371581, 0.00834329605102539, 0.008357983589172363, 0.008410143852233886, 0.00834233570098877, 0.00836575984954834, 0.008336864471435546, 0.008301568031311036, 0.008335359573364258, 0.008270048141479492, 0.008668959617614746, 0.008347647666931152, 0.00832102394104004, 0.00832921600341797, 0.008340831756591796, 0.008322976112365722, 0.008469535827636718, 0.008420160293579101, 0.008437919616699218, 0.008396832466125489, 0.008362719535827637, 0.0085032958984375, 0.00833459186553955, 0.008375200271606445, 0.008415295600891113, 0.008430912017822265, 0.00839846420288086, 0.008473440170288086, 0.008554400444030762, 0.008591456413269043, 0.008433568000793456, 
0.008371552467346192, 0.00833180809020996, 0.008405216217041015, 0.00832851219177246, 0.008503999710083008, 0.00872383975982666, 0.008687904357910156, 0.008556896209716797, 0.008407360076904296, 0.008355744361877441, 0.008345184326171875, 0.008392895698547363, 0.008415231704711914, 0.008353728294372559, 0.008436960220336914, 0.008411328315734863, 0.008612319946289063, 0.008689663887023925, 0.008852767944335937, 0.008771743774414063, 0.009069120407104492, 0.008558015823364257, 0.008519231796264648, 0.008448415756225586, 0.008389216423034668, 0.00831283187866211, 0.008283679962158202, 0.008278592109680176, 0.008400064468383789, 0.008329952239990234, 0.00828544044494629, 0.00825830364227295, 0.008279744148254395, 0.008285856246948243, 0.008312704086303711, 0.008291104316711425, 0.008385600090026856, 0.008313183784484863, 0.00830508804321289, 0.00828335952758789, 0.008391615867614746, 0.008309791564941407, 0.00833568000793457, 0.008294655799865722, 0.008332832336425781, 0.008285247802734375, 0.008301600456237793, 0.008303008079528808, 0.00828659152984619, 0.008354144096374512, 0.008310175895690919, 0.008290016174316406, 0.008387328147888184, 0.008277440071105957, 0.008275712013244629, 0.008258144378662109, 0.008310400009155273, 0.008294272422790527, 0.008298144340515137, 0.008307295799255371, 0.008364288330078126, 0.008273920059204102, 0.008296159744262696, 0.00827625560760498, 0.008425472259521484, 0.008260640144348144, 0.008246368408203125, 0.00824953556060791, 0.008236736297607421, 0.008295519828796387, 0.008273887634277343, 0.00824124813079834, 0.00827683162689209, 0.008252927780151367, 0.008346112251281738, 0.008250368118286134, 0.008390848159790038, 0.00824556827545166, 0.00831116771697998, 0.008275967597961426, 0.008460288047790527, 0.008644031524658204, 0.009065024375915528, 0.008480223655700683, 0.009562656402587891, 0.008839167594909669, 0.00829849624633789, 0.008408384323120117, 0.008303296089172364, 0.008303936004638673, 0.008727231979370116, 0.008295583724975585, 0.008302656173706055, 0.008300383567810059, 0.008381695747375488, 0.008601280212402343, 0.008304320335388183, 0.008284480094909667, 0.00825107192993164, 0.008370719909667968, 0.008371999740600586, 0.00833459186553955, 0.008305407524108887, 0.00822662353515625, 0.008285504341125488, 0.008362719535827637, 0.008347999572753907, 0.008341312408447266, 0.008322751998901368, 0.008509023666381836, 0.008288991928100587, 0.008392191886901856, 0.008384736061096191, 0.00830288028717041, 0.008273951530456543, 0.008292448043823243, 0.008371647834777831, 0.008290752410888672, 0.008379839897155761, 0.00830726432800293, 0.008346943855285645, 0.008272576332092284, 0.008259231567382813, 0.008310879707336426, 0.008253696441650391, 0.00825376033782959, 0.008257216453552246, 0.008318976402282715, 0.008269824028015137, 0.008278016090393067, 0.008332384109497071, 0.008274304389953612, 0.008289055824279785, 0.008326911926269531, 0.00833244800567627, 0.008309599876403808, 0.008265727996826172, 0.008285792350769042, 0.008274335861206055, 0.00826809597015381, 0.008334527969360352, 0.008292256355285644, 0.008283807754516601, 0.008273920059204102, 0.008267775535583496, 0.00831488037109375, 0.008271615982055664, 0.00826198387145996, 0.008302271842956543, 0.008272064208984376, 0.008273183822631836, 0.008280735969543457, 0.008328991889953613, 0.008302751541137696, 0.008283616065979004, 0.008290016174316406, 0.008263903617858887, 0.008272640228271485, 0.008255488395690918, 0.00829644775390625, 0.008271200180053711, 0.008241439819335937, 
0.008269472122192383, 0.00830288028717041, 0.008243647575378418, 0.00832096004486084, 0.008267840385437012, 0.008261631965637208, 0.008258655548095703, 0.00828115177154541, 0.008277695655822754, 0.008332672119140625, 0.008307071685791015, 0.008286911964416504, 0.008291872024536132, 0.00826204776763916, 0.008258784294128418, 0.00836460781097412, 0.008285823822021485, 0.00827014446258545, 0.008281408309936523, 0.008262399673461915, 0.008278016090393067, 0.008261407852172852, 0.008274144172668457, 0.00830463981628418, 0.008255488395690918, 0.008257823944091797, 0.008275168418884277, 0.008306655883789062, 0.008312543869018555, 0.008296832084655762, 0.008515711784362792, 0.008299967765808106, 0.008319264411926269, 0.008272480010986329, 0.008295424461364746, 0.00827455997467041, 0.008298879623413085, 0.008254624366760254, 0.008256352424621582, 0.008341504096984862, 0.008324416160583496, 0.008375328063964843, 0.008357631683349609, 0.008320896148681641, 0.008295807838439941, 0.008298720359802246, 0.00837491226196289, 0.008335488319396973, 0.008456064224243163, 0.008327168464660644, 0.008390496253967285, 0.008375616073608398, 0.008463199615478516, 0.00834886360168457, 0.008315711975097657, 0.008443167686462402, 0.008351648330688476, 0.008283167839050294, 0.008247072219848633, 0.008263615608215332, 0.008284223556518555, 0.008314816474914551, 0.008333375930786133, 0.008280159950256348, 0.008353695869445801, 0.008294400215148925, 0.008282112121582032, 0.008472512245178223, 0.009198847770690919, 0.00968115234375, 0.008580831527709961, 0.008777888298034667, 0.008370016098022461, 0.008364031791687012, 0.008358752250671387, 0.008459456443786621, 0.008415743827819825, 0.008293984413146972, 0.008297183990478515, 0.008302656173706055, 0.008279423713684082, 0.00830726432800293, 0.008277312278747558, 0.00826268768310547, 0.008393471717834472, 0.00831167984008789, 0.008263551712036132, 0.008414527893066407, 0.008280927658081055, 0.008244992256164551, 0.008235424041748048, 0.008295968055725097, 0.008259776115417481, 0.008245375633239746, 0.008275679588317871, 0.008237504005432128, 0.008295552253723144, 0.008288991928100587, 0.008269824028015137, 0.008387968063354493, 0.008244095802307129, 0.008305536270141602, 0.008254591941833495, 0.008248255729675294, 0.008297280311584472, 0.008281184196472167]",tokens/s,119.6907525648142,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1570.562048,1604.190208,0.0,1201.668096,1189.151232,s,1,8.401806640625,8.401806640625,0.0,8.401806640625,8.401806640625,8.401806640625,8.401806640625,[8.401806640625],,kWh,3.556609466666032e-05,3.915731534917954e-06,1.0888619821999845e-05,5.037044602357812e-05,,MB,1686.05696,1799.225344,0.0,1382.023168,1351.367168,s,10,0.4756693420410156,0.04756693420410156,0.00015275484322596928,0.047562110900878904,0.04775382080078125,0.04782882995605468,0.04788883728027344,"[0.047584224700927734, 
0.04736636734008789, 0.04743743896484375, 0.047473312377929684, 0.047622112274169924, 0.04754950332641601, 0.04742067337036133, 0.047737152099609374, 0.047574718475341796, 0.047903839111328124]",tokens/s,5381.8898418289455,kWh,1.427549544024274e-06,1.5743328940958855e-07,9.501376164780055e-07,2.5351204499118683e-06,tokens/kWh,100981395.1873173,MB,1693.847552,1841.168384,0.0,1423.966208,1407.328256,s,10,13.674264770507813,1.3674264770507814,0.007045571745078874,1.3668082885742188,1.3741877197265624,1.3784882080078125,1.3819285986328125,"[1.3601513671875, 1.3654959716796875, 1.364861572265625, 1.36155078125, 1.3573333740234375, 1.368206787109375, 1.3827886962890625, 1.3732320556640625, 1.3725235595703125, 1.36812060546875]",tokens/s,46.07194687050104,kWh,3.9831488602639934e-05,4.393011124421548e-06,1.8551933540723365e-05,6.277643326778483e-05,tokens/kWh,1003561.3162548038,,s,630,13.671653858184815,0.021701037870134627,0.0004162646086460559,0.021623727798461917,0.021904031372070313,0.022045338535308837,0.023074691810607912,"[0.021573247909545897, 0.021596351623535157, 0.02155743980407715, 0.02169599914550781, 0.02179532814025879, 0.021652864456176757, 0.021800640106201172, 0.021607168197631837, 0.021448896408081054, 0.02148681640625, 0.021555999755859374, 0.021557247161865235, 0.021530624389648437, 0.021436256408691408, 0.021498016357421875, 0.02152448081970215, 0.02158758354187012, 0.021494144439697264, 0.021558719635009764, 0.021594688415527342, 0.021598207473754884, 0.021590368270874023, 0.02167363166809082, 0.02165555191040039, 0.021716224670410157, 0.02170342445373535, 0.02165692710876465, 0.021516319274902343, 0.02162886428833008, 0.02161324882507324, 0.021485567092895508, 0.021747711181640626, 0.021530399322509764, 0.021549280166625977, 0.021596160888671875, 0.02153011131286621, 0.021486080169677735, 0.021824607849121092, 0.021535648345947265, 0.021587648391723634, 0.02142639923095703, 0.021516096115112304, 0.02151203155517578, 0.02150649642944336, 0.021610496520996093, 0.021539968490600588, 0.021502847671508788, 0.021664863586425782, 0.021618976593017578, 0.02155174446105957, 0.021534208297729493, 0.021554752349853514, 0.021861312866210937, 0.02161664009094238, 0.021495359420776367, 0.021582271575927733, 0.02152003288269043, 0.02153913688659668, 0.021749792098999025, 0.021569280624389647, 0.021401599884033205, 0.021537023544311522, 0.021733375549316408, 0.02137273597717285, 0.0215063362121582, 0.021516191482543946, 0.02147737693786621, 0.02142972755432129, 0.02173529624938965, 0.02146371269226074, 0.021727231979370116, 0.021395040512084962, 0.021424543380737304, 0.02172313690185547, 0.02175529670715332, 0.0218089599609375, 0.021727264404296873, 0.021674720764160157, 0.021595935821533203, 0.021643104553222655, 0.021516704559326173, 0.02155094337463379, 0.021388639450073244, 0.021408512115478517, 0.02146895980834961, 0.021440799713134766, 0.02141756820678711, 0.021402015686035156, 0.021708799362182618, 0.021665632247924806, 0.021448863983154296, 0.021432191848754882, 0.021473407745361328, 0.021727231979370116, 0.0216712646484375, 0.02309391975402832, 0.022691839218139647, 0.02181046485900879, 0.021666240692138673, 0.021702943801879884, 0.021743616104125976, 0.021563392639160156, 0.022386688232421875, 0.02302761650085449, 0.0216843204498291, 0.02168832015991211, 0.02162892723083496, 0.021780448913574217, 0.02179075241088867, 0.02154857635498047, 0.021559200286865234, 0.021520639419555666, 0.021498176574707033, 0.021478431701660156, 0.021408735275268556, 0.021497791290283202, 
0.021482784271240233, 0.021433120727539064, 0.0215285758972168, 0.02143833541870117, 0.021534271240234375, 0.02166227149963379, 0.022075103759765624, 0.022122592926025392, 0.021855968475341797, 0.021657567977905273, 0.021494848251342773, 0.021482431411743164, 0.021604352951049805, 0.021415935516357423, 0.02145484733581543, 0.021691680908203125, 0.02246076774597168, 0.022122880935668946, 0.021630847930908203, 0.021689983367919923, 0.021680639266967772, 0.021593215942382813, 0.022244224548339842, 0.022859519958496093, 0.022876415252685547, 0.021729215621948243, 0.021610559463500975, 0.02168217658996582, 0.021503679275512694, 0.02156515121459961, 0.021587551116943358, 0.02169343948364258, 0.02166579246520996, 0.021561248779296875, 0.021985376358032226, 0.021605567932128908, 0.021521215438842774, 0.021495840072631837, 0.021516544342041016, 0.021509855270385743, 0.02150809669494629, 0.021506048202514647, 0.021523679733276367, 0.021551904678344728, 0.02155516815185547, 0.021547008514404296, 0.02170262336730957, 0.02149177551269531, 0.02154617691040039, 0.021513023376464845, 0.021540864944458008, 0.02161664009094238, 0.021548608779907226, 0.021600704193115234, 0.021602304458618164, 0.02158099174499512, 0.021553983688354494, 0.02153628730773926, 0.021499359130859375, 0.021601184844970703, 0.021504032135009767, 0.021527616500854493, 0.021691328048706056, 0.021553216934204103, 0.021610496520996093, 0.02153183937072754, 0.02156787109375, 0.02164575958251953, 0.02157145690917969, 0.02159779167175293, 0.02171104049682617, 0.021737823486328123, 0.021710559844970702, 0.021526304244995118, 0.021575904846191405, 0.021437824249267576, 0.021481407165527343, 0.02150876808166504, 0.02146512031555176, 0.021506048202514647, 0.021559295654296876, 0.02149990463256836, 0.021538816452026367, 0.02145280075073242, 0.02149580764770508, 0.021444608688354492, 0.02147942352294922, 0.02156096076965332, 0.02156559944152832, 0.021522655487060546, 0.021605823516845705, 0.021461376190185545, 0.021511423110961915, 0.021455808639526366, 0.0216760311126709, 0.021577535629272462, 0.021633216857910156, 0.021587392807006837, 0.021569536209106444, 0.02152009582519531, 0.02158883285522461, 0.021659456253051757, 0.021852352142333983, 0.021934080123901366, 0.02183782386779785, 0.021747711181640626, 0.021671680450439452, 0.021552383422851564, 0.021633888244628908, 0.021672096252441406, 0.022633535385131836, 0.023001440048217775, 0.021693023681640625, 0.02168956756591797, 0.02185091209411621, 0.021639167785644533, 0.021741567611694337, 0.021636415481567382, 0.021527231216430662, 0.021501440048217774, 0.021471744537353517, 0.021440160751342772, 0.021426528930664063, 0.021592063903808592, 0.021893119812011717, 0.02145280075073242, 0.021518335342407227, 0.02146099281311035, 0.021501344680786134, 0.021469791412353514, 0.02145894432067871, 0.02141980743408203, 0.021374719619750977, 0.0214368953704834, 0.02159721565246582, 0.02152931213378906, 0.021459104537963868, 0.021483455657958984, 0.021645151138305663, 0.021441280364990236, 0.021437536239624022, 0.021393728256225587, 0.021486047744750977, 0.02153606414794922, 0.021450687408447265, 0.02148646354675293, 0.02137468719482422, 0.021411327362060546, 0.021322336196899414, 0.021452768325805664, 0.021595903396606445, 0.021457376480102538, 0.021454816818237306, 0.021495840072631837, 0.02146303939819336, 0.021587968826293946, 0.021489664077758788, 0.021481472015380858, 0.021587007522583006, 0.02156844711303711, 0.021555200576782226, 0.021601728439331055, 0.021657344818115234, 0.021653568267822266, 
0.02149247932434082, 0.021503456115722658, 0.0214881591796875, 0.021495744705200194, 0.021555263519287108, 0.02149171257019043, 0.02146713638305664, 0.02157513618469238, 0.021934623718261718, 0.021880064010620117, 0.02168668746948242, 0.021639520645141602, 0.021546783447265624, 0.021500064849853517, 0.021484895706176756, 0.021506784439086914, 0.021605695724487305, 0.021492095947265626, 0.021620223999023438, 0.021492544174194335, 0.022302112579345702, 0.021574079513549806, 0.021527711868286132, 0.021529600143432616, 0.021501087188720704, 0.021492095947265626, 0.021519840240478514, 0.021541887283325196, 0.021488960266113282, 0.021498559951782226, 0.02149100875854492, 0.021500608444213868, 0.021534303665161132, 0.021463071823120117, 0.02163942337036133, 0.021717824935913087, 0.0215380802154541, 0.02154979133605957, 0.021487583160400392, 0.026583072662353515, 0.02278131294250488, 0.02165113639831543, 0.021584831237792968, 0.021643264770507813, 0.02165350341796875, 0.021530624389648437, 0.0216760311126709, 0.021761024475097656, 0.022021120071411132, 0.021743616104125976, 0.021551103591918946, 0.021577728271484374, 0.021716991424560548, 0.02153267288208008, 0.021604352951049805, 0.021542911529541017, 0.021610496520996093, 0.02158492851257324, 0.021947359085083006, 0.021480735778808595, 0.02155183982849121, 0.02145894432067871, 0.021561344146728514, 0.02151628875732422, 0.021607839584350585, 0.021520992279052735, 0.021587968826293946, 0.021573631286621094, 0.021600255966186522, 0.021703840255737305, 0.021590879440307617, 0.021521888732910156, 0.02146748733520508, 0.021575872421264648, 0.02153267288208008, 0.022271999359130858, 0.021564863204956056, 0.021592639923095704, 0.021506048202514647, 0.02159555244445801, 0.021563488006591795, 0.0216048641204834, 0.02153232002258301, 0.021533023834228514, 0.021590015411376954, 0.021590015411376954, 0.021495359420776367, 0.02148601531982422, 0.021576704025268553, 0.021556224822998047, 0.02157155227661133, 0.021593568801879882, 0.02165203285217285, 0.021565439224243164, 0.021639167785644533, 0.021465087890625, 0.021788671493530275, 0.02191564750671387, 0.02190991973876953, 0.021789087295532226, 0.021736576080322267, 0.02166032028198242, 0.02171516799926758, 0.021573631286621094, 0.02198031997680664, 0.022115167617797853, 0.021813087463378907, 0.023271583557128907, 0.021850175857543945, 0.021712831497192383, 0.02164684867858887, 0.02166156768798828, 0.02166383934020996, 0.021813791275024415, 0.021770240783691407, 0.021821151733398436, 0.021717279434204102, 0.02168544006347656, 0.021780895233154296, 0.02170102310180664, 0.021738752365112305, 0.02172185516357422, 0.02169798469543457, 0.02185856056213379, 0.022387008666992187, 0.02205695915222168, 0.02195644760131836, 0.02180281639099121, 0.02230089569091797, 0.02202579116821289, 0.021723712921142578, 0.02168627166748047, 0.021704704284667968, 0.0217640323638916, 0.021684288024902344, 0.021654848098754884, 0.021700960159301758, 0.021960319519042967, 0.022147808074951172, 0.024268800735473633, 0.026814464569091798, 0.021858240127563478, 0.02192140769958496, 0.021801408767700196, 0.02166988754272461, 0.021700607299804688, 0.021843967437744142, 0.021643264770507813, 0.021909503936767577, 0.021742879867553713, 0.021850847244262697, 0.021712896347045898, 0.021843967437744142, 0.021827455520629882, 0.021962879180908203, 0.021755903244018555, 0.02168627166748047, 0.021766143798828123, 0.021710111618041993, 0.021664480209350585, 0.021636735916137694, 0.02190342330932617, 0.02181875228881836, 0.021824127197265626, 
0.021857311248779297, 0.021791391372680664, 0.02162719917297363, 0.022025375366210936, 0.021867359161376953, 0.021792736053466797, 0.021866527557373047, 0.021769311904907225, 0.022133663177490236, 0.021782527923583983, 0.021675519943237305, 0.021795263290405275, 0.021728864669799806, 0.021825855255126952, 0.022008991241455077, 0.021744640350341796, 0.021884927749633788, 0.021971168518066405, 0.022019775390625, 0.021827680587768555, 0.02187468719482422, 0.0216595516204834, 0.021769535064697264, 0.02183184051513672, 0.021893312454223633, 0.02180656051635742, 0.021791263580322264, 0.021758176803588866, 0.02206742477416992, 0.02166579246520996, 0.021733375549316408, 0.02171494483947754, 0.021721088409423828, 0.021624671936035157, 0.021825696945190428, 0.021835168838500976, 0.02173174476623535, 0.021672128677368164, 0.021819391250610352, 0.021788543701171875, 0.02166387176513672, 0.021620479583740235, 0.021672191619873046, 0.021689952850341795, 0.021763776779174803, 0.02163759994506836, 0.021586143493652343, 0.021639039993286133, 0.02162499237060547, 0.02166953659057617, 0.02213923263549805, 0.021712896347045898, 0.021764095306396485, 0.021897216796875, 0.021927135467529297, 0.02203113555908203, 0.021753152847290038, 0.021676736831665037, 0.02165900802612305, 0.02174835205078125, 0.02188073539733887, 0.022018495559692382, 0.021874399185180665, 0.02194905662536621, 0.021643264770507813, 0.021568735122680663, 0.02160908889770508, 0.021786239624023436, 0.02150454330444336, 0.02167398452758789, 0.021733375549316408, 0.02166988754272461, 0.021669023513793944, 0.021550975799560546, 0.021961343765258788, 0.021948640823364257, 0.021790143966674804, 0.021964864730834963, 0.02191417694091797, 0.02188038444519043, 0.021835391998291015, 0.02191244888305664, 0.02186412811279297, 0.02165996742248535, 0.021557247161865235, 0.021659648895263672, 0.021590015411376954, 0.021563072204589844, 0.021602432250976564, 0.021584064483642577, 0.021807104110717773, 0.021596000671386718, 0.02160246467590332, 0.021608448028564452, 0.02165113639831543, 0.021678400039672852, 0.022208511352539064, 0.02406934356689453, 0.021963552474975587, 0.021952512741088868, 0.02172313690185547, 0.021656959533691407, 0.021697151184082032, 0.02183782386779785, 0.02162892723083496, 0.021736448287963867, 0.021834527969360352, 0.021780704498291014, 0.021741567611694337, 0.021753856658935547, 0.021794496536254884, 0.02174291229248047, 0.02167091178894043, 0.021552799224853515, 0.02181769561767578, 0.02174550437927246, 0.021790016174316407, 0.021786848068237306, 0.021711488723754883, 0.021616191864013673, 0.021795167922973632, 0.021647455215454102, 0.02166579246520996, 0.021655967712402344, 0.02162131118774414, 0.021577728271484374, 0.021739519119262696, 0.02150150489807129, 0.021516511917114258, 0.02165123176574707, 0.021596063613891603, 0.021600448608398437, 0.02159244728088379, 0.021647296905517577, 0.021614240646362304, 0.021651840209960936, 0.021608448028564452, 0.021640447616577147, 0.021667903900146484, 0.02179340744018555, 0.021915136337280275, 0.02549612808227539, 0.021942272186279296, 0.02186240005493164, 0.021751359939575197, 0.021641408920288086, 0.021612800598144532, 0.021573471069335937, 0.0216080322265625, 0.021598783493041993, 0.0216944637298584, 0.02164531135559082, 0.02161155128479004, 0.021574464797973633, 0.02155945587158203, 0.02159984016418457, 0.021707168579101564, 0.021583871841430666, 0.02164531135559082, 0.021606399536132814, 0.021602304458618164, 0.021583871841430666, 0.021737472534179687, 0.021659456253051757, 
0.021627071380615235, 0.021577280044555665, 0.021658048629760743, 0.02182963180541992, 0.02173676872253418, 0.02160047912597656, 0.021588447570800782, 0.021626880645751953, 0.021630239486694337, 0.021544927597045897, 0.021539583206176757, 0.021622783660888673, 0.021563392639160156, 0.021538816452026367, 0.021540864944458008, 0.021622783660888673, 0.021518335342407227, 0.02229248046875, 0.02163711929321289, 0.02171811294555664, 0.021703584671020508, 0.02168009567260742]",tokens/s,46.08074535348462,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,813.760512,561.905664,0.0,159.383552,143.673856,s,1,7.3676845703125,7.3676845703125,0.0,7.3676845703125,7.3676845703125,7.3676845703125,7.3676845703125,[7.3676845703125],,kWh,1.0916867704190736e-05,1.196920151901788e-06,2.723335512017089e-06,1.4837123368109613e-05,,MB,1306.140672,616.431616,0.0,199.22944,186.684928,s,24,0.19232851219177247,0.00801368800799052,6.165399156196195e-05,0.008013535976409912,0.008090870380401611,0.008098009777069093,0.008146730442047119,"[0.008099200248718262, 0.007954815864562988, 0.007960127830505372, 0.007942751884460449, 0.007936384201049805, 0.008160927772521972, 0.007939871788024902, 0.008080512046813966, 0.007945375919342041, 0.007949920177459716, 0.007964767932891846, 0.00809126377105713, 0.007992832183837891, 0.008029184341430663, 0.008009984016418456, 0.007948351860046386, 0.008051456451416016, 0.008043423652648925, 0.008026975631713867, 0.007994656085968017, 0.008017087936401367, 0.008020544052124023, 0.008078144073486329, 0.00808995246887207]",tokens/s,31945.34148880517,kWh,2.411672569765749e-07,2.65963053540374e-08,9.840756265619628e-08,3.661711249868086e-07,tokens/kWh,699126672.014956,MB,1319.596032,620.62592,0.0,203.423744,186.687488,s,24,10.198731811523437,0.42494715881347656,0.002993508830446943,0.42421670532226563,0.42788203125,0.43008334045410157,0.43438586303710935,"[0.4269555053710937, 0.42416644287109373, 0.4237364196777344, 0.4224688415527344, 0.4228974914550781, 0.4250695495605469, 0.42160183715820315, 0.4242669677734375, 0.42280044555664065, 0.42234713745117186, 0.4238018188476563, 0.42356057739257813, 0.42827911376953126, 0.4249194030761719, 0.4304017333984375, 0.4227448425292969, 0.4239934387207031, 0.422290771484375, 0.4267169189453125, 0.4264006042480469, 0.435575927734375, 0.42447433471679685, 0.42450375366210935, 0.4247579345703125]",tokens/s,148.25372683019347,kWh,1.2420049390939685e-05,1.3697175134143692e-06,4.332186259594147e-06,1.81219531639482e-05,tokens/kWh,3476446.464133466,,s,1512,10.184698696136467,0.0067359118360690974,0.00020807272034875375,0.006702847957611084,0.006794848155975342,0.006853195142745972,0.007366307768821726,"[0.0068031039237976074, 0.006912352085113526, 0.006958335876464844, 0.006751232147216797, 0.006762239933013916, 0.006750207901000976, 0.006713280200958252, 0.006719327926635742, 0.006747583866119385, 0.006662015914916992, 0.006730432033538818, 0.006948863983154297, 0.006755936145782471, 0.006722176074981689, 0.006770688056945801, 0.006781184196472168, 0.0066967358589172365, 0.006879327774047851, 0.006752064228057861, 0.0069012799263000486, 0.006753151893615723, 0.006775487899780273, 0.00684335994720459, 0.006823935985565186, 0.006713344097137451, 0.00671884822845459, 0.006736192226409912, 0.006696576118469238, 
0.00671398401260376, 0.006711135864257813, 0.006691328048706055, 0.006713056087493897, 0.00671721601486206, 0.00681932783126831, 0.006746848106384277, 0.006976672172546387, 0.00677564811706543, 0.006715487957000732, 0.006815552234649658, 0.006703199863433838, 0.006797056198120117, 0.006723648071289063, 0.006696288108825684, 0.0067387838363647465, 0.006692863941192627, 0.00667852783203125, 0.006750207901000976, 0.006686719894409179, 0.006846528053283691, 0.006821824073791504, 0.006743807792663575, 0.0067873277664184575, 0.0069918718338012695, 0.006721536159515381, 0.006719488143920899, 0.0069489598274230955, 0.00680460786819458, 0.006727807998657227, 0.006721280097961426, 0.006715744018554687, 0.006695487976074219, 0.00667852783203125, 0.006814943790435791, 0.006721983909606934, 0.006704864025115967, 0.006998208045959473, 0.006762495994567871, 0.006735648155212402, 0.0067402877807617185, 0.006809599876403808, 0.006711135864257813, 0.0066706881523132325, 0.006695744037628174, 0.006718463897705078, 0.006673408031463623, 0.006699903964996338, 0.006727807998657227, 0.006739039897918701, 0.006671264171600342, 0.006719583988189697, 0.006682528018951416, 0.006709248065948486, 0.006726719856262207, 0.0066835517883300785, 0.006776768207550049, 0.006711296081542969, 0.0067114558219909665, 0.006756576061248779, 0.00668342399597168, 0.006692863941192627, 0.006734208106994629, 0.00681660795211792, 0.006756063938140869, 0.006696800231933594, 0.006751584053039551, 0.0066826558113098145, 0.00667523193359375, 0.006704864025115967, 0.006703392028808593, 0.0068056960105895995, 0.006694943904876709, 0.006722879886627197, 0.006707680225372314, 0.006680575847625733, 0.006744063854217529, 0.006715392112731934, 0.00674403190612793, 0.006860832214355469, 0.006692287921905518, 0.006686880111694336, 0.006714975833892823, 0.0067571840286254885, 0.006684671878814697, 0.0067010560035705566, 0.006709248065948486, 0.006673632144927978, 0.006701759815216065, 0.006692959785461426, 0.006653791904449463, 0.006729887962341309, 0.006671711921691894, 0.006703775882720947, 0.006782048225402832, 0.006717376232147217, 0.006673151969909668, 0.006821248054504394, 0.0067274560928344725, 0.006688704013824463, 0.006694943904876709, 0.006739935874938965, 0.00667852783203125, 0.0066960639953613284, 0.006817759990692139, 0.006703392028808593, 0.00670579195022583, 0.006704448223114013, 0.006707647800445557, 0.006709887981414795, 0.006708767890930176, 0.0067248959541320805, 0.006742752075195312, 0.006765888214111328, 0.0067383041381835936, 0.006690720081329346, 0.006690336227416992, 0.006687424182891846, 0.006750720024108887, 0.0066761598587036135, 0.006700992107391357, 0.006705247879028321, 0.00667142391204834, 0.006799647808074951, 0.006707071781158447, 0.006671487808227539, 0.006759871959686279, 0.006709536075592041, 0.006719583988189697, 0.00679036808013916, 0.006911808013916015, 0.006684864044189453, 0.006832831859588623, 0.006712800025939942, 0.006696896076202392, 0.006691487789154052, 0.006729343891143799, 0.006758592128753662, 0.006690112113952637, 0.006711584091186523, 0.0066771841049194335, 0.0067786240577697755, 0.006721119880676269, 0.006723264217376709, 0.0066936960220336916, 0.006665823936462402, 0.006696447849273681, 0.006677055835723877, 0.006693215847015381, 0.006694816112518311, 0.006686048030853271, 0.0067058238983154295, 0.006686143875122071, 0.006678400039672852, 0.006705952167510986, 0.006673696041107177, 0.00677129602432251, 0.006707263946533203, 0.006680064201354981, 0.006742623805999756, 0.006698912143707276, 
0.006696959972381592, 0.0066592001914978025, 0.006687615871429443, 0.006696896076202392, 0.006751584053039551, 0.006762847900390625, 0.006666624069213868, 0.006651264190673828, 0.0066538238525390625, 0.006680960178375244, 0.006662528038024903, 0.006721471786499024, 0.006721183776855468, 0.006711711883544922, 0.006656000137329102, 0.00669593620300293, 0.0066713600158691405, 0.006673439979553223, 0.00670307207107544, 0.006688960075378418, 0.006658976078033447, 0.006754208087921143, 0.006670464038848877, 0.006676511764526367, 0.006696800231933594, 0.0067151041030883785, 0.006664480209350586, 0.00667852783203125, 0.006667327880859375, 0.006719840049743652, 0.006695136070251465, 0.006683263778686523, 0.006643455982208252, 0.0067125439643859865, 0.006670783996582031, 0.006671807765960693, 0.006681407928466797, 0.006719840049743652, 0.006670015811920166, 0.0066724481582641606, 0.006684127807617187, 0.006720064163208007, 0.006702400207519531, 0.006677343845367432, 0.006663008213043213, 0.006689760208129883, 0.006737919807434082, 0.0066416640281677245, 0.006699359893798828, 0.0067084159851074215, 0.0067506561279296875, 0.006681695938110351, 0.006683584213256836, 0.006840415954589844, 0.006750112056732178, 0.006729728221893311, 0.006823935985565186, 0.006715392112731934, 0.006681856155395508, 0.006703904151916504, 0.0066468482017517086, 0.006695839881896972, 0.006715648174285889, 0.00662169599533081, 0.006658527851104736, 0.00668236780166626, 0.006643008232116699, 0.00666860818862915, 0.006717343807220459, 0.006673120021820069, 0.006674528121948242, 0.006682528018951416, 0.0066145601272583005, 0.006670815944671631, 0.006692736148834228, 0.006641727924346924, 0.006682943820953369, 0.006759456157684326, 0.006667232036590576, 0.006731296062469482, 0.006697184085845948, 0.00668387222290039, 0.006662559986114502, 0.006684160232543946, 0.006666463851928711, 0.006711967945098877, 0.006688992023468018, 0.006821152210235596, 0.006668799877166748, 0.006689055919647217, 0.006670207977294922, 0.006661151885986328, 0.006685823917388916, 0.006641568183898926, 0.006713119983673096, 0.006674367904663086, 0.006674655914306641, 0.006664031982421875, 0.0067003841400146485, 0.006677311897277832, 0.006658912181854248, 0.006681600093841553, 0.006813632011413574, 0.0066761279106140135, 0.0066724162101745605, 0.006646143913269043, 0.006640704154968262, 0.006667200088500976, 0.006657408237457275, 0.006648608207702637, 0.006774271965026855, 0.006699359893798828, 0.0066641921997070315, 0.0066993279457092285, 0.006688447952270507, 0.006651904106140137, 0.006703104019165039, 0.006675680160522461, 0.00670307207107544, 0.006722239971160888, 0.0074787201881408695, 0.006890175819396972, 0.00671068811416626, 0.006842016220092773, 0.006725887775421143, 0.006681280136108398, 0.006646527767181397, 0.006701183795928955, 0.0066795840263366695, 0.006654079914093018, 0.0066772160530090335, 0.006662144184112549, 0.006658048152923584, 0.006739264011383057, 0.006666944026947021, 0.0066538558006286625, 0.006689919948577881, 0.00667683219909668, 0.006803455829620361, 0.00667852783203125, 0.006668928146362304, 0.006701183795928955, 0.00671343994140625, 0.00667849588394165, 0.00667145586013794, 0.006693183898925781, 0.006674848079681397, 0.006663680076599121, 0.0066993279457092285, 0.00674015998840332, 0.006852416038513184, 0.006745567798614502, 0.006766655921936035, 0.00664847993850708, 0.006746016025543213, 0.006670432090759277, 0.006621183872222901, 0.006797311782836914, 0.0067870721817016606, 0.006739999771118164, 0.006684800148010254, 
0.0067285442352294925, 0.007914495944976807, 0.0073768959045410155, 0.0072724480628967286, 0.00667683219909668, 0.006761760234832764, 0.006713727951049805, 0.006672607898712158, 0.006907519817352295, 0.006678688049316406, 0.006658400058746338, 0.006712800025939942, 0.006686944007873535, 0.0067134079933166505, 0.006685664176940918, 0.006699615955352783, 0.0066562237739562985, 0.006673888206481933, 0.006647679805755615, 0.006629951953887939, 0.0066641921997070315, 0.006688992023468018, 0.006639488220214844, 0.006698847770690918, 0.006656288146972656, 0.006645535945892334, 0.006678368091583252, 0.006788832187652588, 0.006688672065734863, 0.006875167846679688, 0.006791967868804932, 0.006686272144317627, 0.006653664112091065, 0.006666975975036621, 0.006645760059356689, 0.006656000137329102, 0.006709248065948486, 0.006651968002319336, 0.006643487930297852, 0.006668288230895996, 0.006709407806396484, 0.006647168159484863, 0.006754496097564697, 0.006696864128112793, 0.006770527839660644, 0.006688767910003662, 0.006673024177551269, 0.0066273918151855465, 0.006674752235412597, 0.006656832218170166, 0.006650752067565918, 0.006668288230895996, 0.006706751823425293, 0.006664544105529785, 0.006674528121948242, 0.0066490240097045895, 0.006631360054016113, 0.006695168018341064, 0.0066566400527954105, 0.006617087841033936, 0.006652991771697998, 0.006695551872253418, 0.006624639987945557, 0.00667519998550415, 0.006708447933197022, 0.006777215957641602, 0.006711904048919678, 0.006719488143920899, 0.006657599925994873, 0.006740032196044922, 0.006658432006835938, 0.006637279987335205, 0.006664480209350586, 0.006660352230072022, 0.0066516480445861816, 0.006692863941192627, 0.006664351940155029, 0.006680416107177734, 0.00667628812789917, 0.006635519981384277, 0.006643904209136963, 0.006694560050964356, 0.006675007820129394, 0.006812479972839356, 0.006673376083374024, 0.006708703994750977, 0.006640160083770752, 0.0066888961791992186, 0.006689727783203125, 0.0066789441108703615, 0.00668726396560669, 0.006690783977508545, 0.0067309122085571285, 0.006649856090545654, 0.006700160026550293, 0.006690559864044189, 0.006668032169342041, 0.006716991901397705, 0.006699103832244873, 0.006654047966003418, 0.006791264057159424, 0.00674019193649292, 0.006701344013214111, 0.006698912143707276, 0.00663705587387085, 0.006653535842895508, 0.0066776638031005855, 0.00688044786453247, 0.00675491189956665, 0.006913951873779297, 0.006678592205047607, 0.006740064144134522, 0.006633535861968994, 0.006680031776428222, 0.0066891517639160155, 0.006652031898498535, 0.006694975852966308, 0.00671724796295166, 0.006654240131378174, 0.006680287837982178, 0.006673759937286377, 0.006647552013397217, 0.006723872184753418, 0.00668236780166626, 0.006658944129943847, 0.006959104061126709, 0.006758399963378906, 0.006676479816436768, 0.006703199863433838, 0.006708320140838623, 0.006673151969909668, 0.00678268814086914, 0.0067341761589050295, 0.006736127853393554, 0.006878719806671142, 0.006686272144317627, 0.006634175777435303, 0.007214975833892822, 0.0067339520454406734, 0.007028128147125244, 0.00670576000213623, 0.0067276802062988285, 0.006681695938110351, 0.006812255859375, 0.006715712070465088, 0.006735648155212402, 0.0066724162101745605, 0.006695295810699463, 0.006779967784881592, 0.006719456195831299, 0.006642176151275635, 0.00673196792602539, 0.00666761589050293, 0.006648575782775879, 0.00666326379776001, 0.006647552013397217, 0.006673696041107177, 0.0066752958297729495, 0.006656288146972656, 0.0066579518318176266, 0.00666153621673584, 
0.0066457920074462894, 0.006753759860992432, 0.006678847789764404, 0.006642271995544433, 0.006719488143920899, 0.006684832096099854, 0.006666079998016357, 0.006651679992675782, 0.006695136070251465, 0.006696959972381592, 0.007280640125274658, 0.006961152076721191, 0.0068037757873535155, 0.006710976123809814, 0.006671872138977051, 0.006703296184539795, 0.006681151866912842, 0.006653471946716309, 0.006677919864654541, 0.0066752638816833494, 0.006703104019165039, 0.006671807765960693, 0.006650176048278809, 0.0067363839149475096, 0.006694655895233154, 0.006656000137329102, 0.006696959972381592, 0.006690815925598144, 0.006715487957000732, 0.006698912143707276, 0.0066826238632202144, 0.006688543796539307, 0.0066696639060974125, 0.0067136001586914065, 0.006699647903442383, 0.006677599906921386, 0.006787519931793213, 0.006695680141448975, 0.006716127872467041, 0.006783999919891357, 0.006672383785247803, 0.006660096168518067, 0.006645472049713135, 0.006700672149658203, 0.006698912143707276, 0.006683135986328125, 0.006687200069427491, 0.0067233600616455075, 0.006647808074951172, 0.006670335769653321, 0.006672383785247803, 0.006640768051147461, 0.006742080211639404, 0.00671827220916748, 0.00665718412399292, 0.006688992023468018, 0.006652800083160401, 0.006638976097106933, 0.006689472198486328, 0.0066819839477539065, 0.006701024055480957, 0.006676896095275879, 0.006647808074951172, 0.006638720035552979, 0.006677472114562989, 0.006661312103271484, 0.006648543834686279, 0.0067338237762451176, 0.006801407814025879, 0.006686719894409179, 0.006696959972381592, 0.006698016166687012, 0.006656000137329102, 0.006698080062866211, 0.006762527942657471, 0.006644991874694825, 0.006717343807220459, 0.0066928000450134275, 0.0067873277664184575, 0.006671040058135986, 0.006711103916168213, 0.00669593620300293, 0.006667263984680176, 0.006676447868347168, 0.006719808101654053, 0.006661280155181885, 0.006693855762481689, 0.006682208061218262, 0.006793407917022705, 0.006686719894409179, 0.006678336143493653, 0.006651487827301026, 0.006748576164245606, 0.006666240215301514, 0.006688767910003662, 0.0066776638031005855, 0.006656511783599854, 0.006736095905303955, 0.006738048076629638, 0.006710559844970703, 0.0066847681999206545, 0.006716032028198243, 0.006647808074951172, 0.006626848220825195, 0.006660575866699219, 0.006666240215301514, 0.00663702392578125, 0.006674272060394287, 0.006650559902191162, 0.0070266880989074704, 0.006718656063079834, 0.006699584007263184, 0.006684927940368652, 0.006703296184539795, 0.0066909117698669435, 0.00664137601852417, 0.00668617582321167, 0.006756896018981933, 0.006672383785247803, 0.006754303932189941, 0.006671711921691894, 0.006649631977081299, 0.006701951980590821, 0.006675456047058105, 0.006773759841918945, 0.006657599925994873, 0.006695231914520264, 0.006799456119537354, 0.006678112030029297, 0.006721343994140625, 0.006695551872253418, 0.006665631771087647, 0.006758560180664062, 0.006678976058959961, 0.0067233600616455075, 0.006697184085845948, 0.00667852783203125, 0.006722911834716797, 0.0067283520698547365, 0.0072226881980896, 0.006748159885406494, 0.006668255805969238, 0.006702847957611084, 0.0067060480117797855, 0.006651135921478271, 0.006708191871643066, 0.006724607944488525, 0.006649951934814453, 0.0066928000450134275, 0.006714111804962159, 0.006657343864440918, 0.006685279846191407, 0.006670048236846924, 0.006647679805755615, 0.0066769919395446775, 0.006657216072082519, 0.006650559902191162, 0.00673305606842041, 0.006943615913391113, 0.0066744318008422855, 0.0067116479873657224, 
0.006688608169555664, 0.006612800121307373, 0.006776832103729248, 0.006708703994750977, 0.006662112236022949, 0.0066893439292907714, 0.006659904003143311, 0.006615231990814209, 0.007152895927429199, 0.006743135929107666, 0.00676639986038208, 0.006750336170196533, 0.006757919788360596, 0.006660287857055664, 0.006660096168518067, 0.006704768180847168, 0.0066891517639160155, 0.006684031963348389, 0.006710912227630615, 0.006693759918212891, 0.006784351825714111, 0.006862815856933594, 0.006754303932189941, 0.006785024166107178, 0.006707071781158447, 0.006786496162414551, 0.006695615768432617, 0.006751584053039551, 0.006719488143920899, 0.006734208106994629, 0.006673984050750733, 0.006755040168762207, 0.006727231979370117, 0.006693183898925781, 0.006729856014251709, 0.006795263767242431, 0.006747680187225342, 0.006662623882293701, 0.006709184169769287, 0.006680575847625733, 0.006684735774993896, 0.006698400020599365, 0.006675039768218994, 0.006720640182495117, 0.006687776088714599, 0.00668620777130127, 0.006656352043151855, 0.006692863941192627, 0.0066638078689575195, 0.006662528038024903, 0.006674655914306641, 0.006754079818725586, 0.006740992069244385, 0.0067573761940002445, 0.006686079978942871, 0.006632063865661621, 0.006729887962341309, 0.006739808082580566, 0.00666864013671875, 0.006706079959869385, 0.006759168148040772, 0.006666048049926758, 0.0067216320037841795, 0.0067073597908020016, 0.006652063846588134, 0.0067003521919250485, 0.006695199966430664, 0.006660128116607666, 0.006746272087097168, 0.006708799839019775, 0.006647776126861573, 0.006711743831634522, 0.00670905590057373, 0.006832352161407471, 0.006653952121734619, 0.006791168212890625, 0.0067667198181152344, 0.0066455998420715335, 0.006756383895874024, 0.006731840133666992, 0.00675600004196167, 0.0066908798217773435, 0.006733856201171875, 0.006655295848846435, 0.006651936054229736, 0.006674176216125489, 0.00667852783203125, 0.006660096168518067, 0.0070321598052978515, 0.006977215766906738, 0.008258527755737304, 0.00793552017211914, 0.006771168231964111, 0.006719103813171387, 0.006760575771331787, 0.0067547521591186524, 0.006724800109863281, 0.00674454402923584, 0.006715839862823487, 0.006741727828979492, 0.006731776237487793, 0.006780416011810303, 0.006682752132415771, 0.006700831890106201, 0.006666719913482666, 0.006750336170196533, 0.006686143875122071, 0.006765120029449463, 0.006682144165039062, 0.00674838399887085, 0.006694464206695556, 0.006773056030273438, 0.006963776111602783, 0.00678278398513794, 0.006713344097137451, 0.006819392204284668, 0.00682585620880127, 0.0067402877807617185, 0.00674560022354126, 0.006761216163635254, 0.006709248065948486, 0.006747647762298584, 0.006713856220245362, 0.006862592220306396, 0.006695168018341064, 0.006714816093444824, 0.006697535991668701, 0.006704671859741211, 0.006707680225372314, 0.00669923210144043, 0.006682144165039062, 0.006737855911254883, 0.006773151874542236, 0.006679967880249023, 0.006725855827331543, 0.00678326416015625, 0.006702239990234375, 0.006693183898925781, 0.006715456008911133, 0.0067097277641296384, 0.006737631797790527, 0.006711584091186523, 0.006699007987976074, 0.006881279945373535, 0.0067350401878356935, 0.006789792060852051, 0.006844031810760498, 0.006736608028411866, 0.006719520092010498, 0.0071077442169189455, 0.006693727970123291, 0.006741407871246338, 0.0067276802062988285, 0.006672544002532959, 0.006715839862823487, 0.006739967823028564, 0.006686336040496827, 0.006721151828765869, 0.006720160007476807, 0.006678815841674805, 0.00671827220916748, 
0.006749152183532715, 0.006710432052612305, 0.006921088218688965, 0.007185855865478516, 0.006701888084411621, 0.0066921601295471195, 0.006721375942230224, 0.006770815849304199, 0.006679008007049561, 0.006735424041748047, 0.006801695823669434, 0.0067933759689331056, 0.006710879802703858, 0.006723423957824707, 0.006746687889099121, 0.0067341761589050295, 0.006702144145965577, 0.006795008182525635, 0.006807936191558838, 0.006686495780944824, 0.006787680149078369, 0.006767871856689453, 0.006705215930938721, 0.006703904151916504, 0.006676032066345215, 0.006713535785675049, 0.006746463775634766, 0.006833600044250488, 0.006713088035583496, 0.006707935810089111, 0.006708384037017822, 0.006666656017303467, 0.006708703994750977, 0.006693855762481689, 0.006801472187042236, 0.00671123218536377, 0.0066806402206420895, 0.006702208042144776, 0.006697440147399902, 0.00666812801361084, 0.0066743998527526854, 0.006690464019775391, 0.006685152053833008, 0.0067199039459228515, 0.006696959972381592, 0.006690368175506591, 0.006640255928039551, 0.0067084159851074215, 0.006679296016693115, 0.006634880065917969, 0.006734464168548584, 0.0067010560035705566, 0.006672383785247803, 0.006680543899536133, 0.0067092800140380856, 0.006682047843933106, 0.006660672187805175, 0.006741631984710694, 0.006743680000305176, 0.006759103775024414, 0.007036064147949219, 0.007029664039611816, 0.006661856174468994, 0.0068488001823425295, 0.006713344097137451, 0.006735136032104492, 0.006720479965209961, 0.006702847957611084, 0.00667852783203125, 0.006666240215301514, 0.006681920051574707, 0.00668339204788208, 0.007280128002166748, 0.0074115839004516605, 0.009368160247802734, 0.007675392150878906, 0.006846528053283691, 0.0070143680572509765, 0.007102911949157715, 0.0067785921096801755, 0.006760128021240234, 0.006802015781402588, 0.006673727989196777, 0.006820543766021729, 0.006723040103912353, 0.006709375858306885, 0.006676896095275879, 0.006721536159515381, 0.006689856052398682, 0.006721888065338135, 0.006728288173675537, 0.006725632190704346, 0.006711296081542969, 0.006774784088134766, 0.006676608085632324, 0.006672255992889405, 0.006840320110321045, 0.0067218561172485355, 0.0067396478652954105, 0.006707200050354004, 0.006679840087890625, 0.006672800064086914, 0.0067259521484375, 0.006809375762939453, 0.006669792175292968, 0.006698880195617676, 0.006755104064941406, 0.006660192012786865, 0.007040671825408936, 0.006723936080932617, 0.006750048160552979, 0.006690624237060547, 0.00668387222290039, 0.006748576164245606, 0.00664192008972168, 0.006688767910003662, 0.006706975936889649, 0.0066390719413757324, 0.006693151950836182, 0.006693280220031738, 0.006647840023040772, 0.006760479927062989, 0.006690176010131836, 0.006652607917785645, 0.006673503875732422, 0.0066752958297729495, 0.006713247776031494, 0.006705247879028321, 0.006688960075378418, 0.006721343994140625, 0.0066641921997070315, 0.00670739221572876, 0.006716544151306152, 0.006682464122772217, 0.006732416152954101, 0.006791359901428222, 0.006688864231109619, 0.006770624160766602, 0.006687744140625, 0.006648896217346191, 0.006692480087280273, 0.00666860818862915, 0.006659327983856201, 0.006703680038452149, 0.006672639846801758, 0.006698400020599365, 0.006674335956573486, 0.006672544002532959, 0.006652639865875244, 0.006689599990844727, 0.006689792156219483, 0.0068566398620605465, 0.006811391830444336, 0.006766560077667236, 0.006723584175109864, 0.006695199966430664, 0.006688767910003662, 0.006713344097137451, 0.006698751926422119, 0.006712704181671142, 0.006673279762268066, 
0.0066641921997070315, 0.006772736072540283, 0.006713344097137451, 0.006725279808044433, 0.006723711967468262, 0.006778880119323731, 0.006716800212860107, 0.0066845440864562984, 0.006648799896240234, 0.006696959972381592, 0.006686719894409179, 0.00666758394241333, 0.006654655933380127, 0.006704895973205566, 0.006688704013824463, 0.006643040180206299, 0.006745759963989258, 0.00669593620300293, 0.006633471965789795, 0.006719647884368896, 0.006788095951080322, 0.006695775985717773, 0.006699071884155273, 0.006750400066375732, 0.006762080192565918, 0.0066992959976196285, 0.006721407890319824, 0.006694176197052002, 0.006676640033721924, 0.006699584007263184, 0.006684000015258789, 0.0066650562286376955, 0.006751743793487549, 0.006678624153137207, 0.006668799877166748, 0.0066986241340637204, 0.0066600642204284665, 0.006671679973602295, 0.006685503959655762, 0.00670959997177124, 0.006760096073150635, 0.00674345588684082, 0.006700863838195801, 0.006673183917999268, 0.006719488143920899, 0.00676694393157959, 0.006694464206695556, 0.006702720165252685, 0.006837887763977051, 0.00677564811706543, 0.006751776218414307, 0.006724160194396972, 0.0067190399169921875, 0.006654208183288574, 0.006713664054870606, 0.006665664196014404, 0.0066687679290771485, 0.0067337918281555175, 0.006690720081329346, 0.006668288230895996, 0.006686719894409179, 0.006666240215301514, 0.007080160140991211, 0.006788479804992676, 0.0067116799354553225, 0.006717472076416015, 0.006702847957611084, 0.006666368007659912, 0.00661516809463501, 0.006688064098358155, 0.006697728157043457, 0.0066293120384216305, 0.006715392112731934, 0.007185664176940918, 0.006853248119354248, 0.006674111843109131, 0.006791264057159424, 0.006768735885620117, 0.006651904106140137, 0.0066705279350280765, 0.006807263851165772, 0.0066622400283813474, 0.006645760059356689, 0.0067125439643859865, 0.006673183917999268, 0.006664031982421875, 0.006688928127288819, 0.006661952018737793, 0.006696800231933594, 0.006691167831420899, 0.006657536029815674, 0.006653600215911866, 0.006687583923339843, 0.006717440128326416, 0.006699007987976074, 0.006686143875122071, 0.0067136001586914065, 0.006666272163391113, 0.0066921601295471195, 0.006669280052185059, 0.006659552097320557, 0.006701600074768066, 0.006645279884338379, 0.0066646718978881836, 0.006670432090759277, 0.006731616020202637, 0.006657120227813721, 0.006707647800445557, 0.006685279846191407, 0.0066696319580078125, 0.006683487892150879, 0.006674208164215088, 0.006897664070129395, 0.006747456073760987, 0.006764992237091065, 0.006666495800018311, 0.006672383785247803, 0.006660096168518067, 0.006649856090545654, 0.006676479816436768, 0.006653088092803955, 0.006680799961090088, 0.006686431884765625, 0.006664351940155029, 0.0066629118919372555, 0.006709248065948486, 0.006702655792236328, 0.006664639949798584, 0.00671292781829834, 0.00675878381729126, 0.006669919967651367, 0.006742303848266602, 0.006721119880676269, 0.006701632022857666, 0.006705023765563965, 0.006723711967468262, 0.006673855781555176, 0.006709792137145996, 0.006772799968719483, 0.006676447868347168, 0.006650559902191162, 0.006687039852142334, 0.006684480190277099, 0.006649472236633301, 0.006725632190704346, 0.006668799877166748, 0.006670207977294922, 0.00667852783203125, 0.0067686400413513184, 0.006651679992675782, 0.006729951858520508, 0.0067131838798522945, 0.006739999771118164, 0.006780576229095459, 0.006707808017730713, 0.006688831806182861, 0.0066806721687316896, 0.0067069120407104495, 0.006707488059997558, 0.006679679870605469, 0.00671395206451416, 
0.006750207901000976, 0.006656000137329102, 0.006702176094055176, 0.0066854081153869625, 0.006787263870239258, 0.0067686400413513184, 0.006694623947143555, 0.006684671878814697, 0.006756800174713135, 0.006682464122772217, 0.006656000137329102, 0.006702335834503173, 0.006671103954315185, 0.006677728176116943, 0.006705952167510986, 0.0066826238632202144, 0.00673363208770752, 0.006721727848052979, 0.006720672130584717, 0.006668831825256348, 0.006719808101654053, 0.006721759796142578, 0.00666761589050293, 0.006713119983673096, 0.006830527782440186, 0.006707424163818359, 0.006727424144744873, 0.006684927940368652, 0.006694911956787109, 0.00665340805053711, 0.006754464149475098, 0.0066991357803344725, 0.006684927940368652, 0.006766592025756836, 0.007132544040679932, 0.006908512115478516, 0.008523296356201172, 0.007635456085205078, 0.006809855937957764, 0.006751200199127197, 0.006785920143127441, 0.006774687767028808, 0.006727583885192871, 0.006707200050354004, 0.006660096168518067, 0.007714303970336914, 0.006731711864471436, 0.006752511978149414, 0.006746431827545166, 0.006719488143920899, 0.008240639686584473, 0.006736288070678711, 0.006680960178375244, 0.006689824104309082, 0.006707231998443603, 0.006750879764556884, 0.0068438401222229005, 0.006705728054046631, 0.006701280117034912, 0.006737696170806885, 0.0067226881980895995, 0.006705376148223877, 0.006740640163421631, 0.006772799968719483, 0.006694464206695556, 0.006756864070892334, 0.006694784164428711, 0.006686336040496827, 0.006672736167907715, 0.006684703826904297, 0.006696864128112793, 0.0067290239334106446, 0.006699808120727539, 0.006729728221893311, 0.006662144184112549, 0.006690559864044189, 0.006911295890808106, 0.006710207939147949, 0.0066744318008422855, 0.00680787181854248, 0.006715072154998779, 0.0066641921997070315, 0.006699007987976074, 0.006688191890716553, 0.006681280136108398, 0.006698880195617676, 0.006672544002532959, 0.006745952129364014, 0.006762495994567871, 0.006668288230895996, 0.006733151912689209, 0.006691487789154052, 0.006661344051361084, 0.006742303848266602, 0.006717631816864014, 0.006666560173034668, 0.006711296081542969, 0.006945824146270752, 0.006703743934631348, 0.006682975769042969, 0.006715392112731934, 0.006720608234405518, 0.006680960178375244, 0.006683296203613282, 0.006772607803344727, 0.006686079978942871, 0.006690336227416992, 0.006738111972808838, 0.006701759815216065, 0.0067313919067382814, 0.006719840049743652, 0.00678879976272583, 0.006661920070648193, 0.006715648174285889, 0.006720064163208007, 0.00668342399597168, 0.006699967861175537, 0.0067003521919250485, 0.00666815996170044, 0.0067571840286254885, 0.006694496154785156, 0.006730144023895264, 0.006651519775390625, 0.0067272958755493165, 0.0066977920532226565, 0.006671840190887451, 0.006709343910217285, 0.0068058881759643555, 0.006666240215301514, 0.006708799839019775, 0.0066603198051452634, 0.006657824039459228, 0.006690464019775391, 0.00672870397567749, 0.006696959972381592, 0.007129024028778077, 0.006710207939147949, 0.006672671794891357, 0.006745823860168457, 0.006738848209381103, 0.006668288230895996, 0.0067586879730224605, 0.007464672088623047, 0.00832102394104004, 0.010098688125610352, 0.011272512435913085, 0.007030464172363281, 0.00683622407913208, 0.006776415824890137, 0.0067792959213256836, 0.006823935985565186, 0.007241600036621094, 0.00672166395187378, 0.006715392112731934, 0.006744063854217529, 0.006739967823028564, 0.006696959972381592, 0.006730879783630371, 0.006779935836791992, 0.0066947522163391116, 0.006731520175933838, 
0.006733280181884765, 0.006791744232177734, 0.0067853121757507325, 0.006700992107391357, 0.006702655792236328, 0.006768320083618164, 0.006728447914123535, 0.006770559787750244, 0.006737919807434082, 0.00671721601486206, 0.006745952129364014, 0.006713727951049805, 0.006746111869812011, 0.006692863941192627, 0.006782976150512696, 0.006702911853790283, 0.006721727848052979, 0.006686719894409179, 0.006737919807434082, 0.006804736137390137, 0.006734720230102539, 0.0066679039001464845, 0.00671292781829834, 0.0066703038215637205, 0.006675136089324951, 0.00670531177520752, 0.006725471973419189, 0.006684671878814697, 0.006789120197296142, 0.006769023895263672, 0.006688608169555664, 0.006733535766601562, 0.006690176010131836, 0.006741888046264648, 0.006693183898925781, 0.006660384178161621, 0.0067422399520874025, 0.006738976001739502, 0.006677472114562989, 0.006647264003753662, 0.00667251205444336, 0.006775487899780273, 0.00668233585357666, 0.006737919807434082, 0.006750207901000976, 0.006748159885406494, 0.006712416172027588, 0.006690783977508545, 0.006672832012176513, 0.006806015968322754, 0.006727839946746826, 0.006722943782806396, 0.006668320178985596, 0.006758848190307617, 0.006686880111694336, 0.006796768188476562, 0.006833600044250488, 0.006785920143127441, 0.006662208080291748, 0.006839360237121582, 0.0066856322288513185, 0.0067309122085571285, 0.006706367969512939, 0.007151264190673828, 0.0066778559684753415, 0.006662816047668457, 0.006699007987976074, 0.006651423931121826, 0.006667871952056884, 0.006822783946990967, 0.006651936054229736, 0.0066295042037963865, 0.006690815925598144, 0.006692863941192627, 0.006645760059356689, 0.006661664009094239, 0.006734399795532226, 0.006645664215087891, 0.006708992004394531, 0.006711552143096924, 0.006866047859191895, 0.006688735961914063, 0.006689695835113525, 0.006676191806793213, 0.006729472160339356, 0.00672540807723999, 0.006687424182891846, 0.006645823955535889, 0.0067686400413513184, 0.0066724481582641606, 0.006647744178771973, 0.006674272060394287, 0.006694655895233154, 0.006704800128936768, 0.006734784126281738, 0.006714399814605713, 0.006708000183105469, 0.006709248065948486, 0.006698527812957764, 0.006694431781768799, 0.0067123198509216305, 0.006754240036010742, 0.006883327960968018, 0.006759647846221924, 0.006740128040313721, 0.006726272106170654, 0.006809599876403808, 0.0067105917930603024, 0.006689472198486328, 0.0067420158386230465, 0.006868351936340332, 0.006853151798248291, 0.006891615867614746, 0.0067333121299743654, 0.006685184001922607, 0.006659615993499756, 0.006756832122802734, 0.006778880119323731, 0.006811615943908692, 0.0067420477867126466, 0.0067073597908020016, 0.006652768135070801, 0.006711743831634522, 0.0068031039237976074, 0.0067432961463928225, 0.006749855995178223, 0.00672156810760498, 0.006754528045654297, 0.006718944072723389, 0.006750239849090576, 0.006743519783020019, 0.006728415966033936, 0.006845888137817383, 0.0067870721817016606, 0.006725632190704346, 0.006764895915985107, 0.006798912048339844, 0.0067420158386230465, 0.006725696086883545, 0.006743743896484375, 0.006795680046081543, 0.006707456111907959, 0.006741663932800293, 0.006760575771331787, 0.006719391822814941, 0.006744063854217529, 0.006751711845397949, 0.00674451208114624, 0.006850783824920654, 0.006743807792663575, 0.006709407806396484, 0.0067086400985717776, 0.006751840114593506, 0.006744480133056641, 0.006822463989257813, 0.006748223781585693, 0.006815616130828857, 0.006715456008911133, 0.0067686400413513184, 0.006801663875579834, 0.00689251184463501, 
0.006763360023498535, 0.006813632011413574, 0.006720704078674316, 0.00671827220916748, 0.006770688056945801, 0.006709248065948486, 0.006665215969085693, 0.006703135967254639, 0.006675007820129394, 0.006670752048492432, 0.006719456195831299, 0.00667471981048584, 0.006704864025115967, 0.0066826558113098145, 0.006846464157104492, 0.00667683219909668, 0.006697728157043457, 0.006826591968536377, 0.006677919864654541, 0.0066774082183837895, 0.0067411518096923825, 0.006701151847839356, 0.006672768115997314, 0.0067077441215515135, 0.006688127994537354, 0.006655807971954346, 0.0066847681999206545, 0.006677248001098633, 0.00678278398513794, 0.006703104019165039, 0.006692863941192627, 0.0066776638031005855, 0.006683487892150879, 0.006703104019165039, 0.006686719894409179]",tokens/s,148.45800009513988,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,813.34272,545.128448,0.0,159.383552,143.673856,s,1,7.43645556640625,7.43645556640625,0.0,7.43645556640625,7.43645556640625,7.43645556640625,7.43645556640625,[7.43645556640625],,kWh,1.1159478924984492e-05,1.2237030355358137e-06,2.711946613997984e-06,1.509512857451829e-05,,MB,1345.06496,616.431616,0.0,199.22944,186.684928,s,25,0.19603059291839603,0.00784122371673584,6.264749272878208e-05,0.007823296070098877,0.00793838710784912,0.007949881744384765,0.007957512989044188,"[0.007959455966949462, 0.00776358413696289, 0.007892320156097412, 0.007823552131652832, 0.007804063796997071, 0.0078123841285705565, 0.007784383773803711, 0.007930016040802002, 0.007783071994781494, 0.007763072013854981, 0.007790463924407959, 0.007779007911682129, 0.007831840038299561, 0.007813087940216065, 0.00792470407485962, 0.007838528156280517, 0.007943967819213867, 0.007823296070098877, 0.00790940809249878, 0.00787820816040039, 0.007788000106811524, 0.00795136022567749, 0.007866112232208252, 0.007788352012634277, 0.007788352012634277]",tokens/s,32647.965323780885,kWh,2.2858353291895563e-07,2.5208711314408596e-08,1.0111663009710281e-07,3.5490887433046703e-07,tokens/kWh,721311915.5809844,MB,1384.38656,618.528768,0.0,201.326592,186.687488,s,25,9.894076324462889,0.39576305297851555,0.002099559086157807,0.3956947937011719,0.3981936584472656,0.3995603088378906,0.4015686962890625,"[0.4021055908203125, 0.3958044738769531, 0.3957227783203125, 0.3926264343261719, 0.39688644409179685, 0.3934579772949219, 0.3939168395996094, 0.39432684326171874, 0.397101318359375, 0.39589395141601563, 0.39432000732421874, 0.3956947937011719, 0.39451702880859374, 0.394517578125, 0.3958472900390625, 0.394456298828125, 0.39799301147460936, 0.39528375244140623, 0.39662890625, 0.39298492431640625, 0.39456201171875, 0.3962434387207031, 0.3949886779785156, 0.3998685302734375, 
0.3983274230957031]",tokens/s,159.18615829815732,kWh,1.1350921557947191e-05,1.2516806070438836e-06,4.04845933913384e-06,1.665106150412491e-05,tokens/kWh,3783542.5678052553,,s,1575,9.882510946273829,0.006274610124618289,0.00013488673599689358,0.006253056049346924,0.00632426233291626,0.006379203271865845,0.0068573912525176995,"[0.006295584201812744, 0.006387680053710937, 0.006223872184753418, 0.006385216236114502, 0.006293536186218262, 0.006332672119140625, 0.0062791361808776855, 0.006305344104766845, 0.006375648021697998, 0.007053728103637695, 0.008384511947631837, 0.008130559921264649, 0.007784383773803711, 0.006311135768890381, 0.006349664211273193, 0.0062722558975219726, 0.006347519874572754, 0.006278816223144531, 0.006261343955993652, 0.0062481918334960935, 0.006240032196044922, 0.006287807941436767, 0.0063417601585388185, 0.006331039905548096, 0.006258304119110107, 0.006295904159545899, 0.0062501440048217775, 0.006332863807678222, 0.0062483839988708495, 0.006263967990875244, 0.006290304183959961, 0.006295519828796387, 0.006219967842102051, 0.00626259183883667, 0.006245728015899658, 0.006271711826324463, 0.00623356819152832, 0.006275551795959473, 0.006244351863861084, 0.006362368106842041, 0.006266975879669189, 0.006216351985931397, 0.006260735988616943, 0.006292928218841553, 0.006224448204040527, 0.0062197761535644535, 0.00626262378692627, 0.006252607822418213, 0.006230144023895263, 0.00626259183883667, 0.006244512081146241, 0.0062724161148071285, 0.0062325439453125, 0.006260863780975342, 0.006260640144348144, 0.006225120067596435, 0.006241151809692383, 0.006285312175750732, 0.006246399879455566, 0.006278240203857422, 0.0062473278045654295, 0.006230016231536865, 0.006266751766204834, 0.006162943840026855, 0.006215551853179932, 0.006272352218627929, 0.006205664157867431, 0.006248672008514405, 0.006224095821380615, 0.006261983871459961, 0.006304543972015381, 0.006236159801483154, 0.006240255832672119, 0.006299263954162597, 0.006380032062530518, 0.006208799839019776, 0.006284927845001221, 0.0063350720405578615, 0.006316415786743164, 0.0062975997924804685, 0.006305791854858398, 0.006275328159332275, 0.006272096157073975, 0.006281888008117676, 0.006248640060424805, 0.006225632190704346, 0.006246848106384277, 0.0062904319763183595, 0.006238304138183594, 0.006212160110473632, 0.00638972806930542, 0.006225984096527099, 0.0062382397651672365, 0.006225855827331543, 0.006229663848876953, 0.006762847900390625, 0.006277120113372803, 0.006270976066589356, 0.006279007911682129, 0.0062278399467468265, 0.006279039859771729, 0.006223936080932618, 0.006246751785278321, 0.006252543926239014, 0.006254144191741943, 0.006228032112121582, 0.006223872184753418, 0.006218272209167481, 0.006260799884796143, 0.0062379841804504396, 0.006418591976165772, 0.006344543933868408, 0.006264832019805908, 0.0062341117858886715, 0.006267072200775147, 0.006234208106994629, 0.006266304016113281, 0.006205728054046631, 0.00637500810623169, 0.006273632049560547, 0.006258656024932861, 0.006353824138641357, 0.006296512126922608, 0.0063240318298339844, 0.006269120216369629, 0.006301152229309082, 0.006184959888458252, 0.006639616012573242, 0.006280255794525146, 0.006267680168151856, 0.006301536083221436, 0.0062549118995666505, 0.006426911830902099, 0.006258399963378907, 0.006338560104370118, 0.006311359882354736, 0.006218175888061523, 0.0062557439804077145, 0.006237184047698975, 0.006262784004211426, 0.006239327907562256, 0.006225984096527099, 0.006249504089355469, 0.006233376026153564, 0.00624505615234375, 0.006223487854003906, 
0.006281023979187012, 0.006222239971160889, 0.0062111358642578125, 0.006239871978759765, 0.006213632106781006, 0.0062484159469604495, 0.006175871849060059, 0.006241504192352295, 0.006224448204040527, 0.0062564477920532225, 0.006234240055084228, 0.0062137598991394045, 0.006239520072937012, 0.006263743877410888, 0.006245535850524902, 0.006201536178588867, 0.006256959915161133, 0.006210752010345459, 0.006228096008300781, 0.0062863359451293946, 0.006252255916595459, 0.006232096195220947, 0.006433951854705811, 0.007024576187133789, 0.006275936126708985, 0.006285312175750732, 0.006270976066589356, 0.00619539213180542, 0.006286367893218994, 0.006228799819946289, 0.006219007968902588, 0.006254559993743897, 0.006215583801269531, 0.006316736221313477, 0.006203360080718994, 0.006301919937133789, 0.00623635196685791, 0.006276127815246582, 0.00622054386138916, 0.006219679832458496, 0.006218080043792725, 0.006499519824981689, 0.0062548799514770504, 0.0061337599754333495, 0.00621673583984375, 0.006239200115203857, 0.006215904235839844, 0.0062379841804504396, 0.006228127956390381, 0.006340159893035889, 0.00619539213180542, 0.006254079818725586, 0.006215519905090332, 0.006222591876983642, 0.0062382397651672365, 0.006258656024932861, 0.006250495910644531, 0.006184864044189453, 0.006234079837799072, 0.006178016185760498, 0.006239039897918701, 0.006249983787536621, 0.006220096111297607, 0.0062548799514770504, 0.006217728137969971, 0.006270016193389892, 0.006230720043182373, 0.0062016000747680666, 0.006166528224945069, 0.0062065601348876955, 0.006185887813568115, 0.006182911872863769, 0.0062336320877075194, 0.006196703910827637, 0.00619820785522461, 0.006219840049743653, 0.006228127956390381, 0.006180831909179688, 0.006238080024719238, 0.006199295997619629, 0.006225728034973145, 0.006257887840270996, 0.006198592185974121, 0.0062146239280700686, 0.0061877121925354005, 0.00622819185256958, 0.0061968002319335936, 0.00624457597732544, 0.006254591941833496, 0.006300864219665528, 0.006265664100646972, 0.006232063770294189, 0.006256991863250732, 0.006213280200958252, 0.006254591941833496, 0.006191103935241699, 0.00626470422744751, 0.0062271361351013185, 0.006258783817291259, 0.0062165441513061526, 0.006225247859954834, 0.006238880157470703, 0.006187007904052734, 0.006243455886840821, 0.006218880176544189, 0.006207520008087158, 0.006174752235412598, 0.006197120189666748, 0.006207615852355957, 0.0062262721061706544, 0.006516384124755859, 0.006227968215942382, 0.006309887886047363, 0.006294911861419678, 0.006234752178192139, 0.006238143920898437, 0.006221216201782226, 0.006369760036468506, 0.006223680019378662, 0.006230720043182373, 0.006213312149047852, 0.0064412479400634765, 0.006252480030059814, 0.006245984077453613, 0.006322400093078613, 0.006208608150482178, 0.006605696201324463, 0.0062873601913452145, 0.006282815933227539, 0.006265408039093018, 0.00621343994140625, 0.006451007843017578, 0.006283520221710205, 0.006374879837036133, 0.006322112083435059, 0.006230912208557129, 0.006306975841522217, 0.006250400066375733, 0.006277791976928711, 0.006248447895050049, 0.006286752223968506, 0.006294144153594971, 0.0062873921394348145, 0.006681888103485107, 0.0065790719985961916, 0.006266655921936035, 0.006273151874542237, 0.006250368118286133, 0.0062782721519470215, 0.006255360126495362, 0.006326399803161621, 0.006242303848266601, 0.006289120197296143, 0.00634665584564209, 0.006226336002349854, 0.006262752056121826, 0.006400000095367431, 0.00629094409942627, 0.006297952175140381, 0.006244031906127929, 0.0062614078521728515, 
0.0062351679801940915, 0.006314047813415527, 0.006220511913299561, 0.006232096195220947, 0.006303840160369873, 0.006272895812988281, 0.006226143836975098, 0.006227744102478027, 0.006205152034759522, 0.0061831998825073245, 0.0062507839202880855, 0.0061990079879760745, 0.006242303848266601, 0.006242303848266601, 0.006217728137969971, 0.006211840152740479, 0.006239999771118164, 0.0062763838768005375, 0.0061970558166503905, 0.006276000022888184, 0.006268928050994873, 0.00622537612915039, 0.006237919807434082, 0.006316256046295166, 0.00624022388458252, 0.006206079959869385, 0.006269983768463135, 0.006203487873077392, 0.006239359855651856, 0.006250239849090576, 0.0062873601913452145, 0.006252031803131103, 0.006218239784240722, 0.006280831813812256, 0.006187392234802246, 0.0064349122047424314, 0.006282559871673584, 0.006214240074157715, 0.00621776008605957, 0.006309823989868164, 0.006240287780761719, 0.006191103935241699, 0.006233280181884766, 0.0061898880004882815, 0.00624128007888794, 0.006267072200775147, 0.0062259202003479, 0.00621446418762207, 0.0062065281867980954, 0.006252511978149414, 0.006257887840270996, 0.006405951976776123, 0.0062483839988708495, 0.006201344013214111, 0.006281216144561768, 0.006210976123809815, 0.006206240177154541, 0.006184768199920654, 0.006231776237487793, 0.0061855998039245606, 0.006231071949005127, 0.006192800045013428, 0.0062679038047790524, 0.006201312065124511, 0.0062111358642578125, 0.0062111358642578125, 0.006171520233154297, 0.0062975997924804685, 0.006205088138580323, 0.006238783836364746, 0.006208415985107422, 0.006142240047454834, 0.006202655792236328, 0.006185887813568115, 0.006217535972595215, 0.006184959888458252, 0.006268223762512207, 0.006168384075164795, 0.006212800025939941, 0.006231904029846191, 0.00620524787902832, 0.006211616039276123, 0.00622815990447998, 0.006243904113769531, 0.006215487957000733, 0.0062837119102478025, 0.006284704208374023, 0.0062082881927490235, 0.00621120023727417, 0.006199584007263183, 0.006214719772338867, 0.006226816177368164, 0.006227712154388428, 0.0062585601806640625, 0.006243872165679932, 0.006187839984893799, 0.0062687678337097165, 0.0062486081123352055, 0.006211008071899414, 0.00621830415725708, 0.006176767826080322, 0.006204895973205567, 0.00627184009552002, 0.006252223968505859, 0.006201344013214111, 0.006250495910644531, 0.00624348783493042, 0.006232927799224854, 0.006288383960723877, 0.0062882242202758785, 0.006297952175140381, 0.006227392196655274, 0.006317823886871338, 0.006251423835754394, 0.006350560188293457, 0.006324448108673096, 0.006196735858917236, 0.006515200138092041, 0.006235936164855957, 0.006268928050994873, 0.006229856014251709, 0.0062503361701965335, 0.006238527774810791, 0.0062074241638183595, 0.006334527969360352, 0.00621292781829834, 0.006265535831451416, 0.006334752082824707, 0.006272736072540283, 0.006270976066589356, 0.006248000144958496, 0.006324672222137452, 0.006182911872863769, 0.006284927845001221, 0.006201344013214111, 0.006254591941833496, 0.006262176036834717, 0.006316639900207519, 0.006340191841125488, 0.006398079872131347, 0.006283199787139893, 0.006246079921722412, 0.0062317438125610355, 0.0062672638893127445, 0.006221888065338135, 0.006258240222930908, 0.0061842560768127445, 0.006230016231536865, 0.00627785587310791, 0.006273888111114502, 0.006231647968292236, 0.006212096214294433, 0.006244351863861084, 0.0061931519508361815, 0.006363135814666748, 0.006229472160339355, 0.006237887859344483, 0.006211552143096924, 0.006214528083801269, 0.006217120170593261, 0.0062259202003479, 
0.007049856185913086, 0.006219744205474853, 0.006189055919647217, 0.006238207817077636, 0.0062828798294067385, 0.006246111869812012, 0.006183519840240479, 0.006256224155426026, 0.006369311809539795, 0.00624019193649292, 0.006253056049346924, 0.0062197761535644535, 0.0062197761535644535, 0.006235936164855957, 0.006278656005859375, 0.006183775901794433, 0.006288959980010987, 0.006232319831848144, 0.006196864128112793, 0.006209983825683594, 0.006207551956176758, 0.006208799839019776, 0.006209472179412842, 0.006216224193572998, 0.006187039852142334, 0.006228127956390381, 0.006182720184326172, 0.006254784107208252, 0.0061972479820251464, 0.0061931519508361815, 0.006244448184967041, 0.006252448081970215, 0.006223872184753418, 0.006211232185363769, 0.006227519989013672, 0.006201216220855713, 0.006175039768218994, 0.006229279994964599, 0.006208255767822266, 0.006239999771118164, 0.006213600158691406, 0.006251808166503906, 0.006226912021636963, 0.0062197761535644535, 0.006232063770294189, 0.006219871997833252, 0.006225823879241943, 0.006262784004211426, 0.006444672107696533, 0.006586976051330566, 0.006671199798583985, 0.0064676799774169924, 0.0062226881980896, 0.006342400074005127, 0.006260447978973389, 0.006268479824066162, 0.006302688121795654, 0.006303296089172363, 0.006332064151763916, 0.006191840171813965, 0.006346816062927246, 0.006256127834320068, 0.0062552962303161624, 0.006335840225219726, 0.006215968132019043, 0.006262559890747071, 0.006197696208953858, 0.006268064022064209, 0.006209983825683594, 0.0062540478706359865, 0.006314176082611084, 0.006214079856872559, 0.0063060798645019536, 0.00621779203414917, 0.006285312175750732, 0.006221759796142578, 0.006264832019805908, 0.006250495910644531, 0.006248640060424805, 0.006369088172912598, 0.006643295764923096, 0.007281055927276611, 0.006293504238128662, 0.006233888149261475, 0.006262176036834717, 0.0061981120109558105, 0.006280831813812256, 0.006257343769073487, 0.006270624160766601, 0.006342656135559082, 0.006301023960113525, 0.006265312194824219, 0.00620959997177124, 0.006236288070678711, 0.006230271816253662, 0.006258048057556152, 0.006225312232971191, 0.00621065616607666, 0.00624012804031372, 0.006112063884735107, 0.00624128007888794, 0.006516736030578613, 0.006285376071929931, 0.006320064067840576, 0.006272799968719482, 0.0062464637756347655, 0.0062035517692565914, 0.0062873601913452145, 0.006184800148010254, 0.00631328010559082, 0.0062165441513061526, 0.006337855815887451, 0.006267551898956299, 0.006256095886230469, 0.006306560039520264, 0.006195007801055908, 0.006297535896301269, 0.0062239041328430176, 0.006321760177612304, 0.006238272190093994, 0.0062631678581237795, 0.006271200180053711, 0.006387584209442139, 0.0063235840797424316, 0.006219295978546142, 0.006341631889343262, 0.006230368137359619, 0.006256576061248779, 0.006315231800079346, 0.006217728137969971, 0.0062839360237121585, 0.006255712032318115, 0.006296319961547852, 0.006227968215942382, 0.0065742721557617186, 0.006549312114715576, 0.006317376136779785, 0.006296256065368652, 0.006268928050994873, 0.006283008098602295, 0.006260000228881836, 0.006310880184173584, 0.006289408206939697, 0.006262080192565918, 0.006265535831451416, 0.0062197761535644535, 0.006258207798004151, 0.00624073600769043, 0.006245696067810059, 0.006342688083648682, 0.00622057580947876, 0.006285183906555176, 0.006217887878417969, 0.006240096092224121, 0.006198912143707275, 0.006229375839233399, 0.006237184047698975, 0.006281216144561768, 0.006242303848266601, 0.0062239041328430176, 0.006266975879669189, 
0.006266751766204834, 0.0061594557762146, 0.006261888027191162, 0.006231135845184326, 0.0062512001991271975, 0.006252543926239014, 0.0062156801223754886, 0.006240255832672119, 0.006232063770294189, 0.006242656230926514, 0.006260064125061035, 0.006308032035827637, 0.006297728061676025, 0.006285312175750732, 0.006330719947814941, 0.0063025918006896975, 0.006251039981842041, 0.0062360639572143555, 0.006242047786712646, 0.006257247924804687, 0.006254079818725586, 0.006334943771362305, 0.006239712238311768, 0.006289984226226807, 0.006259712219238281, 0.006247424125671387, 0.006319680213928223, 0.006270847797393799, 0.006247104167938233, 0.0061989760398864745, 0.006281407833099365, 0.006253952026367187, 0.006264575958251953, 0.006238111972808838, 0.006238719940185547, 0.006219647884368897, 0.006297855854034424, 0.0062503361701965335, 0.006205023765563965, 0.006255519866943359, 0.006207488059997559, 0.006239391803741455, 0.0062226881980896, 0.006296671867370605, 0.006271168231964112, 0.00619817590713501, 0.006284351825714111, 0.006238048076629639, 0.0062432317733764646, 0.006229087829589843, 0.006246496200561524, 0.006217728137969971, 0.006232448101043701, 0.006330111980438233, 0.006218048095703125, 0.006232448101043701, 0.006239327907562256, 0.006232704162597656, 0.006218016147613525, 0.006262176036834717, 0.0062715840339660645, 0.006195199966430664, 0.006253952026367187, 0.00621017599105835, 0.006184991836547852, 0.006190815925598145, 0.006301119804382325, 0.006215744018554688, 0.006240928173065185, 0.006228064060211182, 0.006221792221069336, 0.0062624640464782715, 0.0062016000747680666, 0.0062856640815734865, 0.006237376213073731, 0.006283040046691894, 0.0062659521102905276, 0.006249599933624268, 0.006311552047729492, 0.006245408058166504, 0.006266848087310791, 0.006188127994537353, 0.006282080173492432, 0.006249567985534668, 0.006327199935913086, 0.006277088165283203, 0.006240287780761719, 0.0062993597984313966, 0.0062039680480957034, 0.006315487861633301, 0.00694707202911377, 0.00624835205078125, 0.006280447959899902, 0.006316895961761475, 0.006299647808074951, 0.006197375774383545, 0.006272480010986328, 0.00622431993484497, 0.006229599952697754, 0.006685056209564209, 0.006256192207336426, 0.0063161921501159665, 0.006281023979187012, 0.0062219839096069336, 0.00627558422088623, 0.006186431884765625, 0.00635532808303833, 0.006178751945495605, 0.0063788480758666995, 0.006185696125030517, 0.0062507839202880855, 0.00626038408279419, 0.006211008071899414, 0.0062665920257568355, 0.006302624225616455, 0.006312352180480957, 0.00617142391204834, 0.006257472038269043, 0.0063569917678833006, 0.006220032215118408, 0.006274816036224365, 0.006238304138183594, 0.0063056960105896, 0.006195199966430664, 0.006290976047515869, 0.0061874880790710445, 0.006227071762084961, 0.0061372160911560055, 0.006255231857299805, 0.006262784004211426, 0.006311935901641846, 0.006296800136566162, 0.006264736175537109, 0.006260831832885742, 0.006214687824249268, 0.0062481918334960935, 0.006277056217193604, 0.006257919788360595, 0.006259520053863526, 0.006303840160369873, 0.006285215854644775, 0.006237664222717285, 0.006244895935058593, 0.006252352237701416, 0.006254784107208252, 0.006243360042572021, 0.0062286720275878905, 0.0062849922180175785, 0.006285920143127442, 0.006331744194030761, 0.0062486081123352055, 0.00628275203704834, 0.006335487842559814, 0.0062663998603820804, 0.006272768020629883, 0.006318816184997559, 0.0062466559410095214, 0.006220895767211914, 0.006251167774200439, 0.006245791912078857, 0.006238944053649902, 
0.0063034558296203615, 0.006258848190307617, 0.006232192039489746, 0.006227776050567627, 0.00632428789138794, 0.00621504020690918, 0.0062197761535644535, 0.006238143920898437, 0.006206143856048584, 0.0062215042114257815, 0.006238527774810791, 0.006207520008087158, 0.006219615936279297, 0.006241983890533447, 0.0062074241638183595, 0.006232863903045655, 0.006291168212890625, 0.006305823802947998, 0.006244319915771485, 0.006244351863861084, 0.006258687973022461, 0.006207295894622803, 0.006239520072937012, 0.00631715202331543, 0.0062605438232421875, 0.006203392028808594, 0.00627126407623291, 0.006253983974456787, 0.006226240158081054, 0.0061337599754333495, 0.006356256008148193, 0.00624886417388916, 0.006281407833099365, 0.006295680046081543, 0.0062444801330566405, 0.006324384212493896, 0.006216447830200195, 0.006291679859161377, 0.006265600204467773, 0.006225728034973145, 0.006324416160583496, 0.006477888107299805, 0.006321536064147949, 0.006222400188446045, 0.006309887886047363, 0.006254591941833496, 0.006257952213287354, 0.00634278392791748, 0.006300096035003662, 0.006295135974884033, 0.006230559825897217, 0.00628329610824585, 0.006221824169158936, 0.006285312175750732, 0.006273024082183838, 0.006211584091186524, 0.006238304138183594, 0.0062169919013977054, 0.006236639976501465, 0.006223264217376709, 0.0062287039756774906, 0.006184127807617188, 0.006246528148651123, 0.006240575790405273, 0.006205376148223877, 0.006228608131408691, 0.006239424228668213, 0.006263679981231689, 0.006200191974639893, 0.006252768039703369, 0.00625324821472168, 0.006246399879455566, 0.006307839870452881, 0.006217728137969971, 0.006230016231536865, 0.006326015949249268, 0.006287903785705567, 0.006201056003570557, 0.006236159801483154, 0.006240255832672119, 0.006245888233184814, 0.006211904048919678, 0.006242496013641357, 0.0062341117858886715, 0.00620908784866333, 0.00626035213470459, 0.006236991882324219, 0.006246399879455566, 0.006240384101867676, 0.006205408096313476, 0.006223455905914306, 0.006232384204864502, 0.006200223922729492, 0.006189055919647217, 0.0062932162284851075, 0.006240543842315674, 0.006213791847229004, 0.006298943996429444, 0.006244895935058593, 0.006275072097778321, 0.006196671962738037, 0.006320352077484131, 0.006207007884979248, 0.006246560096740723, 0.006222655773162842, 0.006223360061645508, 0.006310239791870117, 0.006194464206695557, 0.00628553581237793, 0.00622819185256958, 0.006308127880096435, 0.006232063770294189, 0.006273119926452637, 0.006352799892425537, 0.006231840133666992, 0.006308320045471191, 0.006238207817077636, 0.006353888034820556, 0.00622054386138916, 0.006302944183349609, 0.006267712116241455, 0.00625161600112915, 0.006341536045074463, 0.006244351863861084, 0.0063055682182312014, 0.006590047836303711, 0.006522655963897705, 0.0063517122268676755, 0.006318143844604492, 0.006350944042205811, 0.006260223865509033, 0.0062708802223205564, 0.006244416236877441, 0.0062979841232299804, 0.006290976047515869, 0.006239776134490967, 0.0063190398216247555, 0.006260735988616943, 0.006298783779144287, 0.006240960121154785, 0.006277152061462402, 0.006264639854431152, 0.006236480236053467, 0.006262784004211426, 0.006250815868377686, 0.006230847835540771, 0.006341504096984863, 0.00628326416015625, 0.0062341117858886715, 0.006251999855041504, 0.006264512062072754, 0.006308576107025146, 0.0062444801330566405, 0.006209536075592041, 0.006248447895050049, 0.006177216053009033, 0.006241919994354248, 0.006218656063079834, 0.006262976169586182, 0.0061910400390625, 0.006268671989440918, 
0.0062073922157287595, 0.006289504051208496, 0.006272352218627929, 0.006289696216583252, 0.006250879764556885, 0.006246399879455566, 0.006268928050994873, 0.006215616226196289, 0.006291520118713379, 0.006252543926239014, 0.006316031932830811, 0.006272160053253174, 0.006363359928131104, 0.006283135890960694, 0.006254335880279541, 0.006278431892395019, 0.006252575874328614, 0.006301375865936279, 0.006259039878845215, 0.006338431835174561, 0.006327424049377441, 0.00622431993484497, 0.006283487796783447, 0.006227456092834473, 0.006306303977966309, 0.00622815990447998, 0.006323103904724121, 0.006263711929321289, 0.006212704181671143, 0.006267807960510254, 0.0062156801223754886, 0.006275040149688721, 0.006288991928100586, 0.006270656108856201, 0.00622054386138916, 0.006201344013214111, 0.00626259183883667, 0.006176032066345215, 0.006259359836578369, 0.006193439960479736, 0.006262815952301025, 0.006210527896881104, 0.006249407768249512, 0.006211616039276123, 0.0062724161148071285, 0.006268511772155762, 0.006227039813995362, 0.006258592128753662, 0.006199520111083984, 0.006274208068847657, 0.006232704162597656, 0.006247424125671387, 0.006273087978363037, 0.006206399917602539, 0.006238207817077636, 0.006184959888458252, 0.006289408206939697, 0.006123519897460937, 0.006310080051422119, 0.006251359939575195, 0.0062494401931762695, 0.006227200031280518, 0.00619596815109253, 0.006299136161804199, 0.006175456047058106, 0.006464799880981445, 0.00622652816772461, 0.006260640144348144, 0.006235936164855957, 0.006209440231323242, 0.0062848000526428225, 0.006200128078460694, 0.006252543926239014, 0.006180863857269287, 0.0062665920257568355, 0.006199584007263183, 0.006237311840057373, 0.006222720146179199, 0.006243775844573975, 0.006276864051818848, 0.0061898880004882815, 0.006299647808074951, 0.006557695865631104, 0.006760735988616943, 0.007631648063659668, 0.006883423805236816, 0.006850751876831054, 0.006264959812164307, 0.006226592063903809, 0.006221216201782226, 0.00624073600769043, 0.006232319831848144, 0.006306975841522217, 0.006351424217224121, 0.006262911796569824, 0.006219615936279297, 0.006253791809082031, 0.006240992069244384, 0.00625267219543457, 0.006285120010375977, 0.0061989760398864745, 0.006292096138000488, 0.0062053117752075195, 0.006229599952697754, 0.006228064060211182, 0.006250527858734131, 0.006267168045043945, 0.006221824169158936, 0.006432320117950439, 0.006230336189270019, 0.006290847778320312, 0.006303552150726318, 0.00633516788482666, 0.00625593614578247, 0.006255648136138916, 0.006340415954589844, 0.006348576068878174, 0.006253983974456787, 0.006212480068206787, 0.0062566399574279785, 0.006204576015472412, 0.006408671855926514, 0.0062672638893127445, 0.006313183784484863, 0.006250751972198486, 0.006274784088134765, 0.006257472038269043, 0.0062975997924804685, 0.0063201279640197755, 0.00625603199005127, 0.0062849922180175785, 0.006228896141052246, 0.0062997121810913085, 0.006274687767028809, 0.0062503361701965335, 0.006266816139221192, 0.006258783817291259, 0.006293951988220215, 0.006256063938140869, 0.006390336036682129, 0.006231488227844239, 0.006285696029663086, 0.006274879932403565, 0.006311456203460694, 0.006280288219451904, 0.006235904216766357, 0.006279327869415283, 0.006214719772338867, 0.006267839908599853, 0.006225759983062744, 0.006227776050567627, 0.006283455848693847, 0.006293504238128662, 0.006293504238128662, 0.006229983806610107, 0.006470719814300537, 0.006271967887878418, 0.006260447978973389, 0.0063134078979492185, 0.006255807876586914, 0.00625219202041626, 
0.006231552124023438, 0.006265344142913819, 0.006219871997833252, 0.006259903907775879, 0.006236959934234619, 0.006211008071899414, 0.006252128124237061, 0.0061981120109558105, 0.00636352014541626, 0.006217535972595215, 0.006323423862457276, 0.006207136154174805, 0.006248799800872803, 0.0062911038398742675, 0.006234591960906983, 0.006252863883972168, 0.006200640201568603, 0.006273983955383301, 0.006193120002746582, 0.006276415824890137, 0.006244671821594238, 0.00621343994140625, 0.0061394238471984865, 0.00624073600769043, 0.00627945613861084, 0.006201056003570557, 0.00628326416015625, 0.0061972479820251464, 0.006344704151153564, 0.00623638391494751, 0.006303328037261963, 0.006303936004638672, 0.006214687824249268, 0.006291520118713379, 0.006285823822021484, 0.006279583930969238, 0.006236159801483154, 0.006268896102905273, 0.00623414421081543, 0.006231296062469483, 0.006300127983093261, 0.00619379186630249, 0.0062911357879638675, 0.006242015838623047, 0.006260992050170899, 0.006258687973022461, 0.0062566399574279785, 0.006292799949645996, 0.006185664176940918, 0.006294879913330078, 0.006196928024291992, 0.006273407936096192, 0.006214335918426513, 0.006219679832458496, 0.006223519802093506, 0.0062221760749816895, 0.006268479824066162, 0.006168896198272705, 0.00625267219543457, 0.006205535888671875, 0.006264927864074707, 0.006271967887878418, 0.006201727867126465, 0.006279232025146485, 0.006287775993347168, 0.006323775768280029, 0.006185408115386963, 0.006274496078491211, 0.006213344097137451, 0.006212448120117188, 0.006250751972198486, 0.006192160129547119, 0.006249504089355469, 0.006174079895019531, 0.006246304035186768, 0.006200831890106201, 0.006273952007293701, 0.0062873601913452145, 0.006524928092956543, 0.006326176166534424, 0.006659167766571045, 0.006558015823364258, 0.006300352096557617, 0.007456319808959961, 0.006561279773712158, 0.006206783771514892, 0.0062053117752075195, 0.006277311801910401, 0.006251391887664795, 0.006201087951660156, 0.0062259202003479, 0.006240255832672119, 0.0062156801223754886, 0.006187007904052734, 0.006268415927886963, 0.006189184188842773, 0.006242496013641357, 0.0062007360458374025, 0.00621401596069336, 0.006202079772949219, 0.00618668794631958, 0.006198624134063721, 0.0062039680480957034, 0.006318431854248047, 0.006190847873687744, 0.006258687973022461, 0.006299647808074951, 0.006268928050994873, 0.006218976020812989, 0.006207263946533203, 0.00622489595413208, 0.006233119964599609, 0.0062494401931762695, 0.006172224044799805, 0.0062202239036560055, 0.006426623821258545, 0.0062197761535644535, 0.006205664157867431, 0.006196703910827637, 0.006254623889923096, 0.0061855678558349605, 0.006229695796966553, 0.006191103935241699, 0.006248799800872803, 0.0061907520294189455, 0.006216896057128906, 0.0062267518043518065, 0.006207263946533203, 0.006279488086700439, 0.006197184085845947, 0.006225728034973145, 0.006180511951446533, 0.0062510080337524416, 0.006199359893798828, 0.006193088054656982, 0.006236159801483154, 0.006205440044403076, 0.006226143836975098, 0.006187839984893799, 0.006300672054290772, 0.006227935791015625, 0.006286975860595703, 0.006287744045257568, 0.006262688159942627, 0.006234015941619873, 0.006283584117889404, 0.006252416133880615, 0.006205056190490723, 0.006145311832427979, 0.006261023998260498, 0.006226719856262207, 0.006248095989227295, 0.006233983993530273, 0.006221951961517334, 0.006262911796569824, 0.006235680103302002, 0.006273375988006592, 0.006216800212860108, 0.006263711929321289, 0.006235648155212403, 0.006253056049346924, 
0.006266623973846436, 0.006270815849304199, 0.006266751766204834, 0.006271520137786865, 0.006260735988616943, 0.006267231941223144, 0.006246399879455566, 0.006264063835144043, 0.006230432033538819, 0.00626796817779541, 0.006231200218200683, 0.006288832187652588, 0.006208127975463868, 0.006250207901000977, 0.006232063770294189, 0.006227968215942382, 0.0065168957710266115, 0.006284863948822022, 0.00629372787475586, 0.006252607822418213, 0.006306816101074219, 0.0062494721412658695, 0.0062873601913452145, 0.006248447895050049, 0.0062624959945678715, 0.006291168212890625, 0.006225791931152344, 0.006259391784667969, 0.006215712070465088, 0.006285280227661133, 0.0062559680938720705, 0.006241087913513184, 0.006233952045440674, 0.00624835205078125, 0.006262879848480224, 0.006213632106781006, 0.006289216041564941, 0.006209727764129639, 0.006245920181274414, 0.006241824150085449, 0.00625055980682373, 0.006265279769897461, 0.006218175888061523, 0.006262720108032227, 0.006251904010772705, 0.006279871940612793, 0.006254687786102295, 0.0062581758499145506, 0.006268640041351318, 0.006242015838623047, 0.006191999912261963, 0.006258815765380859, 0.006320000171661377, 0.006227519989013672, 0.006304192066192627, 0.006200479984283447, 0.006288576126098633, 0.0062462081909179686, 0.006268735885620117, 0.006296832084655762, 0.006222623825073242, 0.006316256046295166, 0.0062226881980896, 0.00636624002456665, 0.006258592128753662, 0.0062811517715454105, 0.006520927906036377, 0.0062679038047790524, 0.006281375885009766, 0.0062146239280700686, 0.006234047889709472, 0.0062501120567321775, 0.006283423900604248, 0.006289087772369385, 0.006267360210418701, 0.006315040111541748, 0.006236608028411865, 0.0063281598091125485, 0.006236767768859864, 0.006270944118499756, 0.006289504051208496, 0.006242303848266601, 0.006278336048126221, 0.006209407806396484, 0.006314176082611084, 0.006266975879669189, 0.006287583827972412, 0.006232672214508057, 0.006285151958465576, 0.0063012480735778805, 0.006265279769897461, 0.006643680095672608, 0.006264927864074707, 0.006272319793701172, 0.006314911842346191, 0.006213344097137451, 0.006313663959503174, 0.006199615955352783, 0.006388832092285157, 0.006405024051666259, 0.006311935901641846, 0.0063053760528564455, 0.006226336002349854, 0.006307839870452881, 0.006258207798004151, 0.006260799884796143, 0.006274496078491211, 0.006239200115203857, 0.006311264038085938, 0.0062221760749816895, 0.006295872211456299, 0.0061972479820251464, 0.00632147216796875, 0.006138815879821778, 0.0062791681289672855, 0.0062259202003479, 0.006258656024932861, 0.006244383811950684, 0.006254208087921142, 0.006287744045257568, 0.006238207817077636, 0.006280928134918213, 0.0062691841125488285, 0.006258975982666015, 0.00625600004196167, 0.006262176036834717, 0.006252607822418213, 0.006232863903045655, 0.0062708802223205564, 0.006269152164459228, 0.006272863864898681, 0.006237728118896484, 0.006223519802093506, 0.0062822079658508305, 0.006262784004211426, 0.00628326416015625, 0.0062975997924804685, 0.00625216007232666, 0.006258975982666015, 0.006265215873718262, 0.00625216007232666, 0.006284543991088867, 0.006294623851776123, 0.006876287937164306, 0.006237823963165284, 0.00630844783782959, 0.0062980160713195805, 0.0062641921043395995, 0.006259488105773926, 0.006238175868988037, 0.006223584175109863, 0.006219935894012451, 0.0062626562118530274, 0.00618016004562378, 0.006275904178619385, 0.00619868803024292, 0.0062347202301025395, 0.006231103897094727, 0.0062475199699401854, 0.0062912960052490235, 0.0062137598991394045, 
0.006277056217193604, 0.0061931519508361815, 0.0062709121704101565, 0.006208608150482178, 0.006226848125457764, 0.0062334399223327635, 0.00629807996749878, 0.006291679859161377, 0.0061849279403686525, 0.006273024082183838, 0.006219136238098145, 0.006229887962341309, 0.006202367782592774, 0.006323808193206787, 0.00629478406906128, 0.006195199966430664, 0.006233983993530273, 0.006237855911254883, 0.006264736175537109, 0.006226784229278565, 0.006261983871459961, 0.006281504154205322, 0.006263008117675781, 0.006285312175750732, 0.006257919788360595, 0.0062978239059448245, 0.006249311923980713, 0.0062808961868286136, 0.006235775947570801, 0.0062631678581237795, 0.006266880035400391, 0.0062392959594726564, 0.0062700481414794925, 0.006279007911682129, 0.006295551776885986, 0.006225664138793946, 0.006282623767852783, 0.0062740478515625, 0.006247903823852539, 0.0063021121025085445, 0.00628326416015625, 0.006280320167541504, 0.006241312026977539, 0.006274879932403565, 0.006277152061462402, 0.006276639938354492, 0.00628988790512085, 0.006313983917236328, 0.006254432201385498, 0.006271008014678955, 0.006271359920501709, 0.006271999835968017, 0.006287263870239258, 0.006320159912109375, 0.006303904056549073, 0.006240928173065185, 0.0062585282325744625, 0.0062854719161987305, 0.006268352031707764, 0.006716159820556641, 0.006614751815795898, 0.006714879989624023, 0.007045055866241455, 0.0072341117858886715, 0.006334559917449951, 0.006286367893218994, 0.006259615898132324, 0.006269343852996826, 0.006282911777496338, 0.006317376136779785, 0.006298111915588379, 0.006301887989044189, 0.006373023986816407, 0.007264607906341552, 0.006338560104370118, 0.006321728229522705, 0.00629804801940918, 0.006333568096160889, 0.006127039909362793, 0.006261312007904053, 0.006191264152526856, 0.006408031940460205, 0.006227968215942382, 0.006340608119964599, 0.006267136096954346, 0.006393919944763184, 0.006315711975097656, 0.0062156801223754886, 0.006359007835388184, 0.006297632217407227, 0.0065064959526062015, 0.006313983917236328, 0.006317376136779785, 0.006312640190124512, 0.00632422399520874, 0.0063056640625, 0.006370495796203613, 0.006256927967071533, 0.00629417610168457, 0.006256703853607178, 0.006318016052246094, 0.007324800014495849, 0.006293536186218262, 0.006380383968353272, 0.006317408084869385, 0.0065064640045166015, 0.006308544158935547, 0.006522111892700196, 0.006299776077270508, 0.0062798080444335935, 0.0062259521484375, 0.006242144107818604, 0.0062399678230285645, 0.006264575958251953, 0.006219423770904541, 0.0062800321578979496, 0.006228096008300781, 0.006275392055511474, 0.006280543804168701, 0.006236576080322266, 0.006260255813598633, 0.0063385281562805176, 0.006257120132446289, 0.006246399879455566, 0.006272704124450684, 0.006238143920898437, 0.006242015838623047, 0.006263455867767334, 0.006275263786315918, 0.006337440013885498, 0.006255871772766113, 0.006391456127166748, 0.00628227186203003, 0.006322688102722168, 0.006252543926239014, 0.0062379841804504396, 0.006339263916015625, 0.006330368041992188, 0.0062988801002502445, 0.006294271945953369, 0.0064268479347229]",tokens/s,159.37245185585684,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7436.812288,8048.738304,0.0,7646.216192,7627.584,s,1,13.2579482421875,13.2579482421875,0.0,13.2579482421875,13.2579482421875,13.2579482421875,13.2579482421875,[13.2579482421875],,kWh,0.00017457606932500008,1.924887821256329e-05,5.65525452420057e-05,0.0002503774927795691,,MB,1781.61664,8694.66112,0.0,8277.458944,8199.8592,s,10,3.5925811462402346,0.35925811462402335,0.0013695581169296252,0.359052993774414,0.3603923065185547,0.36132835540771485,0.36207719451904297,"[0.35684686279296873, 0.3584761962890625, 0.3596515197753906, 0.3601842956542969, 0.3585326538085937, 0.35834906005859374, 0.35913156127929685, 0.35897442626953124, 0.362264404296875, 0.360170166015625]",tokens/s,712.5795899360917,kWh,1.0491878434375697e-05,1.1570262446000938e-06,6.972098831643196e-06,1.8621003510618987e-05,tokens/kWh,13747916.42426849,MB,1791.56992,9009.23392,0.0,8592.031744,8476.849152,s,10,29.637718261718746,2.9637718261718744,0.005008545861069562,2.963507568359375,2.9687735107421878,2.970996374511719,2.9727746655273437,"[2.954398681640625, 2.96602294921875, 2.962838134765625, 2.96061376953125, 2.97321923828125, 2.968279541015625, 2.96679345703125, 2.96285791015625, 2.958537353515625, 2.9641572265625]",tokens/s,21.256697105921717,kWh,8.645050377478658e-05,9.534735657917464e-06,5.748192495375674e-05,0.0001534671643864608,tokens/kWh,410511.26637978066,,s,630,29.632593711853026,0.04703586303468734,0.000377765763243554,0.04702848052978516,0.04748235206604004,0.04760046043395996,0.04799247398376465,"[0.04702825546264648, 0.04676544189453125, 0.046362369537353516, 0.046412670135498046, 0.04620230484008789, 0.04617875289916992, 0.046383232116699216, 0.04630963134765625, 0.04650163269042969, 0.04633379364013672, 0.04637900924682617, 0.047856033325195314, 0.046771968841552734, 0.04642531204223633, 0.04626054382324219, 0.04628659057617188, 0.046454689025878904, 0.046467742919921874, 0.04639350509643555, 0.0464793586730957, 0.04679916763305664, 0.046929534912109376, 0.046825183868408206, 0.046942558288574215, 0.04685622406005859, 0.04671619033813477, 0.04649193572998047, 0.046481056213378905, 0.04662764739990234, 0.04671891021728516, 0.04653788757324219, 0.04684649658203125, 0.046674240112304685, 0.04673311996459961, 0.04690348815917969, 0.046870529174804686, 0.046923583984375, 0.04729481506347656, 0.04702377700805664, 0.04685023880004883, 0.04710326385498047, 0.04706921768188477, 0.0469920654296875, 0.04702412796020508, 0.04711529541015625, 0.04745929718017578, 0.04739187240600586, 0.0471325454711914, 0.04700467300415039, 0.04715219116210938, 0.047385601043701174, 0.047196094512939456, 0.04727315139770508, 0.047239105224609376, 0.04736016082763672, 0.047522048950195316, 0.047397342681884766, 0.047411231994628905, 0.04759318542480469, 0.047368000030517575, 0.04749356842041016, 0.04749484634399414, 
0.047530303955078124, 0.04773516845703125, 0.04715155029296875, 0.046481407165527344, 0.04638521575927734, 0.04663283157348633, 0.046465087890625, 0.04648550415039063, 0.046581760406494144, 0.04673273468017578, 0.046623294830322265, 0.04661862564086914, 0.04654463958740234, 0.04642220687866211, 0.046549121856689454, 0.046621631622314454, 0.04666470336914062, 0.0470118408203125, 0.04695654296875, 0.04680499267578125, 0.04688083267211914, 0.04684288024902344, 0.04674041748046875, 0.0479185905456543, 0.04707129669189453, 0.04719577789306641, 0.046980960845947266, 0.047074272155761716, 0.04661043167114258, 0.04688022232055664, 0.046835617065429686, 0.04680563354492188, 0.04707123184204102, 0.046779903411865234, 0.04674387359619141, 0.046788352966308594, 0.047335872650146486, 0.047168895721435546, 0.047284862518310544, 0.0471954231262207, 0.04703078460693359, 0.04696806335449219, 0.047166431427001956, 0.047073184967041014, 0.0474686393737793, 0.04732108688354492, 0.047462398529052735, 0.04739686584472656, 0.04734566497802734, 0.04700364685058594, 0.04724127960205078, 0.047306591033935544, 0.04742278289794922, 0.04821481704711914, 0.048010848999023435, 0.04727836990356445, 0.047144191741943356, 0.04716838455200195, 0.04758528137207031, 0.04789254379272461, 0.047611167907714844, 0.04756444931030274, 0.047782913208007816, 0.04755023956298828, 0.04707360076904297, 0.04665568161010742, 0.04657097625732422, 0.0467215690612793, 0.04676153564453125, 0.04656902313232422, 0.04641164779663086, 0.04646841430664062, 0.04687020874023438, 0.046739456176757815, 0.04661043167114258, 0.04661248016357422, 0.046837760925292966, 0.04693734359741211, 0.046878849029541016, 0.04667046356201172, 0.046647296905517575, 0.04681318283081055, 0.04699750518798828, 0.047089344024658204, 0.04688723373413086, 0.04690124893188476, 0.04704691314697266, 0.04699929428100586, 0.04697907257080078, 0.047068576812744144, 0.04695657730102539, 0.04679244613647461, 0.04678329467773437, 0.04698316955566406, 0.046882015228271484, 0.04693686294555664, 0.04683059310913086, 0.04726595306396485, 0.047118209838867185, 0.04723795318603516, 0.04698502349853516, 0.047052639007568356, 0.04677478408813476, 0.04721049499511719, 0.04706224060058594, 0.04722972869873047, 0.04700774383544922, 0.04727807998657227, 0.047222782135009765, 0.04717929458618164, 0.04705676651000976, 0.04685689544677735, 0.047048160552978516, 0.04702048110961914, 0.04743600082397461, 0.04741507339477539, 0.047306751251220705, 0.04719820785522461, 0.04772249603271484, 0.04759961700439453, 0.04729241561889649, 0.04727603149414063, 0.04754227066040039, 0.047421440124511716, 0.04734137725830078, 0.04761385726928711, 0.04736979293823242, 0.0469730224609375, 0.04648803329467773, 0.04632918548583984, 0.046480224609375, 0.046505184173583985, 0.046527263641357425, 0.04657955169677734, 0.04654095840454102, 0.04694144058227539, 0.04658457565307617, 0.046388671875, 0.0466229133605957, 0.046493888854980465, 0.046723262786865234, 0.04677142333984375, 0.04661942291259766, 0.04685609436035156, 0.0466457290649414, 0.04674636840820313, 0.046795936584472654, 0.0467729606628418, 0.0467589111328125, 0.04664524841308594, 0.04658992004394531, 0.0468480339050293, 0.046929534912109376, 0.04657395172119141, 0.04678656005859375, 0.04685004806518555, 0.04696268844604492, 0.04684902572631836, 0.04712099075317383, 0.046768543243408206, 0.04717363357543945, 0.04705484771728516, 0.04708726501464844, 0.047100257873535153, 0.04734975814819336, 0.047180801391601565, 0.04730265426635742, 0.04738150405883789, 
0.04743737411499024, 0.047309249877929685, 0.047405055999755856, 0.04727603149414063, 0.04725964736938477, 0.04715929412841797, 0.0472138557434082, 0.047270622253417965, 0.04721622467041016, 0.04723548889160156, 0.04721855926513672, 0.04723110580444336, 0.047421440124511716, 0.047266014099121095, 0.04739459228515625, 0.04725507354736328, 0.04736867141723633, 0.04731084823608398, 0.04748204803466797, 0.0474796142578125, 0.04747465515136719, 0.04784483337402344, 0.04735622406005859, 0.04733552169799805, 0.04661248016357422, 0.04635136032104492, 0.04661897659301758, 0.046483585357666016, 0.04679324722290039, 0.04676732635498047, 0.046641952514648435, 0.04672447967529297, 0.04673190307617187, 0.046751201629638674, 0.04669289779663086, 0.046702625274658204, 0.046577632904052736, 0.04680470275878906, 0.04696640014648437, 0.04692201614379883, 0.04693955230712891, 0.04692803192138672, 0.046946399688720705, 0.047185760498046875, 0.048407424926757814, 0.04731843185424805, 0.04664508819580078, 0.04668492889404297, 0.046836894989013673, 0.0472625617980957, 0.04695859146118164, 0.046917057037353514, 0.04712886428833008, 0.047155487060546876, 0.04694630432128906, 0.047080928802490235, 0.047110687255859374, 0.04704051208496094, 0.04701353454589844, 0.04709020614624024, 0.047417152404785154, 0.047513118743896486, 0.047430110931396485, 0.047485088348388674, 0.04740694427490234, 0.0472627182006836, 0.04712243270874023, 0.04740182495117187, 0.047254753112792966, 0.0476743049621582, 0.047898494720458984, 0.047230655670166016, 0.04744646453857422, 0.0475546875, 0.04754009628295899, 0.047446014404296875, 0.04922163009643555, 0.047378593444824216, 0.047527137756347655, 0.04735654449462891, 0.047740478515625, 0.04751814270019531, 0.04765919876098633, 0.047876224517822266, 0.04768735885620117, 0.04722073745727539, 0.04664031982421875, 0.04659814453125, 0.04670956802368164, 0.046653438568115234, 0.04674560165405273, 0.04680499267578125, 0.04662227249145508, 0.046578079223632815, 0.04678659057617188, 0.046798080444335935, 0.047104766845703125, 0.047035808563232424, 0.04699135971069336, 0.04683353424072265, 0.04667660903930664, 0.04662895965576172, 0.0467127685546875, 0.04678649520874024, 0.04683116912841797, 0.0472110710144043, 0.047241214752197266, 0.04704051208496094, 0.04678041458129883, 0.046859455108642575, 0.04715193557739258, 0.047046016693115235, 0.047147647857666015, 0.0471674575805664, 0.04706307220458984, 0.04689625549316406, 0.04702870559692383, 0.04706467056274414, 0.04732704162597656, 0.04734374237060547, 0.047334175109863284, 0.0473612174987793, 0.047382881164550784, 0.04712505722045898, 0.047097759246826174, 0.04728022384643555, 0.047510848999023435, 0.047559680938720705, 0.047107776641845706, 0.04704051208496094, 0.047331329345703124, 0.047263744354248044, 0.047124481201171874, 0.04731289672851562, 0.04712243270874023, 0.04725356674194336, 0.04729439926147461, 0.047280384063720704, 0.047252769470214846, 0.04749935913085938, 0.04768601608276367, 0.04764057540893555, 0.04737638473510742, 0.04732723236083984, 0.04746444702148438, 0.04748896026611328, 0.04764678573608398, 0.0476440315246582, 0.047236927032470705, 0.046792896270751956, 0.04662886428833008, 0.04661638259887695, 0.046510272979736325, 0.0465428466796875, 0.04660361480712891, 0.04675753784179688, 0.04657664108276367, 0.04663241577148437, 0.046649887084960935, 0.04683161544799805, 0.046774112701416015, 0.04686044692993164, 0.04661625671386719, 0.04677580642700195, 0.0466624641418457, 0.04659609603881836, 0.046845951080322266, 
0.04676607894897461, 0.0467836799621582, 0.04706150436401367, 0.04698963165283203, 0.04675484848022461, 0.04671148681640625, 0.04696707153320313, 0.046908863067626955, 0.047043296813964845, 0.04707027053833008, 0.04702288055419922, 0.04694217681884766, 0.04707126235961914, 0.047183265686035154, 0.04723295974731445, 0.04722140884399414, 0.04740483093261719, 0.04708713531494141, 0.0470731201171875, 0.0470880012512207, 0.04739904022216797, 0.0473803825378418, 0.04739487838745117, 0.047495552062988285, 0.047416576385498045, 0.04710067367553711, 0.047439617156982423, 0.04741145706176758, 0.04720435333251953, 0.04698316955566406, 0.04704460906982422, 0.04732505416870117, 0.04801548767089844, 0.04737638473510742, 0.04741510391235351, 0.04745644760131836, 0.047540321350097656, 0.047841182708740236, 0.04760313415527344, 0.04733599853515625, 0.047418975830078126, 0.04752988815307617, 0.04746905517578125, 0.04753952026367188, 0.047279678344726565, 0.04682387161254883, 0.046849502563476565, 0.04656387329101563, 0.0466833610534668, 0.0468691520690918, 0.046988510131835935, 0.0467239990234375, 0.0466464958190918, 0.04660508728027344, 0.04643804931640625, 0.046606464385986326, 0.046450912475585936, 0.046747871398925785, 0.04654467010498047, 0.04658790588378906, 0.04667801666259765, 0.046827518463134765, 0.04665244674682617, 0.04670479965209961, 0.04658998489379883, 0.04677465438842773, 0.046674110412597655, 0.04869142532348633, 0.04688281631469727, 0.04708534240722656, 0.046903518676757815, 0.04679884719848633, 0.047013343811035155, 0.046870399475097656, 0.046776992797851566, 0.04692172622680664, 0.04679065704345703, 0.04688838577270508, 0.046985790252685546, 0.04686643218994141, 0.04701980972290039, 0.04706335830688477, 0.046983070373535156, 0.046927871704101565, 0.04697907257080078, 0.04720982360839844, 0.047322784423828125, 0.047206878662109375, 0.0470984001159668, 0.04717567825317383, 0.04719001770019531, 0.04742550277709961, 0.047390335083007815, 0.047255233764648436, 0.04713516616821289, 0.047178016662597654, 0.04716134262084961, 0.047328960418701174, 0.047249664306640626, 0.04806588745117187, 0.04738326263427734, 0.04762419128417969, 0.047446014404296875, 0.04724118423461914, 0.04754947280883789, 0.04752870559692383, 0.047537952423095706, 0.047273983001708986, 0.046772224426269535, 0.046309280395507815, 0.04635862350463867, 0.04644464111328125, 0.046430110931396484, 0.04656528091430664, 0.04670883178710938, 0.046626495361328124, 0.04674553680419922, 0.04662217712402344, 0.046620960235595706, 0.04647180938720703, 0.046685985565185543, 0.04655324935913086, 0.04687200164794922, 0.046785152435302735, 0.04675980758666992, 0.04673651123046875, 0.04707030487060547, 0.04712847900390625, 0.04702819061279297, 0.04694019317626953, 0.046860286712646484, 0.04667391967773438, 0.04674560165405273, 0.046736961364746095, 0.04670300674438477, 0.046754878997802736, 0.04677321624755859, 0.04683366394042969, 0.046852096557617184, 0.0466033935546875, 0.04670876693725586, 0.047033184051513674, 0.04693376159667969, 0.047255809783935544, 0.047123680114746096, 0.046955295562744144, 0.04715625762939453, 0.04720943832397461, 0.04732463836669922, 0.04741926574707031, 0.047290721893310544, 0.04705904006958008, 0.04688284683227539, 0.04706732940673828, 0.04701388931274414, 0.04690124893188476, 0.04693376159667969, 0.04735145568847656, 0.04716134262084961, 0.04721110534667969, 0.047239166259765625, 0.047101951599121096, 0.04760115051269531, 0.04739328002929687, 0.047361248016357424, 0.04736665725708008, 0.04730704116821289, 
0.04756003189086914, 0.04741392135620117, 0.04744521713256836, 0.04731017684936523, 0.04680156707763672, 0.04643123245239258, 0.046487934112548826, 0.04632022476196289, 0.04631964874267578, 0.04635647964477539, 0.046366111755371094, 0.046508544921875, 0.04657571029663086, 0.046753311157226564, 0.0467993278503418, 0.0467119369506836, 0.04680780792236328, 0.04686640167236328, 0.04783919906616211, 0.04794748687744141, 0.04702387237548828, 0.046924510955810544, 0.04677017593383789, 0.046811134338378906, 0.04712985610961914, 0.046795520782470706, 0.046900894165039064, 0.04700307083129883, 0.047079967498779296, 0.04665996932983398, 0.04680438232421875, 0.04674006271362305, 0.04709318542480469, 0.04693459320068359, 0.04722073745727539, 0.046924991607666014, 0.04721468734741211, 0.046875358581542965, 0.046882495880126954, 0.046905662536621096, 0.04715484619140625, 0.0472599983215332, 0.04720822525024414, 0.04737251281738281, 0.047260990142822264, 0.04716726303100586, 0.04706806564331055, 0.04721049499511719, 0.04753347015380859, 0.04739952087402344, 0.047288318634033204, 0.04702412796020508, 0.04721664047241211, 0.047325183868408206, 0.04718592071533203, 0.04742083358764648, 0.04736470413208008, 0.04719001770019531, 0.047338912963867184, 0.047486751556396485, 0.047285057067871096, 0.04744134521484375, 0.047415168762207034, 0.0474422721862793, 0.04749347305297852, 0.047344993591308594]",tokens/s,21.260373159572605,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,11170.459648,12553.4208,0.0,12150.898688,12116.742656,s,1,16.541673828125,16.541673828125,0.0,16.541673828125,16.541673828125,16.541673828125,16.541673828125,[16.541673828125],,kWh,0.0002728465994917011,3.0088333942573003e-05,8.776951465998273e-05,0.00039070444809425687,,MB,2167.066624,13505.527808,0.0,13088.325632,12892.834304,s,10,5.962787658691407,0.5962787658691406,0.0014361496247861764,0.5957967224121093,0.59900234375,0.5990214050292969,0.5990366540527344,"[0.5955895385742187, 0.5951190185546875, 0.5961220092773437, 0.59634423828125, 0.5949058837890625, 0.59600390625, 0.5952977294921875, 0.5989981079101563, 0.5953667602539062, 0.5990404663085938]",tokens/s,429.32939197801613,kWh,1.7416458374754362e-05,1.920704908292925e-06,1.156759095341193e-05,3.090475423645922e-05,tokens/kWh,8283515.1524353335,MB,2167.066624,13883.015168,0.0,13465.812992,13237.636096,s,10,40.78575366210938,4.078575366210937,0.00884052601947971,4.080454345703125,4.089705932617187,4.089837219238281,4.089942248535156,"[4.06139697265625, 4.068272216796875, 4.073059814453125, 4.0738955078125, 4.0814794921875, 4.07942919921875, 4.08378369140625, 4.08479150390625, 4.089968505859375, 
4.0896767578125]",tokens/s,15.44657002587843,kWh,0.00011932692462524684,1.3162372901363742e-05,7.933331510058727e-05,0.00021182261262719785,tokens/kWh,297418.6713053073,,s,630,40.76741284942632,0.06471017912607345,0.0005584043339581829,0.06473465728759766,0.06544433975219727,0.0656420753479004,0.06585083534240722,"[0.06350163269042969, 0.06362796783447265, 0.06347161483764649, 0.06366764831542969, 0.06385107040405273, 0.06454886627197266, 0.0645241928100586, 0.06402057647705078, 0.06351414489746093, 0.06379292678833008, 0.06467206573486328, 0.0641415023803711, 0.06392031860351563, 0.06357196807861328, 0.06331391906738282, 0.06368051147460937, 0.06405913543701172, 0.06409446716308594, 0.06416793823242188, 0.06390371322631835, 0.06416591644287109, 0.06439254760742187, 0.0643897933959961, 0.06455910491943359, 0.0643399658203125, 0.06413516998291016, 0.06456025695800781, 0.06454156494140625, 0.06441165161132813, 0.06425126647949218, 0.06426438140869141, 0.06430486297607421, 0.06469705963134766, 0.06464044952392578, 0.06442185974121094, 0.06415216064453125, 0.06465293121337891, 0.06464979553222656, 0.06471456146240234, 0.06483491516113281, 0.06488745880126953, 0.06489087677001953, 0.06491340637207031, 0.06469542694091797, 0.06475251007080078, 0.06448258972167968, 0.06475440216064453, 0.06488432312011719, 0.06503810882568359, 0.06461830139160156, 0.06442198181152343, 0.06489161682128906, 0.06521222686767578, 0.06500761413574219, 0.06524272155761719, 0.06561033630371094, 0.06527970886230469, 0.0650627212524414, 0.0647493438720703, 0.06484038543701172, 0.0652390365600586, 0.06519602966308594, 0.06510134124755859, 0.06371299362182617, 0.06431334686279297, 0.06397689437866211, 0.06378515243530274, 0.0639614715576172, 0.06455206298828126, 0.06421798706054688, 0.06377660751342773, 0.06335001754760743, 0.06331881713867188, 0.06457907104492187, 0.06413712310791016, 0.06395337677001953, 0.06366790390014648, 0.06365651321411132, 0.06400819396972657, 0.06420480346679687, 0.06456729888916016, 0.06418013000488282, 0.06432777404785156, 0.06478848266601563, 0.06475981140136719, 0.06460211181640625, 0.06417203521728515, 0.06400819396972657, 0.06396454238891601, 0.06439564514160157, 0.06435804748535157, 0.06434467315673828, 0.06425804901123047, 0.06408995056152343, 0.06456050872802735, 0.06502185821533203, 0.06469107055664063, 0.0646344985961914, 0.06426866912841797, 0.06509516906738282, 0.0649969253540039, 0.06479055786132812, 0.0646968994140625, 0.06472473907470704, 0.06485052490234375, 0.06477005004882813, 0.0650588150024414, 0.06481613159179687, 0.0647504653930664, 0.06554227447509765, 0.06529840087890625, 0.06483309173583984, 0.06473334503173828, 0.06568787384033203, 0.06565052795410156, 0.06520764923095704, 0.0648650894165039, 0.0652918701171875, 0.06506742095947265, 0.06477619171142578, 0.06462393951416015, 0.06527830505371093, 0.06560179138183594, 0.06522796630859375, 0.065227294921875, 0.06523737335205078, 0.06363945770263672, 0.06371635055541992, 0.0643880615234375, 0.06439730834960937, 0.06400204467773438, 0.06390784072875977, 0.06395423889160157, 0.06367097473144531, 0.06402047729492187, 0.06387712097167969, 0.06454271697998047, 0.06414950561523437, 0.06384214401245117, 0.064012451171875, 0.06385798263549805, 0.06428300476074218, 0.06383571243286133, 0.0640843505859375, 0.06407337951660157, 0.0648252182006836, 0.06473596954345703, 0.06423155212402344, 0.0643604507446289, 0.06390979385375976, 0.06475532531738282, 0.06478076934814453, 0.06490230560302734, 0.06465827178955078, 0.06412226867675781, 
0.06452489471435546, 0.06453862762451172, 0.06450701141357422, 0.06477686309814454, 0.06445008087158204, 0.06462083435058594, 0.0648298568725586, 0.06515916442871093, 0.06493772888183594, 0.0649826889038086, 0.06542601776123047, 0.0648638687133789, 0.06504281616210937, 0.06486006164550781, 0.0646488037109375, 0.0651431655883789, 0.06492787170410157, 0.06479462432861328, 0.06546371459960937, 0.06523750305175781, 0.06493782043457032, 0.06498102569580078, 0.06503404998779297, 0.06524098968505859, 0.06568396759033203, 0.0651727066040039, 0.06487324523925782, 0.06526927947998047, 0.06537165069580078, 0.06534857940673829, 0.06509260559082031, 0.06512742614746093, 0.06576127624511718, 0.06548627471923828, 0.06375628662109376, 0.0642498550415039, 0.06378496170043946, 0.06367027282714843, 0.06455814361572265, 0.06456159973144532, 0.0641049575805664, 0.06370076751708985, 0.06370860671997071, 0.06356047821044922, 0.06396518325805664, 0.06433382415771484, 0.06418966674804688, 0.06398406219482422, 0.06365014266967774, 0.06401023864746094, 0.0646197738647461, 0.06422351837158204, 0.06408035278320312, 0.06412429046630859, 0.06408665466308594, 0.06423129272460938, 0.06440144348144532, 0.06416188812255859, 0.06435804748535157, 0.06439139556884765, 0.06484185791015624, 0.06476927947998047, 0.06492825317382812, 0.06433984375, 0.06408560180664062, 0.06478313446044921, 0.06486179351806641, 0.06463120269775391, 0.06457478332519531, 0.06435501098632812, 0.06453008270263672, 0.06481279754638672, 0.06463549041748047, 0.06472086334228516, 0.06438435363769532, 0.06474822235107422, 0.06540697479248046, 0.06546431732177735, 0.06505267333984376, 0.06480076599121094, 0.06497484588623047, 0.06519602966308594, 0.06515049743652344, 0.06507158660888672, 0.06531276702880859, 0.06528614044189453, 0.06520832061767579, 0.0650255355834961, 0.06510972595214844, 0.06507590484619141, 0.06515516662597656, 0.06512579345703125, 0.06546902465820313, 0.06548889923095703, 0.06542540740966797, 0.06545999908447266, 0.0660134048461914, 0.06483411407470703, 0.06452019500732421, 0.06460211181640625, 0.06415888214111329, 0.0643239974975586, 0.0642933120727539, 0.06419395446777344, 0.0637724494934082, 0.06363407897949219, 0.0638568000793457, 0.06403276824951172, 0.06416912078857422, 0.06399881744384765, 0.06388531112670899, 0.06362643051147461, 0.06413394927978516, 0.06435020446777344, 0.06448121643066407, 0.06499129486083985, 0.06515711975097656, 0.06471177673339844, 0.06423235321044922, 0.06430105590820312, 0.0641269760131836, 0.06412831878662109, 0.06507081604003906, 0.06445769500732422, 0.0644336929321289, 0.06474800109863281, 0.06455228424072265, 0.06436460876464843, 0.06448188781738282, 0.06469149017333985, 0.065297119140625, 0.06497484588623047, 0.06514208221435547, 0.06501651000976562, 0.06541311645507812, 0.0648622055053711, 0.06474543762207031, 0.064499267578125, 0.06490774536132812, 0.06480214691162109, 0.06506153869628906, 0.06487964630126954, 0.06465372467041015, 0.0651269760131836, 0.06546431732177735, 0.06531021118164063, 0.06534758758544922, 0.0655631332397461, 0.06561980438232422, 0.06544400024414063, 0.06506495666503906, 0.06505439758300781, 0.06507961273193359, 0.06501376342773438, 0.06546534729003907, 0.065993408203125, 0.06524345397949219, 0.0650465316772461, 0.06507033538818359, 0.06580668640136719, 0.06421084594726563, 0.06457724761962891, 0.06433411407470703, 0.06394265747070313, 0.0639447021484375, 0.06401158142089844, 0.06402470397949218, 0.06384214401245117, 0.0641434555053711, 0.06454131317138671, 
0.06416793823242188, 0.06444371032714843, 0.0642526092529297, 0.06418335723876953, 0.06394976043701171, 0.06367961502075195, 0.064050048828125, 0.0648089599609375, 0.06457138824462891, 0.06437273406982422, 0.0643185577392578, 0.0642364501953125, 0.06446284484863281, 0.06462054443359375, 0.06450086212158203, 0.06449199676513671, 0.06453699493408203, 0.06464102172851563, 0.06446854400634766, 0.06453702545166015, 0.06461824035644531, 0.0644938201904297, 0.06476595306396485, 0.06509945678710938, 0.06513488006591797, 0.06468816375732422, 0.06477823638916015, 0.06494822692871094, 0.064876220703125, 0.06496902465820313, 0.06514435577392579, 0.06494461059570313, 0.06478591918945313, 0.06470912170410156, 0.06492364501953125, 0.0651851806640625, 0.06481161499023437, 0.06482339477539062, 0.06501570892333984, 0.06531890869140625, 0.0656662368774414, 0.0651006088256836, 0.06502604675292968, 0.06498025512695313, 0.0652991714477539, 0.06577526092529297, 0.06524912261962891, 0.06506499481201172, 0.06526947021484375, 0.06549785614013671, 0.06585689544677735, 0.06532160186767579, 0.06524256134033203, 0.06434591674804688, 0.06442105865478516, 0.06436147308349609, 0.06449971008300781, 0.06428057861328125, 0.06407577514648438, 0.06405888366699218, 0.06394700622558594, 0.06402470397949218, 0.06401651000976563, 0.06462873840332031, 0.06457724761962891, 0.06459983825683593, 0.0641602554321289, 0.06393584060668946, 0.0639840965270996, 0.06460025787353516, 0.06445875549316406, 0.06490726470947265, 0.0647741470336914, 0.06425167846679687, 0.06439344024658203, 0.06481919860839844, 0.06445670318603515, 0.06431942749023438, 0.06435027313232422, 0.06490646362304688, 0.06459404754638672, 0.06449014282226563, 0.06430060577392578, 0.06406393432617187, 0.06490681457519532, 0.06464966583251953, 0.06502143859863281, 0.06516172790527344, 0.06499270629882813, 0.06518950653076172, 0.06489939117431641, 0.06477053070068359, 0.0653353271484375, 0.06571794891357421, 0.06544761657714844, 0.06496537780761719, 0.06478438568115234, 0.06470861053466796, 0.06543564605712891, 0.06519193267822265, 0.0652390365600586, 0.06517295837402344, 0.06516899108886719, 0.06566393280029297, 0.06524313354492188, 0.06515711975097656, 0.06489215850830078, 0.06567123413085937, 0.06567906951904297, 0.06583599853515625, 0.06537625885009765, 0.0652042236328125, 0.06534963226318359, 0.06543721771240234, 0.0658641586303711, 0.06560720062255859, 0.06392681503295898, 0.06446284484863281, 0.06430912017822266, 0.06369702529907227, 0.0647188491821289, 0.06503218841552734, 0.06453619384765626, 0.06398771286010742, 0.06356774520874023, 0.06379487991333008, 0.06424601745605468, 0.06407635498046875, 0.06419455718994141, 0.06424781036376953, 0.06421238708496094, 0.06429961395263672, 0.06457138824462891, 0.06421257781982422, 0.06437110137939453, 0.06428057861328125, 0.06482851409912109, 0.06497763061523437, 0.06489107513427735, 0.06424361419677735, 0.0639010238647461, 0.06441603088378907, 0.06454105377197265, 0.0644170913696289, 0.06446086120605468, 0.06446153259277344, 0.06496051025390626, 0.0646527328491211, 0.06485820770263671, 0.06485040283203125, 0.06506700897216797, 0.06517759704589844, 0.0652779541015625, 0.06520355224609375, 0.06483580780029297, 0.06520877075195312, 0.06487241363525391, 0.06483561706542969, 0.06471238708496094, 0.06532128143310546, 0.06499244689941407, 0.06493225860595703, 0.06496092987060546, 0.06489027404785157, 0.0649543685913086, 0.06527446746826172, 0.06568141174316407, 0.06566092681884765, 0.06562815856933593, 0.06550521850585937, 
0.06563174438476563, 0.06560157012939453, 0.0654280014038086, 0.06529228973388672, 0.06562608337402344, 0.06533660888671874, 0.06522739410400391, 0.06582489776611328, 0.06539244842529297, 0.06409625244140625, 0.06451382446289063, 0.063842529296875, 0.06386278533935547, 0.06454681396484375, 0.06451760101318359, 0.06408451080322265, 0.06379225540161133, 0.06389017486572265, 0.06414553833007812, 0.06421708679199219, 0.06454489898681641, 0.06412895965576172, 0.0640099868774414, 0.06404934692382812, 0.06459782409667969, 0.06457977294921875, 0.0648305892944336, 0.06441283416748046, 0.06490083312988282, 0.0650416030883789, 0.0647892837524414, 0.0644544677734375, 0.06429676818847656, 0.06409801483154297, 0.06499603271484375, 0.06490726470947265, 0.06477117156982422, 0.06476659393310547, 0.06489730834960937, 0.06485606384277344, 0.06509977722167969, 0.06487401580810546, 0.06518422698974609, 0.0652410888671875, 0.06512191772460937, 0.06512665557861329, 0.06488896179199219, 0.06556172943115235, 0.06483647918701171, 0.06512435150146484, 0.06488473510742188, 0.06530982208251954, 0.06507305908203125, 0.06498397064208984, 0.0653404769897461, 0.06561894226074219, 0.06537779235839844, 0.06582851409912109, 0.06574495697021485, 0.0657823715209961, 0.06565404510498046, 0.06581686401367187, 0.06518592071533204, 0.0655220184326172, 0.06551785278320313, 0.06539865875244141, 0.06516754913330078, 0.06550099182128906, 0.06559273529052734, 0.06542915344238281, 0.06570285034179688, 0.06655795288085938, 0.06402518463134765, 0.06429430389404298, 0.06437741088867187, 0.0643515853881836, 0.0642721939086914, 0.06497574615478516, 0.06477180480957032, 0.06409871673583985, 0.06408914947509765, 0.06431212615966797, 0.06412493133544922, 0.06411603546142577, 0.06416659545898437, 0.064321533203125, 0.06430636596679687, 0.06407244873046875, 0.06413078308105469, 0.06469814300537109, 0.06459040069580078, 0.06492774200439454, 0.06486160278320313, 0.06478505706787109, 0.06492793273925782, 0.06462640380859375, 0.06453606414794921, 0.06441004943847656, 0.06448342132568359, 0.0643420181274414, 0.06455910491943359, 0.06456092834472656, 0.06449945831298828, 0.06468390655517578, 0.0650798110961914, 0.06512239837646484, 0.06490860748291015, 0.06511891174316406, 0.06547865295410156, 0.06542960357666015, 0.06523190307617187, 0.06497305297851562, 0.06523763275146484, 0.06468915557861328, 0.06469324493408203, 0.06464717102050781, 0.06493753814697266, 0.06520057678222656, 0.06556428527832031, 0.06516774749755859, 0.06542745971679688, 0.06572441864013671, 0.06544739532470703, 0.06570652770996094, 0.06538809967041016, 0.06588886260986328, 0.06535740661621094, 0.065697021484375, 0.06525234985351562, 0.06511798095703125, 0.06523926544189453, 0.06508354949951171, 0.06548258972167968, 0.06582208251953126, 0.06590118408203124]",tokens/s,15.453519268610298,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1586.962432,1604.190208,0.0,1201.668096,1189.151232,s,1,8.4352861328125,8.4352861328125,0.0,8.4352861328125,8.4352861328125,8.4352861328125,8.4352861328125,[8.4352861328125],,kWh,3.766827811249793e-05,4.147741742169834e-06,1.1847787255978015e-05,5.366380711064578e-05,,MB,1578.131456,1799.225344,0.0,1382.023168,1351.367168,s,10,0.484584831237793,0.048458483123779296,0.0007073508666428545,0.048841983795166015,0.04925450820922852,0.04929992637634277,0.04933626091003418,"[0.04924441528320313, 0.04882291030883789, 0.04934534454345703, 0.04773798370361328, 0.047417888641357424, 0.04770182418823242, 0.04761743927001953, 0.04892895889282227, 0.04890700912475586, 0.04886105728149414]",tokens/s,5282.872749980426,kWh,1.468573784589633e-06,1.6195723306047074e-07,9.74802566552759e-07,2.605333584202863e-06,tokens/kWh,98259970.06764363,MB,1581.7728,1841.168384,0.0,1423.966208,1407.328256,s,10,16.40045654296875,1.640045654296875,0.19219965471939635,1.7453414916992187,1.8165412719726564,1.8180832580566406,1.8193168469238283,"[1.819625244140625, 1.8146661376953126, 1.6843673095703124, 1.391161865234375, 1.378186279296875, 1.3861680908203124, 1.4925438232421875, 1.806315673828125, 1.8112235107421875, 1.8161986083984376]",tokens/s,38.41356479006649,kWh,4.459828528499658e-05,4.918199388182922e-06,2.007710316384735e-05,6.959358783702687e-05,tokens/kWh,905255.8139053325,,s,630,16.39684947013855,0.02602674519069611,0.0033714348664986384,0.02846175956726074,0.02908328266143799,0.02930663547515869,0.029913509006500246,"[0.02851430320739746, 0.028676095962524413, 0.02850764846801758, 0.028457216262817383, 0.02844268798828125, 0.028350656509399413, 0.02843631935119629, 0.02845916748046875, 0.028399616241455077, 0.028438528060913085, 0.0285347843170166, 0.02854015922546387, 0.0285100154876709, 0.02863171195983887, 0.028784032821655273, 0.02858006477355957, 0.02876483154296875, 0.02873535919189453, 0.02896294403076172, 0.029552255630493164, 0.02987660789489746, 0.02991440010070801, 0.02921481513977051, 0.02921126365661621, 0.029212671279907225, 0.029101600646972658, 0.02912495994567871, 0.02886992073059082, 0.02923129653930664, 0.029118911743164062, 0.028883167266845703, 0.029646015167236327, 0.02931180763244629, 0.029108224868774416, 0.02897830390930176, 0.028896127700805664, 0.028915712356567383, 0.02876620864868164, 0.028690431594848635, 0.028477439880371092, 0.02851020812988281, 0.028725248336791992, 0.028849824905395508, 0.028686080932617188, 0.028725215911865234, 0.029381248474121095, 0.02930009651184082, 0.029202112197875975, 0.028844991683959962, 0.029081247329711915, 0.028782943725585937, 0.028882368087768555, 0.028613183975219728, 0.02872480010986328, 0.029204160690307616, 0.029153215408325196, 0.02913158416748047, 0.029101951599121094, 0.028733568191528322, 0.028618240356445314, 0.028535295486450195, 0.028993535995483398, 0.02863920021057129, 0.028362079620361327, 0.029117088317871093, 0.028620800018310546, 0.028516351699829103, 0.028687936782836914, 0.02853318405151367, 0.02852659225463867, 0.028431840896606445, 0.02845244789123535, 0.028396480560302733, 0.028423488616943358, 0.028439231872558594, 0.02840575981140137, 0.02841708755493164, 0.028433151245117187, 0.028721343994140624, 0.02851430320739746, 0.028518016815185548, 0.028543359756469728, 0.02871500778198242, 0.028546367645263672, 0.02869728088378906, 0.028956672668457032, 0.02903232002258301, 0.029069440841674805, 0.029314207077026366, 0.029584224700927735, 0.029351936340332032, 
0.02926905632019043, 0.02910857582092285, 0.028950592041015626, 0.02879267120361328, 0.028641984939575194, 0.028704383850097655, 0.028608896255493163, 0.028693920135498048, 0.02935663986206055, 0.02935807991027832, 0.029429759979248047, 0.029306880950927733, 0.02920649528503418, 0.02865564727783203, 0.02854265594482422, 0.028512575149536132, 0.02843615913391113, 0.028498239517211914, 0.028497919082641602, 0.02850201606750488, 0.02855731201171875, 0.028594175338745118, 0.028544351577758788, 0.02845884895324707, 0.028820287704467772, 0.03154944038391113, 0.028868608474731446, 0.028688383102416993, 0.028680192947387696, 0.028649471282958985, 0.02879283142089844, 0.028587711334228515, 0.028539199829101563, 0.02910553550720215, 0.029275903701782225, 0.02895462417602539, 0.0291757755279541, 0.02910825538635254, 0.028762111663818358, 0.028503583908081054, 0.028377248764038087, 0.028671615600585936, 0.028766496658325195, 0.02861471939086914, 0.02871331214904785, 0.02857779121398926, 0.028545024871826172, 0.028495872497558594, 0.028460575103759767, 0.0284881591796875, 0.028411872863769533, 0.02842166328430176, 0.028484256744384765, 0.02842198371887207, 0.028467199325561524, 0.028428287506103517, 0.028646400451660156, 0.028705791473388673, 0.03151667213439941, 0.02973695945739746, 0.02898863983154297, 0.028859167098999022, 0.02863046455383301, 0.028703296661376953, 0.028931135177612304, 0.02869875144958496, 0.02892678451538086, 0.028579423904418946, 0.028848543167114257, 0.028596223831176756, 0.028567359924316405, 0.028764352798461915, 0.028686336517333984, 0.028570688247680665, 0.028613088607788086, 0.028469728469848632, 0.028624223709106444, 0.02861532783508301, 0.028647167205810547, 0.02284124755859375, 0.021819744110107422, 0.021859807968139647, 0.021819936752319337, 0.02229862403869629, 0.021942272186279296, 0.02196601676940918, 0.021926719665527342, 0.021968095779418946, 0.02189958381652832, 0.022464544296264648, 0.02196518325805664, 0.021863967895507812, 0.022253376007080078, 0.022190176010131835, 0.022039167404174803, 0.02206924819946289, 0.021954559326171876, 0.022196224212646484, 0.022183168411254884, 0.02233011245727539, 0.022028064727783202, 0.02334230422973633, 0.022115327835083007, 0.021983232498168945, 0.021966848373413086, 0.02207529640197754, 0.022214591979980467, 0.021956768035888672, 0.022125696182250975, 0.022022911071777344, 0.0218789119720459, 0.021964799880981444, 0.02196201515197754, 0.02203926467895508, 0.021997568130493163, 0.02247475242614746, 0.022071296691894532, 0.021968832015991212, 0.02178873634338379, 0.02178006362915039, 0.02191916847229004, 0.021950592041015626, 0.022483999252319337, 0.022017183303833007, 0.02188051223754883, 0.021832672119140625, 0.021894399642944335, 0.021807872772216796, 0.021921791076660157, 0.022060224533081055, 0.02218067169189453, 0.022108160018920898, 0.0218603515625, 0.021830944061279296, 0.021928672790527345, 0.022018016815185545, 0.022220767974853516, 0.024346559524536134, 0.022990976333618164, 0.022018047332763673, 0.022171648025512695, 0.02257254409790039, 0.022130752563476564, 0.02179475212097168, 0.02195199966430664, 0.0218175048828125, 0.02176838493347168, 0.02192860794067383, 0.021950176239013672, 0.021839296340942383, 0.022208831787109376, 0.022055456161499024, 0.021899007797241212, 0.021753376007080077, 0.02196544075012207, 0.021884191513061525, 0.0220263671875, 0.021752511978149414, 0.02183782386779785, 0.021985279083251954, 0.02204649543762207, 0.021952512741088868, 0.02188444709777832, 0.021967327117919922, 
0.021845279693603517, 0.022004447937011718, 0.021773664474487305, 0.02212931251525879, 0.021977088928222657, 0.021766143798828123, 0.02183510398864746, 0.021795488357543944, 0.02189107131958008, 0.021710432052612305, 0.023670240402221678, 0.02318636894226074, 0.021940223693847655, 0.021942272186279296, 0.02183782386779785, 0.021835775375366212, 0.021724159240722657, 0.021703680038452147, 0.021790719985961913, 0.021815135955810548, 0.02185759925842285, 0.021793632507324218, 0.021716096878051757, 0.02189606475830078, 0.021892255783081054, 0.021859167098999023, 0.02214240074157715, 0.0220513916015625, 0.02192793655395508, 0.021917695999145507, 0.021855487823486328, 0.021822208404541014, 0.021765312194824218, 0.02184684753417969, 0.021694208145141603, 0.021762304306030274, 0.021754880905151368, 0.021799936294555664, 0.02175369644165039, 0.02174172782897949, 0.021746816635131835, 0.02192473602294922, 0.021743616104125976, 0.02182943916320801, 0.021811391830444334, 0.021721088409423828, 0.021803007125854493, 0.02183782386779785, 0.021751808166503905, 0.022024192810058595, 0.021772192001342772, 0.021745119094848633, 0.021652095794677733, 0.021630815505981445, 0.021672096252441406, 0.021702655792236326, 0.021651391983032228, 0.021648704528808595, 0.021630912780761718, 0.021807935714721678, 0.0221429443359375, 0.022206464767456056, 0.021887008666992187, 0.022458368301391602, 0.022222208023071288, 0.021869184494018555, 0.0221265926361084, 0.02206947135925293, 0.022552352905273437, 0.021975040435791016, 0.02207289505004883, 0.021929952621459962, 0.022172128677368164, 0.022466560363769532, 0.022450111389160157, 0.022222911834716797, 0.02206515121459961, 0.022149120330810547, 0.021979135513305666, 0.022149120330810547, 0.022073183059692383, 0.02211609649658203, 0.022054752349853515, 0.022007423400878905, 0.021827520370483397, 0.02185113525390625, 0.021847135543823244, 0.021858911514282226, 0.02179462432861328, 0.021899200439453124, 0.02199545669555664, 0.022067840576171876, 0.02240620803833008, 0.022089759826660157, 0.022131616592407227, 0.022153120040893554, 0.021938079833984374, 0.022034496307373048, 0.022054975509643554, 0.022097984313964845, 0.022188032150268554, 0.022027488708496093, 0.021977088928222657, 0.02201228713989258, 0.022299039840698243, 0.02172297668457031, 0.02164908790588379, 0.021690847396850586, 0.02241663932800293, 0.021859071731567384, 0.02163836860656738, 0.02163996887207031, 0.02169856071472168, 0.021694303512573242, 0.021661855697631835, 0.021929824829101562, 0.021712608337402343, 0.02156287956237793, 0.021707712173461916, 0.021751808166503905, 0.021658655166625976, 0.021904352188110352, 0.02191708755493164, 0.02193427276611328, 0.02191001510620117, 0.021810943603515626, 0.021868608474731446, 0.021803199768066408, 0.021544960021972655, 0.022976512908935546, 0.02176345634460449, 0.021689983367919923, 0.021674367904663087, 0.02174627113342285, 0.02174569511413574, 0.021583391189575196, 0.0217969913482666, 0.021690719604492186, 0.021591968536376953, 0.02159452819824219, 0.021663423538208007, 0.021546848297119142, 0.021667167663574217, 0.02161337661743164, 0.021594112396240234, 0.02154457664489746, 0.02164143943786621, 0.021639328002929687, 0.022417407989501953, 0.021764095306396485, 0.02171673583984375, 0.021637247085571288, 0.021792896270751955, 0.022599679946899414, 0.021663200378417968, 0.02160892868041992, 0.021872480392456053, 0.021745344161987305, 0.021594240188598634, 0.021707168579101564, 0.021583871841430666, 0.021626399993896483, 0.024621023178100585, 
0.02185990333557129, 0.021732160568237305, 0.021637247085571288, 0.021542911529541017, 0.021805055618286134, 0.021644351959228515, 0.028655616760253907, 0.02848863983154297, 0.02852022361755371, 0.02881065559387207, 0.028957504272460938, 0.028632352828979492, 0.0286561279296875, 0.02896303939819336, 0.029663232803344725, 0.029642751693725586, 0.028719104766845704, 0.028546783447265626, 0.02853264045715332, 0.02844495964050293, 0.028465248107910155, 0.028366847991943358, 0.028338176727294922, 0.028598623275756838, 0.028585535049438476, 0.028515199661254882, 0.02845030403137207, 0.028590591430664062, 0.028374208450317382, 0.028746559143066407, 0.02856550407409668, 0.028489728927612305, 0.028429599761962892, 0.028404191970825197, 0.028436735153198243, 0.028436479568481447, 0.028495168685913085, 0.028479455947875976, 0.02843084716796875, 0.028714879989624024, 0.02873788833618164, 0.028694528579711914, 0.028837888717651368, 0.02865705680847168, 0.02867056083679199, 0.028884992599487305, 0.028618623733520508, 0.028852352142333983, 0.029249536514282228, 0.02930633544921875, 0.02854966354370117, 0.028434431076049805, 0.028448768615722656, 0.028333887100219727, 0.028546688079833984, 0.028400192260742186, 0.02866147232055664, 0.028373279571533204, 0.02878220748901367, 0.028643104553222658, 0.028559263229370118, 0.028793056488037108, 0.02851478385925293, 0.028475391387939454, 0.02856857681274414, 0.028582847595214844, 0.028581951141357424, 0.02839344024658203, 0.028442655563354492, 0.02879897689819336, 0.02870649528503418, 0.028541248321533205, 0.028493183135986328, 0.028481216430664064, 0.028427200317382814, 0.02854707145690918, 0.028440576553344726, 0.028631040573120117, 0.028739456176757813, 0.028954559326171875, 0.029225120544433592, 0.029487136840820313, 0.029221920013427733, 0.029573951721191406, 0.029519231796264648, 0.028740383148193358, 0.028398143768310548, 0.028571104049682616, 0.028653247833251953, 0.028687200546264648, 0.028592319488525392, 0.028735008239746094, 0.028563743591308595, 0.028685983657836915, 0.032928001403808596, 0.028752288818359374, 0.028755136489868164, 0.028766719818115235, 0.028706016540527343, 0.028598207473754883, 0.02846726417541504, 0.02833692741394043, 0.028397216796875, 0.028636863708496094, 0.02862761688232422, 0.028450496673583986, 0.028444992065429688, 0.028418079376220703, 0.028487648010253906, 0.028339359283447267, 0.028527103424072265, 0.02863564872741699, 0.028921695709228517, 0.02861846351623535, 0.028767711639404298, 0.029160032272338866, 0.02914518356323242, 0.02962777519226074, 0.028701568603515627, 0.02857263946533203, 0.0285533447265625, 0.02843846321105957, 0.028469280242919923, 0.028449920654296874, 0.028475072860717772, 0.028446495056152345, 0.028659936904907226, 0.028528863906860352, 0.029849311828613282, 0.03144684791564942, 0.028708383560180663, 0.028773088455200196, 0.028602367401123048, 0.028498079299926756, 0.02846294403076172, 0.028399616241455077, 0.028393247604370116, 0.028797119140625, 0.028454944610595702, 0.02853887939453125, 0.028515968322753906, 0.028426624298095702, 0.028711231231689453, 0.028573375701904297, 0.028591680526733398, 0.02856697654724121, 0.028788831710815428, 0.02851728057861328, 0.02855936050415039, 0.02888470458984375, 0.029911327362060546, 0.028657087326049803, 0.028666368484497072, 0.028465215682983398, 0.02851126480102539, 0.028445663452148436, 0.028424095153808594, 0.02836284828186035, 0.02853068733215332, 0.028644832611083984, 0.0286909122467041, 0.028439872741699217, 0.02961401557922363, 0.02869487953186035, 
0.02853321647644043, 0.029487104415893556, 0.028852224349975586, 0.029242944717407227, 0.028559808731079103, 0.028473184585571288, 0.02846476745605469, 0.028500511169433595, 0.028622848510742187, 0.028647424697875977, 0.028651103973388672, 0.028602783203125, 0.02874367904663086, 0.02854297637939453, 0.028637184143066406, 0.02877235221862793, 0.028469247817993162, 0.028488864898681642, 0.028562271118164062, 0.02854911994934082, 0.028726688385009767, 0.029516384124755858, 0.028747488021850585, 0.029053216934204103, 0.029009920120239258, 0.02914508819580078, 0.028837215423583983, 0.02860915184020996, 0.02886249542236328, 0.028978208541870117, 0.031196128845214843, 0.03057254409790039, 0.029245439529418944, 0.029483007431030273, 0.028788320541381834, 0.0285447998046875, 0.028705408096313476, 0.02855695915222168, 0.028792383193969727, 0.02848953628540039, 0.02895929527282715, 0.028620927810668946, 0.028474720001220703, 0.028607423782348634, 0.02855116844177246, 0.028610559463500978, 0.02899705505371094, 0.028781120300292968]",tokens/s,38.42201522599431,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,6767.423488,7769.817088,0.0,7367.294976,7351.94368,s,1,12.741939453125,12.741939453125,0.0,12.741939453125,12.741939453125,12.741939453125,12.741939453125,[12.741939453125],,kWh,0.0001642591552000037,1.811164823456318e-05,5.286865340600744e-05,0.0002352394568405743,,MB,1646.440448,8411.5456,0.0,7994.343424,7863.794176,s,10,3.0284338073730472,0.3028433807373047,0.0009454751657447772,0.3034013824462891,0.3036218139648437,0.3036933074951172,0.3037505023193359,"[0.30256051635742187, 0.30360592651367185, 0.301056640625, 0.3034053344726563, 0.3012239990234375, 0.30244915771484376, 0.30356362915039065, 0.3037648010253906, 0.3034063720703125, 0.3033974304199219]",tokens/s,845.3214310867238,kWh,8.846569199999517e-06,9.756156900101896e-07,5.855130946727264e-06,1.567731583673697e-05,tokens/kWh,16329325.929640967,MB,1655.382016,8684.17536,0.0,8266.973184,8120.408064,s,10,24.751498046875003,2.4751498046875,0.003349627317508383,2.47534521484375,2.478800634765625,2.479545947265625,2.480142197265625,"[2.4690849609375, 2.473358642578125, 2.471820068359375, 2.472419189453125, 2.476474853515625, 2.474215576171875, 2.477121337890625, 2.480291259765625, 2.478635009765625, 2.4780771484375]",tokens/s,25.453004856792525,kWh,7.241697299958683e-05,7.987631412326827e-06,4.823388454667258e-05,0.00012863848895858622,tokens/kWh,489744.5586466907,,s,630,24.74590123367307,0.03927920830741761,0.0003618952930649532,0.03927067184448242,0.03975482406616211,0.03983846321105957,0.04010098854064941,"[0.03852483367919922, 0.03845539093017578, 0.03868057632446289, 0.038620670318603514, 0.03852889633178711, 0.038536865234375, 0.03869795227050781, 0.038744064331054685, 0.038669727325439454, 0.03862764739990234, 0.038844287872314455, 0.03922294235229492, 0.039054080963134764, 
0.03888742446899414, 0.0390549430847168, 0.039069889068603515, 0.03892444610595703, 0.038862911224365235, 0.03888947296142578, 0.0391693115234375, 0.03911692810058594, 0.03905596923828125, 0.03973529434204102, 0.03945471954345703, 0.03914044952392578, 0.03899484634399414, 0.03894844818115235, 0.03888579177856445, 0.03901401519775391, 0.03895846557617187, 0.03883049774169922, 0.03883683013916016, 0.03913523101806641, 0.03904716873168945, 0.039089248657226565, 0.03928102493286133, 0.03933622360229492, 0.039534847259521486, 0.03954998397827148, 0.03953071975708008, 0.03928271865844726, 0.03950665664672852, 0.039616512298583983, 0.039824928283691406, 0.03976646423339844, 0.03950985717773438, 0.03933407974243164, 0.03940556716918946, 0.039444480895996094, 0.03963046264648438, 0.03953702545166016, 0.03941923141479492, 0.039397472381591796, 0.03932831954956055, 0.03925193786621094, 0.03926019287109375, 0.03937484741210937, 0.039392959594726565, 0.039790687561035154, 0.03978406524658203, 0.03970723342895508, 0.03981107330322266, 0.039867904663085936, 0.039446529388427735, 0.039049217224121094, 0.03892019271850586, 0.038954017639160156, 0.039029727935791014, 0.03878297424316406, 0.03861280059814453, 0.03853535842895508, 0.038569183349609376, 0.03851753616333008, 0.038623233795166016, 0.038610366821289065, 0.038563713073730466, 0.03878895950317383, 0.039015262603759766, 0.039032833099365234, 0.03901030349731445, 0.03912835311889649, 0.03916259384155273, 0.03892019271850586, 0.03910192108154297, 0.03912348937988281, 0.039012065887451174, 0.03910172653198242, 0.039449600219726565, 0.03951142501831055, 0.03930515289306641, 0.03920966339111328, 0.039600128173828124, 0.039403518676757815, 0.039337982177734376, 0.03924918365478516, 0.03915439987182617, 0.03915161514282227, 0.0393438720703125, 0.039295230865478516, 0.03922534561157227, 0.03924694442749024, 0.03908403015136719, 0.03896207809448242, 0.03911372756958008, 0.03932672119140625, 0.0393809928894043, 0.039757598876953126, 0.039508190155029294, 0.039397377014160156, 0.03937203216552734, 0.0392628173828125, 0.03984147262573242, 0.03986048126220703, 0.03971481704711914, 0.039673408508300784, 0.039555744171142576, 0.03999097442626953, 0.039823169708251956, 0.03959561538696289, 0.03954937744140625, 0.039592414855957034, 0.039775390625, 0.03970729446411133, 0.039693695068359375, 0.03974636840820313, 0.03965465545654297, 0.03911340713500976, 0.038901504516601564, 0.03872358322143555, 0.03868902587890625, 0.03887923049926758, 0.03880099105834961, 0.038906272888183595, 0.03889705657958984, 0.03879792022705078, 0.03872079849243164, 0.03944051361083984, 0.039102752685546874, 0.03897171020507813, 0.038851646423339846, 0.03884553527832031, 0.03872137451171875, 0.038735870361328126, 0.03876172637939453, 0.03890047836303711, 0.03891814422607422, 0.03884422302246094, 0.03882412719726563, 0.03912499237060547, 0.039001857757568356, 0.03920409774780274, 0.03926278305053711, 0.03914387130737305, 0.03924956893920899, 0.039299007415771484, 0.03927008056640625, 0.039277217864990235, 0.039143489837646483, 0.03901440048217773, 0.03904431915283203, 0.03922758483886719, 0.03944713592529297, 0.03950387191772461, 0.039444480895996094, 0.03944857788085938, 0.0393256950378418, 0.03979257583618164, 0.039616287231445314, 0.0393485107421875, 0.03936460876464844, 0.03930931091308594, 0.03938508987426758, 0.03941580963134766, 0.03931071853637695, 0.0395428466796875, 0.039825984954833984, 0.039600128173828124, 0.03938710403442383, 0.03942607879638672, 0.039517982482910156, 
0.03951433563232422, 0.039413726806640625, 0.039561054229736325, 0.03953206253051758, 0.03952025604248047, 0.03979740905761719, 0.03982950210571289, 0.03991686248779297, 0.03983225631713867, 0.03903120040893555, 0.038894977569580075, 0.03875526428222656, 0.03866419219970703, 0.03870105743408203, 0.03904307174682617, 0.03873756790161133, 0.038790592193603514, 0.03876752090454102, 0.03886489486694336, 0.03885446548461914, 0.038805694580078126, 0.03882732772827149, 0.03889836883544922, 0.039118209838867185, 0.039203456878662106, 0.03895040130615234, 0.038888095855712894, 0.039133022308349606, 0.03913523101806641, 0.03903833770751953, 0.0393131217956543, 0.039314334869384765, 0.03910995101928711, 0.03887295913696289, 0.03880428695678711, 0.03906969451904297, 0.03914080047607422, 0.03910863876342773, 0.039010848999023434, 0.03913113784790039, 0.03930108642578125, 0.03907497787475586, 0.0391910400390625, 0.03938137435913086, 0.03948339080810547, 0.03946700668334961, 0.03933980941772461, 0.03938489532470703, 0.039459232330322266, 0.03976704025268555, 0.0397256965637207, 0.03935884857177734, 0.039223297119140625, 0.03916185760498047, 0.039288833618164064, 0.03940556716918946, 0.039395328521728515, 0.03937484741210937, 0.0394444465637207, 0.039659393310546874, 0.03966329574584961, 0.03954940795898437, 0.03949363327026367, 0.039708671569824217, 0.03970041656494141, 0.03966128158569336, 0.03956316757202148, 0.039686592102050784, 0.03982745742797852, 0.039875873565673826, 0.039791328430175785, 0.0397209587097168, 0.03958911895751953, 0.039094207763671875, 0.03892643356323242, 0.038709888458251955, 0.03867452621459961, 0.03861503982543945, 0.038760448455810545, 0.038655265808105466, 0.03873190307617187, 0.03877948760986328, 0.0388587532043457, 0.03931340789794922, 0.03919027328491211, 0.03886105728149414, 0.03883808135986328, 0.03909212875366211, 0.03894095993041992, 0.03905945587158203, 0.03897958374023437, 0.03906880187988281, 0.03916479873657226, 0.039054561614990234, 0.038953758239746096, 0.039000064849853515, 0.039182334899902346, 0.03933900833129883, 0.039482368469238284, 0.039374431610107424, 0.03905980682373047, 0.03904431915283203, 0.039271263122558596, 0.03930112075805664, 0.03912268829345703, 0.03916211318969726, 0.03924911880493164, 0.03940576171875, 0.03946140670776367, 0.03919571304321289, 0.03930156707763672, 0.0394983024597168, 0.03957145690917969, 0.039362560272216796, 0.03933184051513672, 0.039430015563964846, 0.03946268844604492, 0.039721057891845706, 0.03956492614746094, 0.039524158477783206, 0.0394719352722168, 0.03968166351318359, 0.039667232513427735, 0.03956617736816406, 0.040021759033203125, 0.039966209411621094, 0.0398485107421875, 0.03966934585571289, 0.039516319274902345, 0.039544704437255856, 0.03973932647705078, 0.040029983520507816, 0.04001414489746094, 0.03975632095336914, 0.03974063873291016, 0.03870719909667969, 0.03872713470458984, 0.03862099075317383, 0.038709983825683594, 0.03871705627441406, 0.038908287048339846, 0.03892019271850586, 0.03880755233764648, 0.03906355285644531, 0.03894681549072266, 0.03896115112304688, 0.03896905517578125, 0.03914691162109375, 0.03901676940917969, 0.03885113525390625, 0.03891404724121094, 0.03899596786499023, 0.039000064849853515, 0.038978721618652346, 0.038896064758300784, 0.038784896850585934, 0.03924758529663086, 0.03943097686767578, 0.039290271759033206, 0.039238239288330076, 0.03911814498901367, 0.03903763198852539, 0.03910553741455078, 0.039010913848876956, 0.039354591369628905, 0.03952044677734375, 0.039374431610107424, 
0.03922771072387695, 0.03937900924682617, 0.03921913528442383, 0.03934163284301758, 0.03957609558105469, 0.03950918579101562, 0.03929740905761719, 0.039293121337890625, 0.03935232162475586, 0.03931366348266602, 0.03931340789794922, 0.03918211364746094, 0.03934230422973633, 0.039583168029785155, 0.03956383895874024, 0.03951615905761719, 0.03983564758300781, 0.039790592193603515, 0.03959379196166992, 0.039440574645996096, 0.039288833618164064, 0.039444480895996094, 0.03955507278442383, 0.03958784103393555, 0.03967574310302734, 0.03987472152709961, 0.03962879943847656, 0.03958076858520508, 0.03975465774536133, 0.03990323257446289, 0.03993804931640625, 0.03883001708984375, 0.03869292831420899, 0.038916095733642575, 0.038874336242675785, 0.03859331130981445, 0.038696319580078124, 0.03876233673095703, 0.03866396713256836, 0.03898659133911133, 0.03912515258789063, 0.03893619155883789, 0.03903657531738281, 0.038890209197998044, 0.03898559951782227, 0.03894432067871094, 0.03904774475097656, 0.039064640045166014, 0.03897439956665039, 0.03888947296142578, 0.03915913772583008, 0.03914140701293945, 0.03938777542114258, 0.03940147018432617, 0.03929190444946289, 0.03919564819335938, 0.03920220947265625, 0.03916041564941406, 0.03909632110595703, 0.03901232147216797, 0.03907939147949219, 0.039156097412109375, 0.038999679565429685, 0.03924435043334961, 0.03936460876464844, 0.03918438339233398, 0.039311359405517575, 0.03948691177368164, 0.03978707122802734, 0.03965542221069336, 0.03947315216064453, 0.039486526489257816, 0.0392817268371582, 0.03978841781616211, 0.03970767974853515, 0.0396605110168457, 0.03984076690673828, 0.039752128601074216, 0.039627326965332034, 0.03963302230834961, 0.03943564987182617, 0.03934819030761719, 0.04101583862304688, 0.039239681243896485, 0.03951123046875, 0.03956409454345703, 0.03969833755493164, 0.039566688537597657, 0.039471199035644534, 0.03964380645751953, 0.039917312622070315, 0.03982556915283203, 0.039820545196533205, 0.03960425567626953, 0.039229438781738284, 0.039120094299316406, 0.03898857498168945, 0.03887308883666992, 0.03890367889404297, 0.03880972671508789, 0.03899110412597656, 0.03902694320678711, 0.038954944610595704, 0.03875644683837891, 0.03914937591552734, 0.03908646392822265, 0.03892047882080078, 0.03885615921020508, 0.038963230133056644, 0.03888374328613281, 0.03895305633544922, 0.03892995071411133, 0.03902409744262696, 0.03906377410888672, 0.0392322883605957, 0.03926988983154297, 0.039171871185302735, 0.03976265716552734, 0.03967795181274414, 0.03951001739501953, 0.0392765121459961, 0.039068927764892576, 0.038978336334228515, 0.03914035034179687, 0.03916697692871094, 0.03920444869995117, 0.03923503875732422, 0.03926931381225586, 0.039367774963378906, 0.039381439208984376, 0.03951254272460938, 0.03969023895263672, 0.03968771362304688, 0.03949615859985352, 0.039444480895996094, 0.039329792022705076, 0.03933388900756836, 0.03944758224487305, 0.0395882568359375, 0.03969187164306641, 0.03963779067993164, 0.039575489044189456, 0.03954012680053711, 0.03966038513183594, 0.04035174560546875, 0.040212318420410155, 0.039974143981933594, 0.03979337692260742, 0.039561344146728517, 0.03952032089233398, 0.03951142501831055, 0.03949820709228516, 0.03987216186523437, 0.03988243103027344, 0.03982009506225586, 0.03991756820678711, 0.040204288482666016, 0.03914934539794922, 0.038746944427490236, 0.03911231994628906, 0.03910899353027344, 0.039007774353027345, 0.0392193603515625, 0.039064895629882815, 0.038986751556396484, 0.03915980911254883, 0.03896710586547852, 
0.03913689422607422, 0.038885025024414065, 0.03878940963745117, 0.03886336135864258, 0.038893310546875, 0.038883712768554686, 0.03906710433959961, 0.0389901123046875, 0.03906755065917969, 0.038984031677246095, 0.03907993698120117, 0.0392309455871582, 0.03932524871826172, 0.03917718505859375, 0.03917004776000976, 0.03919462585449219, 0.03924991989135742, 0.039122943878173826, 0.03915158462524414, 0.039273887634277346, 0.039244415283203125, 0.039288833618164064, 0.03991708755493164, 0.03964473724365234, 0.03962972640991211, 0.03949913787841797, 0.03930518341064453, 0.03917276763916016, 0.03926425552368164, 0.03922937774658203, 0.03918188858032227, 0.039608833312988284, 0.039626750946044925, 0.03955712127685547, 0.03981721496582031, 0.03970048141479492, 0.03967327880859375, 0.039744064331054686, 0.03967795181274414, 0.039489376068115235, 0.039377056121826175, 0.03938304138183594, 0.03937279891967774, 0.0395489273071289, 0.03971648025512695, 0.03967833709716797, 0.03963264083862305, 0.04019225692749023, 0.03993804931640625, 0.03982745742797852, 0.03977388763427735, 0.039645503997802735, 0.039933345794677735, 0.03886342239379883, 0.03893017578125, 0.038854881286621096, 0.03860201644897461, 0.03874278259277344, 0.03868672180175781, 0.03888508987426758, 0.038919937133789065, 0.03886134338378906, 0.03887308883666992, 0.0392355842590332, 0.03904307174682617, 0.03889113616943359, 0.03890419387817383, 0.039074848175048825, 0.03915187072753906, 0.039026527404785155, 0.03893519973754883, 0.03913750457763672, 0.03912704086303711, 0.039040382385253904, 0.03935430526733399, 0.039414337158203125, 0.03940121459960937, 0.03940595245361328, 0.03924787139892578, 0.03905945587158203, 0.038932479858398435, 0.038968704223632813, 0.038992446899414064, 0.039054817199707034, 0.039208671569824216, 0.03935110473632813, 0.03934799957275391, 0.039508129119873045, 0.03973542404174805, 0.039723007202148435, 0.03960969543457031, 0.03949619293212891, 0.03967388916015625, 0.03946713638305664, 0.03940556716918946, 0.039308929443359376, 0.03933427047729492, 0.03952230453491211, 0.039616512298583983, 0.03979673767089844, 0.0398131217956543, 0.03972710418701172, 0.03959750366210937, 0.03945529556274414, 0.03956099319458008, 0.0395634880065918, 0.039395328521728515, 0.039512065887451174, 0.03951001739501953, 0.03954278564453125, 0.039663200378417966, 0.03972716903686523, 0.04003385543823242, 0.040102176666259766, 0.04011030578613281, 0.04009807968139648]",tokens/s,25.458761596555828,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,11047.215104,12553.4208,0.0,12150.898688,12116.742656,s,1,17.374958984375,17.374958984375,0.0,17.374958984375,17.374958984375,17.374958984375,17.374958984375,[17.374958984375],,kWh,0.00026638753347497945,2.9377291599195945e-05,8.28420107179817e-05,0.0003786068357921571,,MB,2021.761024,13513.916416,0.0,13096.71424,12892.965376,s,10,6.35274005126953,0.635274005126953,0.0006303388391127866,0.6350317687988281,0.6361684387207032,0.6364086822509766,0.6366008770751953,"[0.634775390625, 0.6361150512695313, 0.6343910522460937, 0.63664892578125, 0.6349720458984375, 0.6353724975585937, 0.6354431762695313, 0.6349583740234375, 0.6350518188476563, 0.63501171875]",tokens/s,402.97572060868606,kWh,1.8560247330988964e-05,2.04686881637087e-06,1.2336190424500765e-05,3.29433065718606e-05,tokens/kWh,7770926.073907505,MB,2025.84064,13870.432256,0.0,13453.23008,13237.236736,s,10,43.37938134765625,4.337938134765625,0.013299954204283156,4.338426025390625,4.344648925781249,4.358195068359374,4.369031982421874,"[4.323328125, 4.32533935546875, 4.32271630859375, 4.3717412109375, 4.3379326171875, 4.338703125, 4.33959814453125, 4.33814892578125, 4.341638671875, 4.34023486328125]",tokens/s,14.523028693078363,kWh,0.00012886448709859374,1.4214455129869945e-05,8.558577680189763e-05,0.00022866471903036133,tokens/kWh,275512.550721194,,s,630,43.35546378326413,0.06881819648137168,0.0006119319921971192,0.0687756462097168,0.06942293167114258,0.06967524948120117,0.07112882720947265,"[0.06810841369628906, 0.06771737670898438, 0.06787359619140625, 0.06817683410644532, 0.06833561706542969, 0.06857878112792969, 0.06809648132324218, 0.06813702392578125, 0.06773145294189453, 0.06795263671875, 0.06788236999511718, 0.06833216094970704, 0.06849699401855469, 0.06811481475830078, 0.0684851531982422, 0.06822911834716797, 0.0680079345703125, 0.06861424255371094, 0.06842733001708984, 0.06820233917236328, 0.0681006088256836, 0.0679436798095703, 0.06795545959472657, 0.06861209869384766, 0.06821862030029296, 0.06829606628417968, 0.06848941040039062, 0.06891542053222656, 0.068751708984375, 0.06822515106201171, 0.06828166198730469, 0.06892620849609375, 0.06882093048095703, 0.06856195068359375, 0.06854959869384766, 0.06851583862304687, 0.06898470306396484, 0.06894166564941406, 0.06886224365234375, 0.06856918334960938, 0.0689552001953125, 0.06899798583984375, 0.06905350494384765, 0.0688050537109375, 0.06874905395507812, 0.06895894622802734, 0.06914784240722656, 0.06913497924804687, 0.06874543762207032, 0.06877104187011719, 0.06922115325927734, 0.06917257690429687, 0.06917919921875, 0.0694276123046875, 0.06919622039794922, 0.06917120361328125, 0.06926131439208984, 0.06913164520263672, 0.06903404998779297, 0.06900179290771484, 0.06925107574462891, 0.06930022430419921, 0.06925667572021485, 0.0686297607421875, 0.06816185760498047, 0.06789369964599609, 0.06798745727539063, 0.0681465301513672, 0.06794111633300781, 0.06812662506103516, 0.06795378875732422, 0.06789328002929687, 0.068176513671875, 0.06808940887451172, 0.06807516479492187, 0.06847369384765625, 0.06816767883300781, 0.06833993530273437, 0.06827744293212891, 0.06842649841308594, 0.06838050842285157, 0.06842556762695312, 0.06822124481201172, 0.06813699340820313, 0.06825510406494141, 0.06849187469482422, 0.06836137390136719, 0.06871485137939454, 0.06830719757080078, 0.06847103881835938, 0.06864691162109375, 0.06867558288574219, 0.06848822021484376, 0.06874620819091797, 0.06859081268310546, 0.06892022705078125, 0.06856486511230468, 
0.06848649597167969, 0.068666015625, 0.06874451446533203, 0.06854099273681641, 0.0687883529663086, 0.06948454284667968, 0.06872847747802735, 0.0689176025390625, 0.06869766235351563, 0.06909792327880859, 0.06889881896972656, 0.06888992309570313, 0.06872489929199219, 0.06887888336181641, 0.06871449279785156, 0.0687831039428711, 0.06902012634277344, 0.06879695892333984, 0.06914252471923828, 0.06911542510986328, 0.06904265594482421, 0.06885785675048828, 0.06908489227294921, 0.06899935913085938, 0.06923273468017578, 0.06966607666015626, 0.0690711669921875, 0.06909552001953125, 0.06927187347412109, 0.06883468627929687, 0.06821459197998046, 0.06803129577636718, 0.06818355560302734, 0.06810265350341797, 0.06827145385742188, 0.06814582061767578, 0.06777811431884766, 0.06846099090576171, 0.06792790222167969, 0.06794802856445313, 0.06825392150878906, 0.06799609375, 0.0678023681640625, 0.06850841522216797, 0.06897408294677734, 0.06843443298339844, 0.06839318084716797, 0.06813426971435547, 0.06846505737304688, 0.06848223876953125, 0.0683485107421875, 0.06825945281982422, 0.0680754852294922, 0.06814988708496093, 0.06833971405029297, 0.0689560317993164, 0.06856102752685547, 0.06841548919677734, 0.06859923553466797, 0.06863100433349609, 0.06880879974365234, 0.06878358459472657, 0.06824969482421875, 0.06896275329589843, 0.06848512268066406, 0.06842272186279297, 0.0685777587890625, 0.06846691131591796, 0.0689359359741211, 0.06859366607666016, 0.06878610992431641, 0.0688476791381836, 0.06894786834716797, 0.06905999755859375, 0.06900192260742187, 0.06928153228759766, 0.06876531219482422, 0.06915340423583985, 0.06881689453125, 0.06902169799804687, 0.06885990142822265, 0.06884969329833984, 0.06888803100585937, 0.0690301742553711, 0.06893590545654296, 0.06907698822021484, 0.06908060455322265, 0.06921263885498047, 0.0691568603515625, 0.06912818908691407, 0.0692490234375, 0.06922444915771485, 0.06857270050048828, 0.06829539489746093, 0.06824960327148437, 0.06826780700683593, 0.06827369689941407, 0.06905107116699219, 0.06831104278564454, 0.06827388763427734, 0.06797926330566406, 0.06777680206298828, 0.06814924621582032, 0.067989501953125, 0.06801612854003906, 0.06815948486328124, 0.06840739440917969, 0.068312255859375, 0.06816226959228516, 0.06840457916259765, 0.06860047912597657, 0.06854860687255859, 0.06811840057373048, 0.06828659057617187, 0.06838646697998046, 0.06830438232421875, 0.06867440032958984, 0.068384765625, 0.06832089233398438, 0.0681844482421875, 0.06878556823730468, 0.0687069091796875, 0.06883328247070312, 0.0688721923828125, 0.06859139251708984, 0.06892156982421875, 0.06868991851806641, 0.06850137329101562, 0.06859174346923828, 0.06874111938476563, 0.06889676666259766, 0.06878141021728515, 0.07103350067138672, 0.07105126190185547, 0.07078899383544922, 0.0708629150390625, 0.07108016204833985, 0.0712927703857422, 0.07104243469238282, 0.07091577911376953, 0.07085062408447265, 0.07084735870361328, 0.07085874938964844, 0.07078502655029296, 0.07072345733642578, 0.07116748809814454, 0.07127049255371094, 0.07088336181640625, 0.0709043197631836, 0.07114870452880859, 0.07122758483886718, 0.0726817626953125, 0.0712542724609375, 0.06915888214111328, 0.0697364501953125, 0.06862473297119141, 0.06848502349853515, 0.06831836700439453, 0.06841356658935546, 0.06851872253417969, 0.0682592010498047, 0.06795961761474609, 0.06805484771728515, 0.06812979125976562, 0.0678670425415039, 0.0682767333984375, 0.06824150085449218, 0.06829388427734374, 0.06823737335205078, 0.06824784088134765, 0.06854889678955078, 
0.06881513977050781, 0.06871842956542969, 0.06859353637695312, 0.06859379577636719, 0.06847647857666016, 0.06826627349853516, 0.06813097381591797, 0.06850559997558593, 0.06837580871582032, 0.06849817657470703, 0.06859366607666016, 0.06900847625732422, 0.06902003479003906, 0.06860345458984375, 0.06857382202148438, 0.06887254333496094, 0.06907215881347656, 0.06893830108642578, 0.06897475433349609, 0.06877587127685547, 0.06859744262695312, 0.06918592071533203, 0.06890422058105469, 0.0688544921875, 0.06895763397216798, 0.06921014404296875, 0.06906454467773437, 0.06897065734863281, 0.06905696105957031, 0.06904431915283203, 0.0695640640258789, 0.06946031951904297, 0.06931804656982422, 0.06944009399414063, 0.06907644653320312, 0.06913897705078124, 0.06899302673339844, 0.06940486145019531, 0.06923040008544921, 0.06918521881103516, 0.06945414733886719, 0.06914457702636718, 0.0692628173828125, 0.06926099395751953, 0.06973321533203125, 0.06976707458496094, 0.07001891326904297, 0.06824018859863282, 0.06840525054931641, 0.06816153717041015, 0.06825708770751954, 0.06869789123535157, 0.06857753753662109, 0.06816015625, 0.06787686157226562, 0.06799468994140626, 0.06810284423828125, 0.06835625457763672, 0.06845164489746093, 0.06836099243164062, 0.06873292541503906, 0.06841753387451172, 0.0688617935180664, 0.06850576019287109, 0.06848512268066406, 0.06870015716552734, 0.06886809539794922, 0.06890828704833984, 0.06848323059082032, 0.0683853759765625, 0.06871449279785156, 0.06846873474121094, 0.06844825744628906, 0.06845439910888672, 0.0687083511352539, 0.06861766052246093, 0.06850339508056641, 0.06897942352294922, 0.06888652801513671, 0.06878380584716796, 0.06890553283691406, 0.06891648101806641, 0.06892924499511718, 0.0688523178100586, 0.06868582153320313, 0.06907901000976563, 0.06900128173828125, 0.06902569580078124, 0.07017810821533203, 0.06928995513916016, 0.06890185546875, 0.069212158203125, 0.06893772888183594, 0.06936166381835937, 0.06927750396728516, 0.06916934204101563, 0.06914867401123047, 0.06910771179199218, 0.06887833404541016, 0.06909040069580077, 0.06896896362304687, 0.0693375015258789, 0.06913228607177735, 0.06934528350830078, 0.06920601654052734, 0.06922425842285156, 0.06963629150390625, 0.06924492645263672, 0.06984294128417969, 0.06931798553466798, 0.06906089782714844, 0.06837232208251953, 0.06793555450439454, 0.06819900512695312, 0.06847081756591797, 0.06833763122558593, 0.06874918365478516, 0.0681448974609375, 0.06810854339599609, 0.06830863952636719, 0.06831996917724609, 0.06853427124023438, 0.06835609436035156, 0.06823321533203125, 0.06855884552001953, 0.06827196502685547, 0.06907078552246093, 0.0685723876953125, 0.06838374328613281, 0.06872268676757813, 0.06863053131103515, 0.06860390472412109, 0.06836838531494141, 0.06880255889892578, 0.06849696350097656, 0.06868422698974609, 0.06895001220703124, 0.06853632354736328, 0.06859696197509765, 0.06886614227294922, 0.06879634857177734, 0.06882572937011719, 0.06883261108398438, 0.06890889739990234, 0.06874822235107422, 0.06881670379638671, 0.06888784027099609, 0.06873900604248047, 0.07004975891113281, 0.06932787322998046, 0.06879436492919921, 0.06880870056152344, 0.06903513336181641, 0.06901235198974609, 0.06896640014648438, 0.06912393951416015, 0.06925737762451172, 0.06903158569335938, 0.069542236328125, 0.06917478179931641, 0.06895871734619141, 0.068927490234375, 0.06954998779296875, 0.06950249481201172, 0.06907939147949219, 0.06904985809326172, 0.06911603546142578, 0.06943360137939453, 0.06925532531738281, 0.06969744110107422, 
0.06944777679443359, 0.06922780609130859, 0.06955014038085937, 0.06851686096191406, 0.06808758544921875, 0.06797042846679688, 0.06842169952392578, 0.06872144317626953, 0.06833939361572265, 0.0679466552734375, 0.06798486328125, 0.06796550750732422, 0.06813299560546875, 0.06833971405029297, 0.06832284545898437, 0.0685060806274414, 0.06847283172607421, 0.06854243469238282, 0.06876573181152344, 0.06831718444824218, 0.0688345947265625, 0.06885596466064453, 0.06844882965087891, 0.06833766174316407, 0.06845407867431641, 0.0686226577758789, 0.06840255737304687, 0.06876998138427734, 0.06863113403320313, 0.06862989044189453, 0.068669921875, 0.06854844665527343, 0.06872284698486328, 0.06869116973876953, 0.06868851470947265, 0.06922051239013671, 0.06893158721923828, 0.06880358123779297, 0.06873395538330078, 0.0689450912475586, 0.06902003479003906, 0.06903443145751953, 0.06901471710205079, 0.06903014373779297, 0.06895878601074219, 0.06909935760498047, 0.0693638687133789, 0.06910546875, 0.06913043212890625, 0.06949292755126953, 0.06942291259765625, 0.0689208984375, 0.06976156616210938, 0.06892124938964844, 0.0690951690673828, 0.06929366302490235, 0.06896502685546875, 0.06910505676269531, 0.06912470245361328, 0.06936780548095703, 0.069410400390625, 0.06915267181396484, 0.06943590545654296, 0.0695889892578125, 0.06979788970947266, 0.06928572845458984, 0.06915116882324218, 0.0682760009765625, 0.06850975799560546, 0.06827228546142577, 0.06812057495117188, 0.06845388793945313, 0.0683873291015625, 0.06816767883300781, 0.06847596740722656, 0.06835295867919922, 0.06836019134521484, 0.06831715393066407, 0.06836380767822266, 0.06857344055175782, 0.06853673553466796, 0.0682936019897461, 0.06892173004150391, 0.06909184265136718, 0.0686592025756836, 0.06868544006347656, 0.06864934539794922, 0.06862595367431641, 0.06855625915527344, 0.06832844543457031, 0.06851500701904296, 0.06897337341308593, 0.06873414611816406, 0.06849110412597656, 0.06892179107666016, 0.06887235260009765, 0.06915277099609375, 0.06889612579345702, 0.06863155364990234, 0.06878412628173829, 0.0686919708251953, 0.06912726593017578, 0.06893628692626953, 0.06852243041992187, 0.06891072082519531, 0.06899890899658204, 0.06893990325927735, 0.06915478515625, 0.06918585968017578, 0.06917334747314453, 0.06920396423339843, 0.06933708953857422, 0.0692490234375, 0.0692257308959961, 0.06916786956787109, 0.06955622100830078, 0.06926060485839844, 0.06903206634521485, 0.06898473358154297, 0.06917705535888671, 0.06912300872802735, 0.06942310333251953, 0.06938575744628907, 0.0695253448486328, 0.06954662322998047, 0.06950297546386719, 0.06946326446533203, 0.06956060791015625, 0.06922489929199219, 0.06860205078125, 0.06819840240478516, 0.06784204864501953, 0.06839705657958985, 0.06860787200927734, 0.06802854156494141, 0.06795030212402343, 0.06832259368896484, 0.06815984344482422, 0.06839955139160156, 0.06802864074707031, 0.06819805145263672, 0.06835340881347657, 0.06885065460205078, 0.06863667297363281, 0.06864482879638673, 0.06879824066162109, 0.06855705261230469, 0.06877542114257812, 0.0686056671142578, 0.06887468719482422, 0.06864860534667969, 0.06821139526367187, 0.06890290832519531, 0.0686592025756836, 0.06849081420898437, 0.0685588150024414, 0.06893801879882812, 0.06884575653076172, 0.06901760101318359, 0.06864281463623047, 0.06890105438232422, 0.06896825408935547, 0.06891929626464843, 0.06881279754638672, 0.06873056030273438, 0.0686448974609375, 0.06858576202392579, 0.06884502410888672, 0.06902156829833984, 0.06896073913574219, 0.06937619018554687, 
0.06918125152587891, 0.06912150573730469, 0.06946233367919921, 0.0691081314086914, 0.06935247802734375, 0.06932713317871093, 0.06927839660644532, 0.06898847961425782, 0.06941535949707031, 0.06894096374511718, 0.06910243225097656, 0.0688695068359375, 0.06955270385742188, 0.069476318359375, 0.06936380767822266, 0.06938214111328125, 0.06947392272949218, 0.06945420837402344, 0.06947203063964844, 0.06955622100830078, 0.06968275451660157]",tokens/s,14.531040496980895,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1584.631808,1570.635776,0.0,1201.668096,1189.151232,s,1,8.403615234375,8.403615234375,0.0,8.403615234375,8.403615234375,8.403615234375,8.403615234375,[8.403615234375],,kWh,3.511641749587397e-05,3.8660726060268e-06,1.1006119915987767e-05,4.998861001788854e-05,,MB,1708.802048,1799.225344,0.0,1382.023168,1351.367168,s,10,0.4759820137023926,0.047598201370239256,0.0001626858909872711,0.047567728042602536,0.04776416168212891,0.047842943954467775,0.047905969772338866,"[0.04792172622680664, 0.04764956665039063, 0.0474736328125, 0.047566303253173826, 0.04730271911621094, 0.04756915283203125, 0.047480705261230466, 0.04771945571899414, 0.04755209732055664, 0.047746654510498046]",tokens/s,5378.35448883293,kWh,1.4359977920954476e-06,1.5830856897339233e-07,9.491061514412116e-07,2.543412512510052e-06,tokens/kWh,100652174.48637846,MB,1717.399552,1841.168384,0.0,1423.966208,1407.328256,s,10,13.668268920898438,1.3668268920898439,0.0032423993119370872,1.3658818359375,1.3711612548828125,1.3717023315429686,1.3721351928710936,"[1.3639447021484374, 1.3659415283203125, 1.371041015625, 1.3658221435546876, 1.368722412109375, 1.372243408203125, 1.364673828125, 1.3626820068359375, 1.363250732421875, 1.3699471435546875]",tokens/s,46.092157218003365,kWh,3.999733017831921e-05,4.4113654140000186e-06,1.8718270693559428e-05,6.312696628587866e-05,tokens/kWh,997988.7155466386,,s,630,13.665357463836683,0.02169104359339154,0.00028029880422812236,0.021626880645751953,0.021889948654174804,0.022095055866241456,0.02298744827270508,"[0.021407743453979493, 0.021522432327270507, 0.021439775466918946, 0.021703392028808593, 0.021549055099487305, 0.02185625648498535, 0.021590015411376954, 0.021600255966186522, 0.02162483215332031, 0.021727231979370116, 0.02166912078857422, 0.021635839462280274, 0.02167398452758789, 0.021646976470947266, 0.02236240005493164, 0.021833824157714843, 0.021702655792236326, 0.02186854362487793, 0.021833375930786134, 0.021815647125244142, 0.021706560134887695, 0.021688512802124024, 0.021515487670898437, 0.02188982391357422, 0.021624000549316406, 0.02158880043029785, 0.02165555191040039, 0.021567039489746094, 0.02147577667236328, 0.021526527404785157, 0.021589056015014648, 0.021528671264648438, 0.02152534484863281, 0.02147123146057129, 0.021591392517089844, 0.021532703399658203, 0.0215599365234375, 0.021570623397827147, 0.02158870315551758, 0.021737184524536133, 0.02177027130126953, 0.021690847396850586, 0.0216944637298584, 0.021557247161865235, 0.021565439224243164, 0.021632640838623048, 0.021634687423706056, 0.02166655921936035, 0.021575679779052736, 0.021523584365844728, 0.021496543884277342, 0.021530752182006837, 0.021753055572509766, 0.021544767379760743, 0.021530656814575194, 0.021478048324584963, 0.02167740821838379, 
0.021571968078613283, 0.021611103057861326, 0.021835712432861327, 0.021811264038085938, 0.021872608184814454, 0.021693599700927733, 0.021999807357788087, 0.02181990432739258, 0.021552352905273436, 0.021525279998779297, 0.021456895828247072, 0.021493759155273438, 0.021562688827514647, 0.021539520263671875, 0.02152038383483887, 0.02153267288208008, 0.021643264770507813, 0.02166783905029297, 0.02153628730773926, 0.02160073661804199, 0.021590015411376954, 0.02163302421569824, 0.021534719467163087, 0.021570655822753908, 0.021603231430053712, 0.021563392639160156, 0.02157151985168457, 0.021856319427490233, 0.02168832015991211, 0.021608448028564452, 0.02149990463256836, 0.02166374397277832, 0.021703680038452147, 0.021566463470458985, 0.02152038383483887, 0.021561344146728514, 0.021669599533081056, 0.0215546875, 0.021560096740722658, 0.02163302421569824, 0.021690048217773438, 0.021641536712646483, 0.021770240783691407, 0.022022144317626953, 0.02185420799255371, 0.021656831741333007, 0.021590688705444335, 0.021557344436645507, 0.021583871841430666, 0.021622783660888673, 0.021712799072265625, 0.021645408630371094, 0.02167913627624512, 0.022604768753051757, 0.022424703598022462, 0.02210700798034668, 0.022042015075683593, 0.02178268814086914, 0.021679935455322267, 0.021688959121704102, 0.02163865661621094, 0.021727680206298828, 0.02155939292907715, 0.02163705635070801, 0.021684032440185547, 0.02164963150024414, 0.02164531135559082, 0.021630495071411134, 0.02158639907836914, 0.02192793655395508, 0.02202009582519531, 0.02224742317199707, 0.02212777519226074, 0.021777248382568358, 0.021572959899902343, 0.021572256088256837, 0.02168988800048828, 0.021528863906860353, 0.02152262306213379, 0.021581663131713866, 0.021614751815795898, 0.021598207473754884, 0.021765888214111326, 0.02163849639892578, 0.021660575866699217, 0.02147327995300293, 0.02150809669494629, 0.021421695709228517, 0.021657312393188476, 0.022006431579589845, 0.023037952423095705, 0.021774335861206053, 0.021835615158081054, 0.021729440689086915, 0.021743776321411133, 0.021601791381835937, 0.02163337516784668, 0.02370969581604004, 0.023240703582763672, 0.021843967437744142, 0.0216494083404541, 0.021733375549316408, 0.021901311874389647, 0.02161408042907715, 0.02161305618286133, 0.02162892723083496, 0.021594112396240234, 0.02161664009094238, 0.021579776763916016, 0.021549055099487305, 0.021624383926391603, 0.021580095291137694, 0.021544960021972655, 0.02166592025756836, 0.021753856658935547, 0.0216944637298584, 0.02162073516845703, 0.021739456176757814, 0.021725248336791993, 0.02157695960998535, 0.021594879150390624, 0.0216760311126709, 0.021632511138916014, 0.021637088775634767, 0.02190185546875, 0.021567487716674806, 0.02166579246520996, 0.021757055282592773, 0.021522432327270507, 0.02155404853820801, 0.021626880645751953, 0.021610496520996093, 0.021438207626342774, 0.022194080352783203, 0.0225994873046875, 0.022131488800048827, 0.021884927749633788, 0.021753856658935547, 0.021843967437744142, 0.0216964168548584, 0.021827648162841797, 0.02159823989868164, 0.02159401512145996, 0.021620832443237304, 0.022241247177124022, 0.021644351959228515, 0.021642208099365234, 0.021639167785644533, 0.021691808700561522, 0.021686880111694336, 0.021682111740112305, 0.021628992080688476, 0.021716991424560548, 0.021624383926391603, 0.021641664505004883, 0.021626880645751953, 0.021551103591918946, 0.02168422317504883, 0.021581823348999024, 0.021604352951049805, 0.02173936080932617, 0.021776287078857422, 0.02184217643737793, 0.02165555191040039, 
0.02170204734802246, 0.02157423973083496, 0.021626207351684572, 0.02154870414733887, 0.021568511962890623, 0.021659423828125, 0.021610719680786133, 0.021611839294433593, 0.02171139144897461, 0.021501312255859373, 0.021564191818237304, 0.0215285758972168, 0.021692256927490234, 0.02150726318359375, 0.021469152450561524, 0.02146406364440918, 0.021501951217651367, 0.02143436813354492, 0.021581375122070312, 0.02148192024230957, 0.021612543106079102, 0.021710847854614256, 0.02150739288330078, 0.02161734390258789, 0.02158527946472168, 0.021713535308837892, 0.021749759674072267, 0.02162483215332031, 0.021700607299804688, 0.021559072494506837, 0.021582048416137697, 0.021544832229614258, 0.021548959732055666, 0.021557952880859373, 0.021485567092895508, 0.02146713638305664, 0.021436416625976562, 0.021534719467163087, 0.021704095840454102, 0.02158857536315918, 0.02154607963562012, 0.021686336517333985, 0.021541248321533202, 0.021566944122314455, 0.021633056640625, 0.021607391357421873, 0.021712896347045898, 0.021696512222290038, 0.021909503936767577, 0.021944320678710938, 0.021784576416015625, 0.021575679779052736, 0.021741216659545898, 0.021770143508911134, 0.021823936462402344, 0.021765439987182618, 0.021975744247436525, 0.02289459228515625, 0.021933792114257812, 0.02174390411376953, 0.021725183486938478, 0.02164691162109375, 0.021549280166625977, 0.021602527618408203, 0.021700607299804688, 0.02163711929321289, 0.0216428165435791, 0.021627328872680665, 0.02149171257019043, 0.02168614387512207, 0.022005887985229493, 0.021809152603149414, 0.021706592559814452, 0.02205411148071289, 0.021908000946044923, 0.021878528594970702, 0.02222127914428711, 0.02176223945617676, 0.02162892723083496, 0.02231091117858887, 0.021575679779052736, 0.02165350341796875, 0.021585727691650392, 0.021620832443237304, 0.02174332809448242, 0.021675775527954102, 0.021635295867919922, 0.021615007400512695, 0.021583871841430666, 0.02165555191040039, 0.021710048675537108, 0.021596960067749024, 0.02169241523742676, 0.02161664009094238, 0.021493152618408205, 0.021549087524414062, 0.021717567443847657, 0.021718687057495117, 0.02164361572265625, 0.021573631286621094, 0.02162073516845703, 0.021811199188232423, 0.021648639678955077, 0.021577535629272462, 0.021549375534057617, 0.02212518310546875, 0.02353561592102051, 0.02466409683227539, 0.02191049575805664, 0.021789695739746092, 0.02172313690185547, 0.02169241523742676, 0.02162249565124512, 0.021635360717773437, 0.021610496520996093, 0.021612255096435547, 0.021633312225341796, 0.02167919921875, 0.021742496490478515, 0.02196272087097168, 0.021810239791870117, 0.021774335861206053, 0.02166032028198242, 0.021704095840454102, 0.022538751602172852, 0.021623199462890624, 0.021626207351684572, 0.02168899154663086, 0.021655296325683592, 0.02159846305847168, 0.02170675277709961, 0.021796415328979492, 0.02170719909667969, 0.021782112121582032, 0.021720607757568358, 0.021772960662841796, 0.02163929557800293, 0.02169660758972168, 0.021683807373046874, 0.021601760864257812, 0.02158892822265625, 0.02179452705383301, 0.02163711929321289, 0.021769760131835937, 0.021815935134887696, 0.02166489601135254, 0.02163609504699707, 0.021585920333862304, 0.02158585548400879, 0.02150579261779785, 0.022261280059814453, 0.02171539115905762, 0.02164975929260254, 0.021595199584960936, 0.02153772735595703, 0.021550687789916992, 0.021492128372192384, 0.021577375411987305, 0.02166204833984375, 0.021601343154907228, 0.021550016403198244, 0.02151628875732422, 0.021421920776367186, 0.021463199615478514, 
0.021876735687255858, 0.021763999938964843, 0.021590015411376954, 0.02153071975708008, 0.021499807357788087, 0.02141391944885254, 0.021624895095825197, 0.021518335342407227, 0.021489664077758788, 0.02151206398010254, 0.021620223999023438, 0.021645952224731445, 0.021587968826293946, 0.02150339126586914, 0.021504608154296875, 0.021523744583129882, 0.021641952514648437, 0.021432319641113282, 0.021526527404785157, 0.021501951217651367, 0.021583871841430666, 0.021398591995239257, 0.021679040908813476, 0.021553152084350585, 0.021639167785644533, 0.021557279586791992, 0.021595680236816406, 0.021580223083496095, 0.021537952423095703, 0.02156220817565918, 0.021514240264892577, 0.021553152084350585, 0.021557247161865235, 0.022237056732177733, 0.021731456756591796, 0.021622783660888673, 0.021608448028564452, 0.0216711368560791, 0.021541568756103517, 0.021592159271240235, 0.021511520385742187, 0.021653823852539063, 0.021780511856079102, 0.022018112182617188, 0.021967103958129883, 0.021822879791259766, 0.021795103073120117, 0.021954879760742188, 0.022211904525756835, 0.02209452819824219, 0.02209548759460449, 0.0217706241607666, 0.02166783905029297, 0.022517759323120116, 0.021635072708129883, 0.02153379249572754, 0.021573631286621094, 0.021536767959594725, 0.021499519348144532, 0.021560800552368163, 0.021463968276977538, 0.021384416580200197, 0.02148428726196289, 0.021468832015991212, 0.021451135635375977, 0.02143846321105957, 0.02172313690185547, 0.021476608276367187, 0.021424831390380858, 0.021547071456909178, 0.021549055099487305, 0.02149718475341797, 0.021445280075073243, 0.021526527404785157, 0.02189107131958008, 0.021626880645751953, 0.021762048721313477, 0.021783647537231447, 0.02165225601196289, 0.02162601661682129, 0.021560287475585936, 0.0216494083404541, 0.021489664077758788, 0.021536767959594725, 0.021962751388549806, 0.024057855606079103, 0.0224768009185791, 0.021733375549316408, 0.021598207473754884, 0.021571584701538086, 0.02152390480041504, 0.021578304290771483, 0.02162220764160156, 0.02165203285217285, 0.02184806442260742, 0.021630592346191406, 0.021665567398071288, 0.02158038330078125, 0.02150399971008301, 0.02147871971130371, 0.02156819152832031, 0.02155513572692871, 0.02154911994934082, 0.021522432327270507, 0.02149760055541992, 0.02145510482788086, 0.021598207473754884, 0.021534719467163087, 0.02151603126525879, 0.021567487716674806, 0.021542303085327147, 0.02155561637878418, 0.02153900718688965, 0.021512447357177736, 0.021571584701538086, 0.021591360092163087, 0.021441215515136718, 0.02150921630859375, 0.021498783111572266, 0.021929632186889647, 0.02171939277648926, 0.021763328552246095, 0.0217259521484375, 0.021953792572021485, 0.02147929573059082, 0.021541759490966796, 0.021551103591918946, 0.021526527404785157, 0.021374975204467773, 0.02146099281311035, 0.021374975204467773, 0.021419776916503906, 0.021548448562622072, 0.021556032180786132, 0.021683584213256835, 0.02166032028198242, 0.02181331253051758, 0.02183366394042969, 0.02169980812072754, 0.021728031158447264, 0.021747200012207032, 0.021705055236816408, 0.021678239822387695, 0.02154431915283203, 0.02160908889770508, 0.02173689651489258, 0.02161827278137207, 0.02154582405090332, 0.02155353546142578, 0.021599136352539062, 0.02171126365661621, 0.021615039825439452, 0.021596160888671875, 0.0216180477142334, 0.021617279052734376, 0.021743392944335936, 0.021747392654418947, 0.021584415435791017, 0.02158380889892578, 0.02158188819885254, 0.02155104064941406, 0.021654815673828126, 0.02160892868041992, 0.021550912857055664, 
0.02182102394104004, 0.021582752227783202, 0.021591039657592775, 0.02165043258666992, 0.02167398452758789, 0.021679231643676758, 0.021611391067504884, 0.021606399536132814, 0.021494943618774413, 0.02192860794067383, 0.021926080703735352, 0.021569536209106444, 0.021567487716674806, 0.021599615097045898, 0.021512832641601563, 0.02157904052734375, 0.02157209587097168, 0.02161257553100586, 0.021881088256835938, 0.021916959762573244, 0.021592256546020507, 0.022134880065917968, 0.022038976669311525, 0.022003551483154297, 0.022106271743774414, 0.0221265926361084, 0.021839872360229492, 0.021589279174804688, 0.021525215148925782, 0.021585119247436522, 0.021750015258789064, 0.02154960060119629, 0.021606399536132814, 0.021555200576782226, 0.021518335342407227, 0.021599584579467774, 0.021777055740356446, 0.02166169548034668, 0.021575679779052736, 0.021611520767211914, 0.02167830467224121, 0.021715744018554688, 0.021514240264892577, 0.023025375366210937, 0.022531967163085937, 0.021700927734375, 0.02165894317626953, 0.022059423446655273, 0.02187241554260254, 0.021807712554931642, 0.021718080520629884, 0.021750207901000976, 0.021633535385131835, 0.02161664009094238, 0.021710687637329102, 0.021766271591186524, 0.021634368896484374, 0.02167190361022949, 0.02152524757385254, 0.021559295654296876, 0.021496864318847658, 0.021611488342285157, 0.02167398452758789, 0.021618112564086914, 0.02188857650756836, 0.021840576171875, 0.021827903747558594, 0.021725183486938478, 0.021708480834960936, 0.02167430305480957, 0.021792064666748046, 0.02170719909667969, 0.021671199798583986, 0.021584863662719726, 0.021700607299804688, 0.021579776763916016, 0.021616575241088867, 0.021536512374877928, 0.021558719635009764, 0.02156563186645508, 0.021635776519775392]",tokens/s,46.101977329696716,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,813.334528,561.905664,0.0,159.383552,143.673856,s,1,7.50515478515625,7.50515478515625,0.0,7.50515478515625,7.50515478515625,7.50515478515625,7.50515478515625,[7.50515478515625],,kWh,1.1158478420854105e-05,1.2228986377304952e-06,3.6277806799989154e-06,1.6009157738583516e-05,,MB,1304.133632,616.431616,0.0,199.22944,186.684928,s,31,0.20093119907379153,0.006481651583025533,0.00010389777823592195,0.006445312023162842,0.006552927970886231,0.006615776062011719,0.006871209526062011,"[0.006973631858825683, 0.006423295974731446, 0.006409503936767578, 0.006398623943328858, 0.006476160049438477, 0.006443071842193604, 0.006445312023162842, 0.0064759359359741215, 0.006441184043884277, 0.006486112117767334, 0.006502399921417237, 0.0064174079895019534, 0.006491487979888916, 0.0064382081031799316, 0.006444416046142578, 0.006467167854309082, 0.006540671825408935, 0.006471487998962402, 0.006430975914001465, 0.006425663948059082, 0.006440703868865967, 0.006429535865783691, 0.0064618239402770995, 0.006448416233062744, 0.006552927970886231, 0.006467072010040284, 
0.006432608127593994, 0.006438111782073975, 0.00659932804107666, 0.006632224082946777, 0.006425727844238281]",tokens/s,39496.106311919844,kWh,1.905631963776542e-07,2.10157273710509e-08,8.305072323708438e-08,2.9462964698578946e-07,tokens/kWh,868887440.9585379,MB,1317.588992,620.62592,0.0,203.423744,186.687488,s,31,10.048262298583984,0.3241374935027092,0.0024631135803904456,0.32332138061523436,0.32610025024414063,0.32665461730957035,0.33285871582031246,"[0.33548345947265623, 0.3229282531738281, 0.3227763671875, 0.32332138061523436, 0.3248694152832031, 0.3230039367675781, 0.32282720947265625, 0.32302767944335936, 0.32409942626953125, 0.3231819763183594, 0.3229313659667969, 0.3230043029785156, 0.3240220031738281, 0.3265749206542969, 0.3229845581054688, 0.3231822509765625, 0.32673431396484376, 0.3237119140625, 0.32159268188476564, 0.32234414672851563, 0.3259073486328125, 0.32258786010742185, 0.32466278076171873, 0.32340029907226564, 0.323005126953125, 0.32610025024414063, 0.3218153076171875, 0.323631103515625, 0.3234344787597656, 0.325078369140625, 0.3260378112792969]",tokens/s,194.3619644836719,kWh,9.273638420557685e-06,1.0227241197393212e-06,3.365724437214853e-06,1.3662086977511862e-05,tokens/kWh,4611301.340981036,,s,1953,10.033810359954812,0.005137639713238522,0.00013653811097587233,0.005112095832824707,0.005185254287719727,0.005239487934112549,0.005743982048034668,"[0.005298208236694336, 0.005466527938842773, 0.005439487934112549, 0.005302048206329345, 0.0052820158004760745, 0.00522649621963501, 0.005246848106384277, 0.0053208317756652836, 0.005187583923339844, 0.0051998720169067385, 0.005222239971160889, 0.005154975891113281, 0.005158912181854248, 0.00514899206161499, 0.005467840194702148, 0.0051383042335510255, 0.005202047824859619, 0.005142528057098389, 0.005130591869354248, 0.005101215839385986, 0.005177120208740235, 0.005129792213439941, 0.005271903991699219, 0.00520630407333374, 0.005162432193756104, 0.005194272041320801, 0.005191743850708008, 0.005169151782989502, 0.005134335994720459, 0.005189631938934326, 0.005213247776031494, 0.006446176052093506, 0.006696864128112793, 0.006758336067199707, 0.0067338237762451176, 0.006879327774047851, 0.005666719913482666, 0.005160192012786865, 0.00514739179611206, 0.0051439681053161625, 0.00518614387512207, 0.005152768135070801, 0.005144576072692871, 0.005179711818695068, 0.005193120002746582, 0.00514025592803955, 0.00513043212890625, 0.005146944046020508, 0.005152512073516846, 0.0051274561882019044, 0.005151711940765381, 0.005197824001312256, 0.005197824001312256, 0.005183487892150879, 0.005152768135070801, 0.005137792110443115, 0.005122687816619873, 0.005136032104492188, 0.005122399806976318, 0.0051036162376403805, 0.005215231895446777, 0.005118144035339355, 0.005124927997589112, 0.005079040050506592, 0.0050954241752624516, 0.005138336181640625, 0.005122144222259522, 0.0050954241752624516, 0.0051151041984558105, 0.005133088111877441, 0.005122047901153564, 0.005095615863800049, 0.005111616134643554, 0.005122047901153564, 0.005109151840209961, 0.005108320236206055, 0.005130239963531494, 0.005134335994720459, 0.005107711791992187, 0.005121056079864502, 0.005158175945281982, 0.005120704174041748, 0.005129216194152832, 0.005119999885559082, 0.005107711791992187, 0.005114175796508789, 0.005107391834259034, 0.005164447784423828, 0.005108320236206055, 0.005119999885559082, 0.005144576072692871, 0.005125408172607422, 0.005122719764709472, 0.0051131839752197265, 0.005133024215698242, 0.00511081600189209, 0.005100512027740479, 0.005117951869964599, 
0.0051027522087097165, 0.005116767883300781, 0.0050924158096313475, 0.005141600131988526, 0.005129983901977539, 0.005095488071441651, 0.005095456123352051, 0.005122047901153564, 0.005134335994720459, 0.005085279941558838, 0.005124000072479248, 0.005091328144073487, 0.005107711791992187, 0.005253215789794922, 0.005144480228424072, 0.005115647792816162, 0.005107711791992187, 0.005175295829772949, 0.005103968143463135, 0.00509494400024414, 0.0050804481506347655, 0.005114880084991455, 0.005089280128479004, 0.005164383888244629, 0.005124000072479248, 0.005121088027954102, 0.005098336219787598, 0.005092192173004151, 0.005026336193084717, 0.0051294717788696285, 0.005112383842468262, 0.005079071998596191, 0.005079008102416992, 0.005101568222045898, 0.005106847763061523, 0.005110591888427734, 0.005128223896026612, 0.005141727924346924, 0.005100575923919678, 0.005088128089904785, 0.0051289920806884765, 0.0051327037811279294, 0.005096831798553467, 0.005125631809234619, 0.005138527870178222, 0.005092063903808594, 0.005085279941558838, 0.0051233282089233395, 0.005126368045806884, 0.005079040050506592, 0.005095871925354004, 0.005134335994720459, 0.005123616218566895, 0.005085728168487549, 0.005136672019958496, 0.005095071792602539, 0.0050769920349121095, 0.005089280128479004, 0.005138527870178222, 0.005103583812713623, 0.005154848098754883, 0.005154719829559326, 0.0050974078178405765, 0.005074687957763672, 0.005077407836914062, 0.005173344135284424, 0.0051253437995910645, 0.0051775679588317875, 0.005109119892120362, 0.005126751899719238, 0.00512656021118164, 0.005113823890686035, 0.005117536067962647, 0.005122560024261475, 0.005107679843902588, 0.00530838394165039, 0.005145792007446289, 0.005094431877136231, 0.005100607872009277, 0.0051145920753479, 0.005111104011535645, 0.005113952159881592, 0.005075808048248291, 0.005115231990814209, 0.005101280212402344, 0.005095647811889648, 0.0051331200599670414, 0.0051216320991516115, 0.005091392040252686, 0.005104928016662597, 0.005157536029815674, 0.005041759967803955, 0.005088831901550293, 0.005219168186187744, 0.005490143775939941, 0.005116064071655274, 0.005228928089141846, 0.005111999988555908, 0.005135744094848633, 0.005120448112487793, 0.00510969591140747, 0.005156544208526612, 0.0051101441383361815, 0.005117951869964599, 0.005180672168731689, 0.005133056163787842, 0.005125887870788574, 0.0051073598861694335, 0.005116511821746826, 0.005145919799804687, 0.0051101441383361815, 0.005085504055023194, 0.0051036162376403805, 0.0051199040412902835, 0.0052139201164245605, 0.0051569280624389644, 0.00512556791305542, 0.005122047901153564, 0.0050819840431213376, 0.005112959861755371, 0.00507539176940918, 0.0050631041526794434, 0.005096896171569824, 0.005108287811279297, 0.005183328151702881, 0.0050850238800048825, 0.005117792129516602, 0.005079520225524902, 0.0050871682167053225, 0.005095488071441651, 0.0051066880226135255, 0.0050835199356079105, 0.005073535919189453, 0.005148191928863525, 0.005109983921051026, 0.005100927829742432, 0.005073791980743409, 0.005107264041900635, 0.005132544040679932, 0.005089471817016602, 0.005093152046203613, 0.005101568222045898, 0.005107840061187744, 0.00518287992477417, 0.0051422080993652345, 0.00512662410736084, 0.0051099519729614256, 0.0051019201278686525, 0.005150720119476319, 0.005123551845550537, 0.005083775997161865, 0.005136288166046142, 0.0051363840103149415, 0.005162720203399658, 0.005079648017883301, 0.006504447937011719, 0.005146624088287354, 0.005119999885559082, 0.005119872093200683, 0.005099936008453369, 
0.005150400161743164, 0.005197023868560791, 0.005120639801025391, 0.005089183807373047, 0.005074272155761719, 0.005141791820526123, 0.005123744010925293, 0.005082623958587646, 0.005071104049682617, 0.005135839939117432, 0.005101439952850342, 0.005081600189208985, 0.0050689601898193356, 0.005128575801849365, 0.005105535984039306, 0.005077311992645264, 0.005133376121520996, 0.005767327785491943, 0.005350944042205811, 0.005120031833648682, 0.005129119873046875, 0.005196832180023194, 0.0050843839645385745, 0.005115647792816162, 0.005066751956939697, 0.005086239814758301, 0.0051027522087097165, 0.005096864223480224, 0.005080959796905518, 0.0050795841217041015, 0.005150815963745117, 0.0051158080101013186, 0.00506060791015625, 0.005077280044555664, 0.005078495979309082, 0.00506496000289917, 0.005068863868713379, 0.005107200145721436, 0.005087456226348877, 0.00512559986114502, 0.005077151775360107, 0.00513651180267334, 0.005124576091766357, 0.005111807823181152, 0.005292031764984131, 0.00513647985458374, 0.0051158080101013186, 0.005070400238037109, 0.005151167869567871, 0.005118207931518555, 0.005132031917572021, 0.005230591773986816, 0.005138432025909424, 0.005067999839782715, 0.00506060791015625, 0.005185535907745361, 0.0050982718467712405, 0.005499743938446045, 0.005097248077392578, 0.0051550078392028804, 0.005134367942810058, 0.005105696201324463, 0.005118015766143799, 0.005134528160095215, 0.005129055976867676, 0.005097472190856934, 0.005116799831390381, 0.005092991828918457, 0.005148767948150635, 0.005109119892120362, 0.005102496147155761, 0.005095104217529297, 0.005095232009887696, 0.0051140799522399905, 0.005124063968658448, 0.005089727878570557, 0.005111199855804443, 0.0051179838180541995, 0.005117663860321045, 0.005094399929046631, 0.005107423782348633, 0.0051017279624938966, 0.005124063968658448, 0.005088448047637939, 0.005134528160095215, 0.005095456123352051, 0.005070784091949463, 0.005087776184082031, 0.005146463871002197, 0.005108160018920898, 0.005131840229034424, 0.005114016056060791, 0.005102687835693359, 0.005080031871795654, 0.0050802559852600095, 0.005103936195373535, 0.0050830078125, 0.0050850558280944825, 0.00507155179977417, 0.005124095916748047, 0.005110112190246582, 0.005090976238250732, 0.005099520206451416, 0.005096672058105468, 0.005092127799987793, 0.005130496025085449, 0.00514467191696167, 0.005317503929138183, 0.005100319862365723, 0.005125408172607422, 0.005186272144317627, 0.005075104236602783, 0.005084832191467285, 0.005103936195373535, 0.005100927829742432, 0.005091392040252686, 0.005097919940948486, 0.0051463360786437986, 0.005196063995361328, 0.005113344192504882, 0.005025919914245606, 0.005154816150665284, 0.005101280212402344, 0.005091008186340332, 0.005076863765716553, 0.0051270718574523925, 0.005098495960235596, 0.005370304107666015, 0.0051448321342468266, 0.005249311923980713, 0.00515283203125, 0.005118048191070557, 0.005162047863006592, 0.0051114559173583984, 0.005110432147979736, 0.005128640174865723, 0.005201791763305664, 0.005095776081085205, 0.005115551948547363, 0.005125855922698975, 0.0051039037704467775, 0.005090559959411621, 0.005115839958190918, 0.005100351810455322, 0.005076255798339844, 0.005173151969909668, 0.005114687919616699, 0.005103295803070069, 0.005075263977050781, 0.005173247814178467, 0.005101568222045898, 0.005078112125396729, 0.005077343940734863, 0.0050869441032409665, 0.005079840183258057, 0.005173247814178467, 0.005078623771667481, 0.005113759994506836, 0.00509830379486084, 0.005099264144897461, 0.005131775856018066, 
0.0051082239151000975, 0.005088831901550293, 0.005091775894165039, 0.005126368045806884, 0.005103392124176025, 0.005086656093597412, 0.0050748162269592285, 0.005108479976654053, 0.005095071792602539, 0.005093440055847168, 0.005115551948547363, 0.00510646390914917, 0.005142399787902832, 0.005070752143859864, 0.005222400188446045, 0.005103007793426513, 0.005067359924316406, 0.005117663860321045, 0.005112095832824707, 0.005130239963531494, 0.005089280128479004, 0.005107967853546142, 0.005048319816589355, 0.0051016960144042965, 0.005142399787902832, 0.005087232112884522, 0.005127552032470703, 0.0051329278945922855, 0.005074944019317627, 0.005119999885559082, 0.005107679843902588, 0.0050854401588439945, 0.005097248077392578, 0.0051138877868652345, 0.005092512130737305, 0.005102303981781006, 0.005111167907714844, 0.0051975998878479, 0.00510262393951416, 0.005115488052368164, 0.005115744113922119, 0.005112063884735107, 0.005074975967407226, 0.005052608013153076, 0.005132287979125977, 0.005097504138946533, 0.005078239917755127, 0.005107999801635742, 0.005115520000457763, 0.005087296009063721, 0.005250944137573242, 0.005297311782836914, 0.005138175964355469, 0.005158912181854248, 0.005148191928863525, 0.005136864185333252, 0.005099520206451416, 0.005105792045593261, 0.005121920108795166, 0.005105663776397705, 0.005121791839599609, 0.005197984218597412, 0.005119967937469483, 0.005099904060363769, 0.005129536151885986, 0.005121664047241211, 0.005092160224914551, 0.005095232009887696, 0.005103807926177978, 0.005105535984039306, 0.005083263874053955, 0.005079040050506592, 0.005133535861968994, 0.00513097620010376, 0.0052163200378417965, 0.00510975980758667, 0.005214079856872559, 0.0051212477684020995, 0.005106592178344726, 0.005156864166259765, 0.0051363840103149415, 0.005111807823181152, 0.005074944019317627, 0.00510975980758667, 0.005099520206451416, 0.0050032639503479, 0.005066976070404052, 0.00511568021774292, 0.005107711791992187, 0.005082367897033691, 0.005175871849060059, 0.0051055998802185054, 0.005117343902587891, 0.005129055976867676, 0.005126143932342529, 0.005162847995758057, 0.005115488052368164, 0.005097536087036133, 0.005125919818878174, 0.00513478422164917, 0.0051123518943786625, 0.005109504222869873, 0.005154751777648926, 0.005107776165008545, 0.005099071979522705, 0.005132991790771485, 0.005133920192718506, 0.005111968040466309, 0.0051283202171325684, 0.00523360013961792, 0.005116543769836426, 0.005132160186767578, 0.005132319927215576, 0.0051158080101013186, 0.005155327796936035, 0.005128191947937012, 0.005148672103881836, 0.005152768135070801, 0.0051773438453674315, 0.005168863773345947, 0.0051567678451538085, 0.005206399917602539, 0.0056031041145324706, 0.005130464076995849, 0.005111711978912354, 0.0050953922271728515, 0.005140607833862305, 0.005203968048095703, 0.005089280128479004, 0.0051363840103149415, 0.005093376159667969, 0.00513372802734375, 0.005063263893127441, 0.005099520206451416, 0.005187007904052734, 0.005077600002288818, 0.005127999782562256, 0.0054265279769897465, 0.005102399826049805, 0.005109119892120362, 0.005112031936645508, 0.005130464076995849, 0.005093152046203613, 0.005130847930908203, 0.005119135856628418, 0.005098144054412842, 0.005105504035949707, 0.005127808094024658, 0.0050243520736694336, 0.0050802559852600095, 0.005086016178131103, 0.00507913589477539, 0.0052202558517456055, 0.005080863952636718, 0.005087456226348877, 0.005109600067138672, 0.00518943977355957, 0.005564767837524414, 0.005141791820526123, 0.005397215843200683, 0.0051877121925354005, 
0.005285759925842285, 0.00512934398651123, 0.005108287811279297, 0.005095615863800049, 0.005163296222686768, 0.005123712062835693, 0.005105887889862061, 0.005120160102844239, 0.0051138558387756345, 0.005092927932739258, 0.005106143951416016, 0.005176191806793213, 0.005101535797119141, 0.005089856147766113, 0.0051838397979736325, 0.005105728149414062, 0.0050969281196594236, 0.005083680152893066, 0.005150144100189209, 0.005075263977050781, 0.005067008018493652, 0.005056320190429687, 0.005085696220397949, 0.0050657281875610355, 0.005075967788696289, 0.0051010560989379885, 0.005102816104888916, 0.005094367980957032, 0.005081088066101074, 0.005107903957366943, 0.005205632209777832, 0.005072415828704834, 0.005058656215667725, 0.0051099519729614256, 0.005120448112487793, 0.005076576232910157, 0.005089632034301758, 0.005129312038421631, 0.0050926079750061035, 0.005098400115966797, 0.005121151924133301, 0.005101151943206787, 0.005091231822967529, 0.005117536067962647, 0.0051164479255676265, 0.005124095916748047, 0.0050852479934692385, 0.005107647895812988, 0.005115903854370117, 0.0050991039276123045, 0.005021376132965088, 0.00507747220993042, 0.0051279358863830565, 0.005106080055236817, 0.005067776203155518, 0.005075488090515137, 0.005102047920227051, 0.0051435518264770505, 0.005090303897857666, 0.005191679954528809, 0.0050910720825195314, 0.005089695930480957, 0.0050657281875610355, 0.005136672019958496, 0.005084928035736084, 0.0051495041847229, 0.005103551864624024, 0.005113696098327637, 0.005175776004791259, 0.005080832004547119, 0.005117343902587891, 0.005097983837127685, 0.005128287792205811, 0.005079071998596191, 0.005131423950195313, 0.005118080139160156, 0.005077119827270508, 0.005136960029602051, 0.00547430419921875, 0.005111807823181152, 0.0051240320205688476, 0.005138495922088623, 0.005111807823181152, 0.005128191947937012, 0.005119999885559082, 0.0051039037704467775, 0.005085951805114746, 0.005079264163970947, 0.00521292781829834, 0.0051561279296875, 0.005114687919616699, 0.005134496212005615, 0.005082848072052002, 0.005058591842651367, 0.005089600086212159, 0.005115231990814209, 0.005089344024658203, 0.005091616153717041, 0.00509878396987915, 0.005135072231292725, 0.005089024066925049, 0.005074687957763672, 0.00507750415802002, 0.005205408096313476, 0.005179808139801025, 0.005091519832611084, 0.00524396800994873, 0.005092095851898193, 0.005116096019744873, 0.005150335788726807, 0.0050917119979858394, 0.005130303859710693, 0.005068128108978272, 0.004984831809997559, 0.005107711791992187, 0.005092959880828857, 0.005065152168273926, 0.005131936073303223, 0.005527872085571289, 0.005101568222045898, 0.0050728960037231445, 0.005119808197021484, 0.005102848052978516, 0.005084095954895019, 0.005140128135681152, 0.00509168004989624, 0.005110976219177246, 0.005094207763671875, 0.005124095916748047, 0.005117951869964599, 0.005107999801635742, 0.005257152080535889, 0.005101408004760742, 0.005074880123138428, 0.0050954241752624516, 0.0051242241859436035, 0.005100512027740479, 0.005087423801422119, 0.005097887992858887, 0.005151040077209472, 0.0051138558387756345, 0.005093696117401123, 0.005111680030822754, 0.005190624237060547, 0.0050793919563293455, 0.0050915517807006835, 0.005112095832824707, 0.005105792045593261, 0.005111680030822754, 0.005122047901153564, 0.005119552135467529, 0.00517574405670166, 0.00509984016418457, 0.0051155838966369625, 0.005111807823181152, 0.00508028793334961, 0.005082047939300537, 0.005125984191894532, 0.005167103767395019, 0.005096799850463867, 0.0050919361114501955, 
0.005084959983825683, 0.005081151962280274, 0.005089471817016602, 0.005106847763061523, 0.005104512214660645, 0.005076511859893799, 0.005125664234161377, 0.005326879978179932, 0.005095776081085205, 0.005095839977264404, 0.0051262078285217285, 0.005107808113098145, 0.005146624088287354, 0.005154816150665284, 0.0051528000831604, 0.005125696182250976, 0.005145023822784424, 0.005160863876342773, 0.005212255954742432, 0.005118015766143799, 0.005105728149414062, 0.005134016036987305, 0.005140672206878662, 0.005150815963745117, 0.005112927913665771, 0.005192512035369873, 0.0051356801986694334, 0.0051084160804748535, 0.005146016120910644, 0.005126495838165283, 0.005248288154602051, 0.005264063835144043, 0.005300704002380371, 0.005127168178558349, 0.005124000072479248, 0.0051476478576660155, 0.005141856193542481, 0.00521235179901123, 0.005175680160522461, 0.005146624088287354, 0.005098720073699951, 0.005083936214447022, 0.005144576072692871, 0.005127327919006348, 0.005086239814758301, 0.005234496116638183, 0.005119999885559082, 0.005093023777008056, 0.005105472087860107, 0.005231135845184326, 0.005099711894989013, 0.005080671787261963, 0.005069056034088135, 0.005103456020355225, 0.00511353588104248, 0.005077184200286865, 0.005150752067565918, 0.005183712005615234, 0.005083136081695557, 0.005092480182647705, 0.005098368167877197, 0.005095776081085205, 0.0051075201034545895, 0.00509935998916626, 0.0051263999938964844, 0.00514025592803955, 0.005101535797119141, 0.005128191947937012, 0.005103104114532471, 0.005091584205627441, 0.0050936322212219234, 0.0051233601570129395, 0.00512278413772583, 0.005104832172393799, 0.005118783950805664, 0.005115903854370117, 0.005263040065765381, 0.005119328022003174, 0.005014336109161377, 0.0050991039276123045, 0.005201759815216064, 0.00514243221282959, 0.0051452798843383786, 0.005130655765533447, 0.005125728130340576, 0.005099679946899414, 0.00513369607925415, 0.005184127807617188, 0.005105792045593261, 0.005153823852539063, 0.0051040959358215334, 0.005091328144073487, 0.005108128070831299, 0.005127871990203857, 0.005105535984039306, 0.005190207958221436, 0.005115039825439453, 0.005110176086425782, 0.005081567764282226, 0.005111616134643554, 0.005238976001739502, 0.005117760181427002, 0.006131040096282959, 0.005272575855255127, 0.00517903995513916, 0.005822463989257813, 0.005722432136535644, 0.005141856193542481, 0.005157472133636475, 0.005136127948760986, 0.005117663860321045, 0.00510595178604126, 0.005134367942810058, 0.00512556791305542, 0.005132832050323486, 0.0051437759399414066, 0.005141280174255371, 0.005142528057098389, 0.005087232112884522, 0.005183487892150879, 0.0051036162376403805, 0.005285984039306641, 0.005138336181640625, 0.005130047798156738, 0.005093567848205567, 0.005089087963104248, 0.005206240177154541, 0.0051056318283081055, 0.00505676794052124, 0.005093120098114014, 0.005127871990203857, 0.005087456226348877, 0.005100895881652832, 0.005386047840118409, 0.005132959842681885, 0.005140768051147461, 0.005148672103881836, 0.005186975955963135, 0.005200831890106201, 0.005135615825653076, 0.005177248001098633, 0.005109439849853516, 0.005128479957580566, 0.0051082239151000975, 0.005124415874481201, 0.005109439849853516, 0.005098976135253906, 0.005080992221832275, 0.005124735832214356, 0.0051138558387756345, 0.005113088130950928, 0.00513699197769165, 0.005158239841461181, 0.005155263900756836, 0.005140768051147461, 0.0051448001861572265, 0.005132031917572021, 0.005116096019744873, 0.005128384113311768, 0.005134143829345703, 0.005122335910797119, 
0.005115007877349854, 0.005130784034729004, 0.005115903854370117, 0.005078815937042237, 0.005112224102020264, 0.005124256134033203, 0.00510643196105957, 0.005080992221832275, 0.005108736038208008, 0.005134335994720459, 0.0051036162376403805, 0.00509935998916626, 0.005120160102844239, 0.005111743927001953, 0.005105728149414062, 0.005129407882690429, 0.005111775875091553, 0.00512886381149292, 0.005087584018707275, 0.005193920135498047, 0.005107647895812988, 0.00510537576675415, 0.005100575923919678, 0.005116928100585938, 0.005103583812713623, 0.005087007999420166, 0.005193920135498047, 0.005141952037811279, 0.005100255966186524, 0.005107583999633789, 0.005096799850463867, 0.00508790397644043, 0.005083104133605957, 0.005107840061187744, 0.005105120182037353, 0.005092959880828857, 0.005337952136993408, 0.0051530561447143556, 0.005102367877960205, 0.005098720073699951, 0.005125984191894532, 0.0051066560745239254, 0.00508406400680542, 0.005005311965942383, 0.00506879997253418, 0.0051298561096191405, 0.005095967769622803, 0.005092544078826904, 0.0050878400802612305, 0.005107647895812988, 0.005088543891906739, 0.005090112209320068, 0.005146912097930908, 0.0052221441268920895, 0.005119455814361572, 0.005144576072692871, 0.0051290240287780765, 0.005140192031860351, 0.005111199855804443, 0.005132895946502685, 0.00510975980758667, 0.005090559959411621, 0.005366144180297852, 0.005126719951629639, 0.005271359920501709, 0.0051133761405944824, 0.005145055770874023, 0.00511356782913208, 0.005124383926391601, 0.005087232112884522, 0.005107711791992187, 0.005091328144073487, 0.005093728065490723, 0.00524563217163086, 0.0051988158226013185, 0.005085375785827637, 0.005095232009887696, 0.005093376159667969, 0.005173247814178467, 0.0051131839752197265, 0.005118624210357666, 0.005105663776397705, 0.0050821762084960935, 0.0050694079399108884, 0.005091455936431885, 0.005081471920013428, 0.0050768318176269535, 0.005087520122528076, 0.005080800056457519, 0.005085184097290039, 0.0050702719688415524, 0.00509555196762085, 0.005198272228240966, 0.0050720000267028805, 0.005093567848205567, 0.005101984024047852, 0.005087520122528076, 0.005081088066101074, 0.005107135772705078, 0.005108160018920898, 0.005089536190032959, 0.005555488109588623, 0.005112192153930664, 0.005097856044769287, 0.005096479892730713, 0.005090367794036865, 0.0050618557929992675, 0.005105728149414062, 0.005079904079437256, 0.005074399948120117, 0.0051164479255676265, 0.005128255844116211, 0.005116896152496338, 0.005106751918792725, 0.005123936176300049, 0.005130303859710693, 0.0050908799171447755, 0.005153120040893555, 0.00510697603225708, 0.005100128173828125, 0.005087456226348877, 0.005115551948547363, 0.005110079765319824, 0.005134592056274414, 0.005123263835906983, 0.005169760227203369, 0.005090943813323975, 0.005095808029174804, 0.005106944084167481, 0.005112576007843018, 0.005227744102478027, 0.005525599956512451, 0.0056072001457214355, 0.005442080020904541, 0.005136767864227295, 0.0051363840103149415, 0.0051171197891235354, 0.006521664142608643, 0.005437439918518067, 0.0052013759613037105, 0.0051454720497131345, 0.005136159896850586, 0.005119872093200683, 0.005175295829772949, 0.005130239963531494, 0.005108767986297607, 0.0051578559875488286, 0.005144927978515625, 0.005141248226165771, 0.005169216156005859, 0.005196640014648438, 0.005189631938934326, 0.005165056228637695, 0.005133823871612549, 0.005165567874908447, 0.005082848072052002, 0.005126016139984131, 0.005124671936035156, 0.005117792129516602, 0.0050728960037231445, 0.005059904098510743, 
0.0051975998878479, 0.005143392086029053, 0.005325088024139404, 0.005142303943634033, 0.00520195198059082, 0.005097439765930176, 0.005214303970336914, 0.005097184181213379, 0.006296576023101807, 0.0051271038055419925, 0.00511187219619751, 0.005150527954101563, 0.005099711894989013, 0.005146111965179443, 0.005127808094024658, 0.0053146882057189945, 0.0050965437889099125, 0.0051380801200866695, 0.005117023944854736, 0.005158944129943848, 0.005147359848022461, 0.005090816020965576, 0.005120160102844239, 0.005117919921875, 0.0051205439567565915, 0.005093599796295166, 0.005091104030609131, 0.005130239963531494, 0.005087584018707275, 0.005082784175872802, 0.005074944019317627, 0.0050926079750061035, 0.005095808029174804, 0.005079679965972901, 0.00510646390914917, 0.005114848136901855, 0.005117951869964599, 0.005081151962280274, 0.00510969591140747, 0.005127776145935059, 0.005112224102020264, 0.005105088233947754, 0.0051262078285217285, 0.005085504055023194, 0.0050804481506347655, 0.0050797438621521, 0.005088768005371094, 0.005087967872619629, 0.005215839862823486, 0.005122399806976318, 0.005113823890686035, 0.005075200080871582, 0.0050720000267028805, 0.005113952159881592, 0.005074687957763672, 0.005071648120880127, 0.00508348798751831, 0.005112864017486572, 0.005186495780944825, 0.005091008186340332, 0.005119999885559082, 0.005099167823791504, 0.005085599899291992, 0.005081024169921875, 0.0051212158203125, 0.005112703800201416, 0.005160704135894776, 0.005126272201538086, 0.005124576091766357, 0.00508681583404541, 0.005074304103851318, 0.0049909758567810054, 0.005093376159667969, 0.005091584205627441, 0.005115647792816162, 0.0050954241752624516, 0.005105120182037353, 0.005087776184082031, 0.0050852160453796385, 0.005107679843902588, 0.005107711791992187, 0.005066783905029297, 0.0051056318283081055, 0.005170976161956787, 0.005117152214050293, 0.005078015804290771, 0.005087391853332519, 0.005123136043548584, 0.005102367877960205, 0.005101280212402344, 0.005152031898498535, 0.005108736038208008, 0.005093440055847168, 0.00506873607635498, 0.005119999885559082, 0.005123680114746094, 0.005097887992858887, 0.005106719970703125, 0.005102911949157715, 0.00508073616027832, 0.0050746240615844726, 0.0051010241508483885, 0.005098336219787598, 0.0050728960037231445, 0.0050787520408630375, 0.005123904228210449, 0.005075712203979493, 0.005117663860321045, 0.005101568222045898, 0.005177631855010987, 0.005095136165618896, 0.00508512020111084, 0.005094560146331787, 0.005099552154541016, 0.005077439785003662, 0.0050668478012084964, 0.005088640213012695, 0.0050797438621521, 0.005072415828704834, 0.005095359802246094, 0.005100607872009277, 0.0050722241401672365, 0.005077311992645264, 0.005101664066314697, 0.005102911949157715, 0.0050858879089355465, 0.005082304000854492, 0.005126976013183594, 0.005110015869140625, 0.005080832004547119, 0.005132287979125977, 0.005092768192291259, 0.005110367774963379, 0.005076064109802246, 0.00499507188796997, 0.005081088066101074, 0.005096960067749024, 0.00506873607635498, 0.005058495998382568, 0.005093760013580322, 0.0050936322212219234, 0.005074463844299317, 0.005099008083343506, 0.005094367980957032, 0.005087232112884522, 0.0050841598510742185, 0.005135072231292725, 0.0051140480041503905, 0.005081056118011474, 0.005077119827270508, 0.005361408233642578, 0.005109824180603027, 0.0050787520408630375, 0.005105728149414062, 0.0051448321342468266, 0.005109024047851562, 0.005215136051177979, 0.005156479835510254, 0.0051019201278686525, 0.0051075520515441895, 0.0051140480041503905, 
0.005221888065338135, 0.005132351875305176, 0.005110335826873779, 0.005134175777435303, 0.005112895965576172, 0.005125184059143067, 0.005137504100799561, 0.0051147518157958986, 0.005081056118011474, 0.005084703922271729, 0.005117599964141846, 0.005108511924743652, 0.00508681583404541, 0.005132607936859131, 0.0051018881797790525, 0.005117311954498291, 0.005087615966796875, 0.005086688041687012, 0.0051017279624938966, 0.005066880226135254, 0.0051857919692993165, 0.005097184181213379, 0.005148416042327881, 0.005073440074920654, 0.005099520206451416, 0.005089280128479004, 0.0050637760162353515, 0.005065631866455078, 0.005126143932342529, 0.005077055931091309, 0.005074079990386963, 0.005100319862365723, 0.005116064071655274, 0.005094687938690185, 0.005177919864654541, 0.00509939193725586, 0.005114655971527099, 0.0050728960037231445, 0.0050862717628479, 0.005089407920837403, 0.005089503765106201, 0.005072351932525635, 0.00506774377822876, 0.005060544013977051, 0.005119999885559082, 0.005074079990386963, 0.0050973758697509764, 0.005087584018707275, 0.00508896017074585, 0.005150847911834717, 0.005108736038208008, 0.005109312057495118, 0.0056360640525817875, 0.006101119995117188, 0.006256768226623535, 0.005269504070281982, 0.0051380801200866695, 0.005118303775787354, 0.005106847763061523, 0.0051199040412902835, 0.005143487930297851, 0.005081088066101074, 0.005083136081695557, 0.005130496025085449, 0.0051179838180541995, 0.0050869441032409665, 0.005416959762573242, 0.005142528057098389, 0.005117152214050293, 0.005114655971527099, 0.0051773438453674315, 0.005091328144073487, 0.005126143932342529, 0.00513753604888916, 0.005124991893768311, 0.00510697603225708, 0.005106592178344726, 0.0051133761405944824, 0.005122367858886719, 0.005227871894836426, 0.0051019201278686525, 0.005100959777832031, 0.0051147518157958986, 0.005113503932952881, 0.005116576194763183, 0.005100607872009277, 0.005108128070831299, 0.00512175989151001, 0.005097663879394531, 0.005240255832672119, 0.005102591991424561, 0.005123295783996582, 0.005118752002716064, 0.005091519832611084, 0.005136064052581787, 0.005127871990203857, 0.005259359836578369, 0.005120223999023438, 0.0051363840103149415, 0.005028128147125244, 0.005117184162139893, 0.005091263771057129, 0.005074048042297363, 0.005119103908538819, 0.0050878081321716305, 0.005091584205627441, 0.005111551761627197, 0.005097472190856934, 0.005068384170532227, 0.005073023796081543, 0.005115392208099365, 0.005094175815582276, 0.0051068801879882815, 0.005101823806762695, 0.005112576007843018, 0.005091135978698731, 0.00509555196762085, 0.005111680030822754, 0.0051036162376403805, 0.005087232112884522, 0.00506879997253418, 0.005128191947937012, 0.005105311870574951, 0.005076416015625, 0.00511683177947998, 0.005107776165008545, 0.0051075520515441895, 0.005105088233947754, 0.005151391983032227, 0.005173471927642823, 0.005103392124176025, 0.005097311973571777, 0.005120160102844239, 0.0050769920349121095, 0.005101247787475586, 0.005126463890075684, 0.005097536087036133, 0.005111072063446045, 0.005083775997161865, 0.005101600170135498, 0.005095071792602539, 0.0051981439590454105, 0.00516099214553833, 0.005105088233947754, 0.005084735870361328, 0.005098495960235596, 0.005691135883331299, 0.005228799819946289, 0.005082943916320801, 0.005101183891296387, 0.005090943813323975, 0.005086143970489502, 0.005079040050506592, 0.005093183994293213, 0.005089471817016602, 0.005079040050506592, 0.00510975980758667, 0.00508460807800293, 0.005105663776397705, 0.005084864139556885, 0.005140927791595459, 
0.005104063987731933, 0.0050274238586425785, 0.005077216148376465, 0.005101664066314697, 0.005126336097717285, 0.00507689619064331, 0.005054463863372802, 0.005104928016662597, 0.005076960086822509, 0.0050731520652771, 0.00506112003326416, 0.005107711791992187, 0.005101568222045898, 0.00511084794998169, 0.005214303970336914, 0.0051147198677062985, 0.00509552001953125, 0.005101503849029541, 0.00511740779876709, 0.005888512134552002, 0.005181439876556396, 0.005384191989898681, 0.005389920234680176, 0.00558128023147583, 0.005196000099182129, 0.005133056163787842, 0.005120255947113037, 0.005143263816833496, 0.0051448001861572265, 0.005121823787689209, 0.005128223896026612, 0.005152736186981201, 0.005201920032501221, 0.005119999885559082, 0.005129824161529541, 0.0051183681488037105, 0.005167103767395019, 0.005113344192504882, 0.005099936008453369, 0.0051200962066650394, 0.005089056015014648, 0.005119391918182373, 0.00509830379486084, 0.005099743843078613, 0.005090144157409668, 0.005096384048461914, 0.005090847969055175, 0.005084799766540527, 0.0051018881797790525, 0.005139008045196534, 0.005113152027130127, 0.005075456142425537, 0.005114016056060791, 0.00517900800704956, 0.005119840145111084, 0.005100063800811768, 0.005160607814788818, 0.0051528639793396, 0.005093152046203613, 0.005136960029602051, 0.005136000156402588, 0.00510595178604126, 0.005087232112884522, 0.005224448204040527, 0.005092959880828857, 0.0052239041328430175, 0.0051311998367309574, 0.005145919799804687, 0.005176000118255615, 0.0051036162376403805, 0.005107295989990234, 0.005107679843902588, 0.0051224961280822755, 0.0050769920349121095, 0.005105663776397705, 0.005189727783203125, 0.005103487968444825, 0.005107232093811035, 0.005124000072479248, 0.005149280071258545, 0.00510745620727539, 0.005166336059570313, 0.005129216194152832, 0.005139904022216797, 0.005126719951629639, 0.005169151782989502, 0.0051404800415039064, 0.005122047901153564, 0.0051775360107421875, 0.005152575969696045, 0.005119647979736328, 0.005085536003112793, 0.005132127761840821, 0.005098944187164307, 0.005106400012969971, 0.005138656139373779, 0.005121312141418457, 0.005077824115753173, 0.00508896017074585, 0.005127615928649902, 0.005093535900115967, 0.00511843204498291, 0.005094848155975342, 0.005138207912445068, 0.0051140480041503905, 0.005248799800872803, 0.005122975826263428, 0.00511078405380249, 0.0050794239044189456, 0.005195648193359375, 0.005083744049072266, 0.005098944187164307, 0.005087423801422119, 0.005117311954498291, 0.005107872009277344, 0.005092192173004151, 0.005097472190856934, 0.005091328144073487, 0.005117023944854736, 0.005238880157470703, 0.005253695964813233, 0.005110015869140625, 0.00510745620727539, 0.0050854401588439945, 0.005107872009277344, 0.005128096103668213, 0.005095359802246094, 0.005056672096252442, 0.005111072063446045, 0.005110559940338134, 0.005111423969268799, 0.005071296215057373, 0.005119872093200683, 0.005090976238250732, 0.005129568099975586, 0.0050917119979858394, 0.005126527786254883, 0.005093376159667969, 0.005123648166656494, 0.005112512111663818, 0.005123551845550537, 0.005073440074920654, 0.005086656093597412, 0.005126719951629639, 0.005086400032043457, 0.005103744029998779, 0.00507155179977417, 0.005121088027954102, 0.005103775978088379, 0.005086336135864258, 0.0051623997688293455, 0.005113952159881592, 0.005068480014801025, 0.00506928014755249, 0.005117951869964599, 0.005105247974395752, 0.0051036162376403805, 0.005127871990203857, 0.0051758079528808594, 0.005095776081085205, 0.005115424156188965, 
0.0051244478225708, 0.005130464076995849, 0.005119775772094727, 0.005137887954711914, 0.005181983947753906, 0.005119999885559082, 0.005156864166259765, 0.005158912181854248, 0.005144576072692871, 0.005111423969268799, 0.005184000015258789, 0.005129983901977539, 0.005275712013244629, 0.005142816066741943, 0.005132031917572021, 0.005152512073516846, 0.005124095916748047, 0.005120639801025391, 0.00513593578338623, 0.005086463928222656, 0.005110367774963379, 0.005126336097717285, 0.005124351978302002, 0.00510041618347168, 0.005114816188812256, 0.00517142391204834, 0.005095263957977295, 0.005114975929260254, 0.005129216194152832, 0.005048351764678955, 0.005126368045806884, 0.00509503984451294, 0.005111936092376709, 0.0051281599998474125, 0.005253471851348877, 0.005101471900939941, 0.005117184162139893, 0.005095359802246094, 0.005067615985870361, 0.005091040134429931, 0.0050928001403808595, 0.005094240188598633, 0.005054399967193603, 0.005119135856628418, 0.00511030387878418, 0.005125760078430176, 0.005100224018096924, 0.005119584083557129, 0.006115263938903809, 0.006099423885345459, 0.006352447986602784, 0.005190080165863037, 0.00514409589767456, 0.005121952056884766, 0.005101664066314697, 0.00516483211517334, 0.005126336097717285, 0.005249375820159912, 0.005130239963531494, 0.00512172794342041, 0.00510748815536499, 0.0051157760620117185, 0.0051495680809020995, 0.005199808120727539, 0.005123551845550537, 0.005134592056274414, 0.005115647792816162, 0.0050795841217041015, 0.005101568222045898, 0.005128191947937012, 0.0051138558387756345, 0.005087071895599365, 0.0050791997909545895, 0.005117663860321045, 0.005089056015014648, 0.005081471920013428, 0.005089503765106201, 0.005139967918395996, 0.005097887992858887, 0.005111807823181152, 0.0051036162376403805, 0.0051274561882019044, 0.0050850238800048825, 0.0051056318283081055, 0.00511683177947998, 0.0050969281196594236, 0.005114175796508789, 0.005148575782775879, 0.0051121277809143065, 0.0051036162376403805, 0.005179391860961914, 0.00510697603225708, 0.0050360321998596195, 0.005111807823181152, 0.005095232009887696, 0.0052135357856750485, 0.005086048126220703, 0.005083136081695557, 0.0050954241752624516, 0.005101119995117188, 0.005093823909759521, 0.005076191902160645, 0.005090047836303711, 0.005106783866882324, 0.0050731520652771, 0.005075808048248291, 0.005113728046417236, 0.005080895900726318, 0.005099679946899414, 0.005089280128479004, 0.0051138558387756345, 0.005103519916534424, 0.00509552001953125, 0.005099520206451416, 0.005084479808807373, 0.005143231868743896, 0.005087232112884522, 0.005098847866058349, 0.005110176086425782, 0.005090976238250732, 0.005083744049072266, 0.005101344108581543, 0.005093599796295166, 0.005090591907501221, 0.005106143951416016, 0.005089183807373047, 0.00508348798751831, 0.005079040050506592, 0.005105663776397705, 0.005080895900726318, 0.005075232028961181, 0.005080543994903565, 0.005114304065704346, 0.005107935905456543, 0.0051320638656616215, 0.005119935989379883, 0.005104800224304199, 0.005090496063232422, 0.005134079933166504, 0.005120128154754639, 0.005096735954284668, 0.005079872131347656, 0.005087135791778564, 0.005098336219787598, 0.005094048023223877, 0.005099264144897461, 0.005122623920440674, 0.005099552154541016, 0.005099520206451416, 0.0051008639335632325, 0.005118656158447266, 0.005137951850891114, 0.0051051521301269534, 0.005144544124603271, 0.005129439830780029, 0.005080607891082764, 0.005140223979949951, 0.005108448028564453, 0.005123551845550537, 0.005104383945465088, 0.0050952000617980955, 
0.005195775985717774, 0.005115071773529053, 0.005102399826049805, 0.005120255947113037, 0.0051418237686157225, 0.0051224961280822755, 0.0051056318283081055, 0.0050869760513305665, 0.005125440120697021, 0.005109024047851562, 0.005095104217529297, 0.0051315841674804685, 0.0051075520515441895, 0.005102528095245361, 0.005138336181640625, 0.005128191947937012, 0.005117280006408691, 0.005112256050109863, 0.005189856052398681, 0.00511081600189209, 0.005094367980957032, 0.005092735767364502, 0.005110367774963379, 0.005083168029785156, 0.005080607891082764, 0.005085152149200439, 0.005118624210357666, 0.0051066880226135255, 0.005078207969665527, 0.005321824073791504, 0.005118527889251709, 0.0052674560546875, 0.005146687984466553, 0.005140416145324707, 0.005155839920043945, 0.005109920024871826, 0.005159039974212646, 0.005139167785644531, 0.005099647998809814, 0.005133952140808105, 0.005232895851135254, 0.005158912181854248, 0.0051279358863830565, 0.0051857919692993165, 0.005151840209960937, 0.00513321590423584, 0.005148672103881836, 0.0051404800415039064, 0.005117695808410644, 0.005079296112060547, 0.005154047966003418, 0.005116223812103271, 0.005117599964141846, 0.0051372480392456055, 0.00516870403289795, 0.005125919818878174, 0.005126175880432129, 0.005041791915893555, 0.0051429119110107425, 0.005123680114746094, 0.00511030387878418, 0.00509503984451294, 0.005150976181030273, 0.005105663776397705, 0.00510975980758667, 0.005148608207702637, 0.005103487968444825, 0.005097695827484131, 0.005103839874267578, 0.005128255844116211, 0.005132192134857177, 0.005134399890899658, 0.005168288230895996, 0.005173823833465576, 0.005226111888885498, 0.005134719848632813, 0.005131519794464111, 0.005126016139984131, 0.005090176105499267, 0.005144768238067627, 0.005130047798156738, 0.005110911846160889, 0.005095488071441651, 0.005102015972137451, 0.005109216213226318, 0.005098112106323242, 0.005140607833862305, 0.005114016056060791, 0.005109375953674317, 0.0051138558387756345, 0.005147295951843262, 0.0052715840339660645, 0.005118720054626465, 0.0051352958679199215, 0.005118271827697754, 0.005248479843139649, 0.005257440090179443, 0.005137728214263916, 0.005109920024871826, 0.005107840061187744, 0.005137951850891114, 0.00513318395614624, 0.0050824317932128904, 0.005096127986907959, 0.0051140480041503905, 0.00509830379486084, 0.0050800638198852536, 0.005088863849639892, 0.005125631809234619, 0.005106592178344726, 0.005101535797119141, 0.005105663776397705, 0.0051138877868652345, 0.005085184097290039, 0.005107711791992187, 0.005182688236236572, 0.005120800018310547, 0.005105663776397705, 0.005157087802886963, 0.005130015850067139, 0.005236512184143067, 0.005116032123565674, 0.005146687984466553, 0.005126944065093994, 0.005099455833435059, 0.005127423763275146, 0.005112639904022217, 0.005089216232299805, 0.005087615966796875, 0.005797599792480469, 0.005090496063232422, 0.005103936195373535, 0.005131999969482422, 0.006148608207702637, 0.005228799819946289, 0.005406303882598877, 0.005136896133422852, 0.005128096103668213, 0.005154335975646972, 0.005132991790771485, 0.005121823787689209, 0.005111807823181152, 0.005131680011749268, 0.00512608003616333, 0.005094048023223877, 0.005107711791992187, 0.00515392017364502, 0.005116479873657227, 0.005205567836761475, 0.00513100814819336, 0.005251071929931641, 0.0051133761405944824, 0.005095967769622803, 0.005120063781738281, 0.005111680030822754, 0.005107711791992187, 0.005122047901153564, 0.005099520206451416, 0.005103263854980469, 0.00509065580368042, 0.0051066880226135255, 
0.005111328125, 0.0051036481857299806, 0.00510211181640625, 0.005107615947723389, 0.0050954241752624516, 0.005099040031433105, 0.00512662410736084, 0.005182943820953369, 0.0051164479255676265, 0.005079040050506592, 0.00521776008605957, 0.005083680152893066, 0.0050833277702331545, 0.005103392124176025, 0.0050926079750061035, 0.005071487903594971, 0.005085343837738037, 0.005093152046203613, 0.005166431903839111, 0.005071807861328125, 0.005111743927001953, 0.005089280128479004, 0.005070464134216308, 0.005075551986694336, 0.0052899842262268066, 0.0051888318061828614, 0.005092288017272949, 0.005104832172393799, 0.00508134412765503, 0.005116288185119629, 0.0050811200141906735, 0.005069920063018799, 0.005160927772521973, 0.005145535945892334, 0.005087232112884522, 0.005068287849426269, 0.005109888076782226, 0.005104000091552734, 0.005096896171569824, 0.005104191780090332, 0.005133344173431397, 0.005170303821563721, 0.005096447944641113, 0.005138495922088623, 0.0050982718467712405, 0.005074944019317627, 0.005099616050720215, 0.0051199040412902835, 0.005105023860931397, 0.005108352184295654, 0.005119008064270019, 0.005104608058929443, 0.005099520206451416, 0.005105567932128906, 0.0051110081672668456, 0.005104512214660645, 0.0050769920349121095, 0.005117919921875, 0.005118015766143799, 0.005103040218353272, 0.00511030387878418, 0.005416959762573242, 0.005672959804534912, 0.0052715520858764645, 0.005197728157043457, 0.0052939200401306155, 0.005238751888275147, 0.005159200191497803, 0.0051773438453674315, 0.005125152111053467, 0.005235487937927246, 0.005138175964355469, 0.005140384197235107, 0.005148255825042724, 0.005159264087677002, 0.006121695995330811, 0.005196159839630127, 0.005160448074340821, 0.005102335929870605, 0.00515231990814209, 0.005105855941772461, 0.005163008213043213, 0.0050969281196594236, 0.0056243519783020016, 0.005117951869964599]",tokens/s,194.64190870045417,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4294.889472,4886.233088,0.0,4483.710976,4465.672704,s,1,10.8904892578125,10.8904892578125,0.0,10.8904892578125,10.8904892578125,10.8904892578125,10.8904892578125,[10.8904892578125],,kWh,0.00010626884295412917,1.171483841686056e-05,3.3577804640000375e-05,0.00015156148601099011,,MB,2155.393024,5309.857792,0.0,4892.655616,4841.339904,s,10,1.9586477813720704,0.19586477813720704,0.00029519232091201945,0.19595532989501951,0.1961327163696289,0.1961373649597168,0.1961410838317871,"[0.19562754821777345, 0.19594793701171875, 0.19599331665039063, 0.19606419372558595, 0.19614201354980468, 0.19510330200195314, 0.19591778564453124, 0.1957572784423828, 0.19613168334960937, 
0.1959627227783203]",tokens/s,1307.024174712347,kWh,5.786576667483756e-06,6.381546521312642e-07,3.825013953254458e-06,1.0249745272869478e-05,tokens/kWh,24976230.451074544,MB,2160.193536,5477.629952,0.0,5060.427776,5012.931584,s,10,24.409556640625002,2.4409556640625003,0.006774720228408151,2.4418928222656247,2.4479796142578123,2.449918151855469,2.4514689819335937,"[2.437376953125, 2.43127685546875, 2.4377001953125, 2.444419921875, 2.451856689453125, 2.441253173828125, 2.447548828125, 2.4463115234375, 2.429280029296875, 2.442532470703125]",tokens/s,25.809563413023504,kWh,7.101976219001333e-05,7.83331610585867e-06,4.600652591194827e-05,0.00012485960420782028,tokens/kWh,504566.71234629897,,s,630,24.40638483810425,0.038740293393816266,0.000532612278388482,0.038604207992553714,0.03927266998291016,0.0396168155670166,0.04087017627716064,"[0.03919190216064453, 0.03850102233886719, 0.03834019088745117, 0.04040745544433594, 0.03880550384521484, 0.03836476898193359, 0.03818848037719726, 0.038164512634277344, 0.038982017517089844, 0.038604446411132816, 0.038161312103271484, 0.03849216079711914, 0.03831808090209961, 0.03826483154296875, 0.038454849243164065, 0.03845151901245117, 0.03840777587890625, 0.03834889602661133, 0.038341182708740235, 0.03821145629882813, 0.03846144104003906, 0.03827097702026367, 0.038292991638183595, 0.03860736083984375, 0.03854950332641602, 0.041444385528564456, 0.039016544342041014, 0.039217121124267576, 0.038745121002197264, 0.03862307357788086, 0.03847542572021485, 0.03824031829833984, 0.03833478546142578, 0.038225921630859375, 0.03899004745483398, 0.03862300872802735, 0.03853311920166016, 0.03822499084472656, 0.03840415954589844, 0.03832204818725586, 0.038381919860839844, 0.038412929534912106, 0.039311359405517575, 0.0384155502319336, 0.03836099243164062, 0.038310817718505856, 0.03831727981567383, 0.0394596176147461, 0.04017488098144531, 0.03949641418457031, 0.03901433563232422, 0.0390840950012207, 0.03873996734619141, 0.039941761016845705, 0.038594944000244144, 0.03883827209472656, 0.03852492904663086, 0.03833625411987305, 0.03843123245239258, 0.038475006103515626, 0.03880585479736328, 0.03865756988525391, 0.038419071197509765, 0.0385830078125, 0.03869481658935547, 0.03942822265625, 0.03875801467895508, 0.03841206359863281, 0.038505279541015625, 0.03818905639648437, 0.03818086242675781, 0.038182910919189454, 0.03849216079711914, 0.038547454833984376, 0.038370689392089846, 0.03825423812866211, 0.038369438171386716, 0.03841215896606445, 0.03825964736938477, 0.03819475173950195, 0.038617118835449216, 0.03871318435668945, 0.038902336120605466, 0.03851216125488281, 0.03878499221801758, 0.038785537719726565, 0.038365184783935545, 0.038290496826171874, 0.03836332702636719, 0.03824089431762695, 0.03847372817993164, 0.03844518280029297, 0.038648094177246094, 0.03889273452758789, 0.03878348922729492, 0.038551582336425784, 0.038932289123535156, 0.038612350463867184, 0.03851299285888672, 0.038743934631347655, 0.03889030456542969, 0.03856915283203125, 0.038803295135498045, 0.03838956832885742, 0.03902256011962891, 0.03847267150878906, 0.03892019271850586, 0.03832627105712891, 0.038866943359375, 0.03879731369018555, 0.0388939208984375, 0.03851433563232422, 0.0384134407043457, 0.03850739288330078, 0.03848191833496094, 0.03828326416015625, 0.038469791412353516, 0.03856108856201172, 0.03914166259765625, 0.038543392181396484, 0.03853657531738281, 0.038435489654541015, 0.03851676940917969, 0.03901046371459961, 0.03855769729614258, 0.03890176010131836, 0.03933993530273437, 
0.03992153549194336, 0.04027391815185547, 0.03927702331542969, 0.0391657600402832, 0.03904022216796875, 0.03885564804077148, 0.03861503982543945, 0.038529022216796875, 0.03852288055419922, 0.0384279670715332, 0.038365886688232424, 0.03961452865600586, 0.03874604797363281, 0.03858748626708984, 0.038540191650390625, 0.038569984436035154, 0.03865766525268555, 0.03844140625, 0.038510238647460934, 0.038570369720458984, 0.03853916931152344, 0.03876454544067383, 0.038408191680908206, 0.03850239944458008, 0.038469566345214846, 0.0391448974609375, 0.03937753677368164, 0.038787071228027346, 0.038542430877685545, 0.038423454284667966, 0.03835279846191406, 0.03843075180053711, 0.03846547317504883, 0.038512767791748045, 0.03851603317260742, 0.038491039276123046, 0.03832524871826172, 0.03828611373901367, 0.03845939254760742, 0.03955644989013672, 0.03846416091918945, 0.03859024047851563, 0.038940448760986325, 0.038701473236083986, 0.03856387329101563, 0.038608257293701174, 0.0385665283203125, 0.038718784332275394, 0.03833516693115235, 0.03836220932006836, 0.03838822555541992, 0.038555904388427736, 0.039274654388427734, 0.039215103149414066, 0.03860396957397461, 0.03836409759521484, 0.03831795120239258, 0.03828736114501953, 0.038434814453125, 0.038434814453125, 0.038365184783935545, 0.03841558456420899, 0.03898809432983399, 0.038809951782226564, 0.03840528106689453, 0.03855174255371094, 0.03867679977416992, 0.03864361572265625, 0.03889980697631836, 0.03844841766357422, 0.038141857147216796, 0.03826361465454101, 0.038266880035400394, 0.03921644973754883, 0.038763198852539066, 0.038806976318359374, 0.03898751831054687, 0.03892425537109375, 0.038851425170898436, 0.03864166259765625, 0.038669471740722654, 0.03850758361816406, 0.038286945343017575, 0.03827881622314453, 0.038166465759277346, 0.03826748657226563, 0.03829145431518555, 0.03879731369018555, 0.040812545776367185, 0.03909632110595703, 0.03915980911254883, 0.03882345581054687, 0.03907788848876953, 0.03996716690063477, 0.039022624969482424, 0.03877024078369141, 0.038506942749023436, 0.03853513717651367, 0.03845737457275391, 0.0390060806274414, 0.03830387115478515, 0.038567745208740234, 0.038478015899658206, 0.038522144317626954, 0.038419166564941404, 0.0388317756652832, 0.03841059112548828, 0.03831577682495117, 0.0383135986328125, 0.038255233764648434, 0.0382558708190918, 0.03831270217895508, 0.03816998291015625, 0.03864873504638672, 0.04138979339599609, 0.03914473724365235, 0.038982368469238284, 0.03902873611450195, 0.03915676879882812, 0.040471519470214844, 0.03906719970703125, 0.03900665664672852, 0.0391756477355957, 0.03895555114746094, 0.03896473693847656, 0.03899667358398438, 0.03908425521850586, 0.038981632232666014, 0.038908958435058594, 0.038964191436767576, 0.03876588821411133, 0.03875279998779297, 0.038989185333251956, 0.03882918548583984, 0.03928438568115234, 0.03915590286254883, 0.03928044891357422, 0.04157030487060547, 0.039200702667236326, 0.03922051239013672, 0.038887744903564454, 0.04150934219360351, 0.0393359375, 0.039190528869628906, 0.03926835250854492, 0.040474494934082034, 0.03961868667602539, 0.039157024383544924, 0.039266334533691404, 0.03910425567626953, 0.039067966461181644, 0.03927664184570313, 0.039268638610839846, 0.038995552062988284, 0.03846416091918945, 0.03842201614379883, 0.03850060653686523, 0.03837734222412109, 0.04087196731567383, 0.038812000274658205, 0.03888873672485352, 0.03864620971679687, 0.03865129470825195, 0.04004131317138672, 0.03882175827026367, 0.03921526336669922, 0.038645633697509764, 
0.03824796676635742, 0.0383144302368164, 0.03823190307617187, 0.03825212860107422, 0.03910921478271484, 0.03838886260986328, 0.03827609634399414, 0.038292736053466794, 0.03825126266479492, 0.038199295043945314, 0.0382457275390625, 0.03847808074951172, 0.03824886322021484, 0.038158336639404294, 0.038309471130371094, 0.03823001480102539, 0.03820710372924805, 0.038433567047119144, 0.03829350280761719, 0.03832121658325195, 0.038333023071289066, 0.03891263961791992, 0.038505535125732425, 0.038343456268310545, 0.03883638381958008, 0.03931340789794922, 0.038547454833984376, 0.0388361587524414, 0.03824851226806641, 0.03817881774902344, 0.03826851272583008, 0.03831439971923828, 0.038153953552246093, 0.03832374572753906, 0.03818678283691406, 0.03821363067626953, 0.03840304183959961, 0.038704544067382815, 0.03859040069580078, 0.03832284927368164, 0.03846758270263672, 0.03826835250854492, 0.038218303680419924, 0.038858528137207034, 0.038613216400146484, 0.0390546875, 0.038730400085449215, 0.038430721282958984, 0.038555648803710936, 0.03870924758911133, 0.038564895629882814, 0.03929328155517578, 0.038793952941894534, 0.038413631439208985, 0.038974048614501954, 0.03838332748413086, 0.03855388641357422, 0.03921065521240234, 0.038871711730957034, 0.03888300704956055, 0.03873382568359375, 0.03875148773193359, 0.03852521514892578, 0.03862575912475586, 0.03846108627319336, 0.03847766494750977, 0.038551231384277344, 0.03863020706176758, 0.03862278366088867, 0.03899027252197266, 0.03895004653930664, 0.04044038391113281, 0.038955295562744144, 0.03875180816650391, 0.03913363265991211, 0.03975167846679688, 0.0391363525390625, 0.03993894577026367, 0.039530078887939454, 0.0393460807800293, 0.03897398376464844, 0.03902409744262696, 0.038817790985107424, 0.03878761672973633, 0.039434177398681644, 0.03920640182495117, 0.03987760162353516, 0.038946304321289066, 0.03875481414794922, 0.03864371109008789, 0.03879731369018555, 0.0387562255859375, 0.03929334259033203, 0.03915683364868164, 0.03894028854370117, 0.03887936019897461, 0.03882675170898438, 0.03947065734863281, 0.03895558547973633, 0.03879116821289062, 0.038531295776367186, 0.03845916748046875, 0.03828736114501953, 0.038555648803710936, 0.03841024017333984, 0.038432510375976565, 0.038496513366699216, 0.03845119857788086, 0.038456737518310545, 0.03866070556640625, 0.03875020980834961, 0.03885228729248047, 0.039272449493408204, 0.03957177734375, 0.038779071807861325, 0.03890499114990234, 0.03883411026000977, 0.038767326354980466, 0.03909222412109375, 0.03883996963500977, 0.03859081649780274, 0.03876831817626953, 0.03881811141967773, 0.039783935546875, 0.0389832649230957, 0.03894073486328125, 0.03865180969238281, 0.03874297714233398, 0.038825984954833984, 0.038967296600341796, 0.03885670471191406, 0.03865599822998047, 0.03849785614013672, 0.038403518676757814, 0.03849065780639648, 0.038281696319580075, 0.03851571273803711, 0.038838623046875, 0.039032608032226565, 0.03884940719604492, 0.03869664001464844, 0.038869087219238284, 0.038883808135986325, 0.03905305480957031, 0.039346176147460936, 0.038981632232666014, 0.038795455932617184, 0.039261184692382815, 0.03896745681762695, 0.039128929138183596, 0.03904512023925781, 0.03904492950439453, 0.039088382720947265, 0.039196609497070316, 0.03895865631103516, 0.03914591979980469, 0.03876812744140625, 0.03873023986816406, 0.04092911911010742, 0.0394090576171875, 0.03879193496704102, 0.038956222534179685, 0.039230270385742186, 0.0389119987487793, 0.04018758392333984, 0.039290206909179684, 0.03941996765136719, 
0.03926681518554687, 0.038717857360839845, 0.038346752166748044, 0.03908403015136719, 0.03875430297851563, 0.03930316925048828, 0.038768577575683597, 0.03859465789794922, 0.03844464111328125, 0.038342048645019534, 0.03823715209960937, 0.03831571197509766, 0.03830201721191406, 0.03822143936157227, 0.03843929672241211, 0.03845529556274414, 0.03860889434814453, 0.04086579132080078, 0.03908198547363281, 0.038705471038818356, 0.03855699157714844, 0.03860892868041992, 0.038354721069335934, 0.040185630798339846, 0.03891654586791992, 0.03889555358886719, 0.038731616973876955, 0.038545982360839844, 0.0390366096496582, 0.039212478637695315, 0.03839884948730469, 0.03827711868286133, 0.03827273559570313, 0.038295841217041014, 0.03821964645385742, 0.03817609786987305, 0.038239009857177736, 0.0381952018737793, 0.03824435043334961, 0.0382033920288086, 0.038198593139648435, 0.038720191955566405, 0.0382454719543457, 0.03923353576660156, 0.03910860824584961, 0.03868409729003906, 0.038558273315429686, 0.038485824584960936, 0.038598846435546875, 0.039090175628662106, 0.03892598342895508, 0.03864432144165039, 0.038323040008544924, 0.03842755126953125, 0.03844025421142578, 0.03836179351806641, 0.03846553421020508, 0.03863552093505859, 0.03834624099731445, 0.03836537551879883, 0.03854163360595703, 0.03837164688110352, 0.0402940788269043, 0.03870051193237305, 0.03862348937988281, 0.038306079864501956, 0.03836108779907227, 0.03844467163085938, 0.03834918212890625, 0.03887104034423828, 0.03869900894165039, 0.03873993682861328, 0.03865398406982422, 0.0384345588684082, 0.038306049346923825, 0.038595905303955076, 0.03892089462280274, 0.03834470367431641, 0.03840409469604492, 0.03823446273803711, 0.03818588638305664, 0.038312255859375, 0.03829913711547851, 0.03822844696044922, 0.03831241607666016, 0.038301055908203124, 0.038295520782470706, 0.038574752807617185, 0.03840409469604492, 0.03838771057128906, 0.03845119857788086, 0.03836659240722656, 0.03832896041870117, 0.038406143188476564, 0.038389759063720705, 0.03824639892578125, 0.03890995025634766, 0.03847372817993164, 0.03871334457397461, 0.038505664825439455, 0.03856438446044922, 0.03848255920410156, 0.03947020721435547, 0.038621406555175784, 0.03844697570800781, 0.03843638229370117, 0.03837209701538086, 0.03882419204711914, 0.03859379196166992, 0.038591232299804684, 0.03846316909790039, 0.03864198303222656, 0.03826892852783203, 0.03846963119506836, 0.038594558715820314, 0.038327423095703125, 0.038634368896484375, 0.03845523071289063, 0.038449214935302733, 0.038645889282226564, 0.03849612808227539, 0.038372638702392575, 0.038373119354248045, 0.038865886688232425, 0.03830774307250977, 0.038228065490722656, 0.038166526794433595, 0.03813343811035156, 0.038185279846191404, 0.03812172698974609, 0.038144798278808595, 0.03811836624145508, 0.03821363067626953, 0.038121055603027344, 0.03817308807373047, 0.03807027053833008, 0.03808051300048828, 0.038056961059570314, 0.0381921272277832, 0.038112415313720706, 0.03815439987182617, 0.03823481750488281, 0.038215679168701173, 0.03831193542480469, 0.038171871185302735, 0.040063774108886716, 0.039079486846923826, 0.03916054534912109, 0.039300830841064456, 0.04043145751953125, 0.039639198303222656, 0.03977830505371094, 0.03921075057983398, 0.03913580703735352, 0.03911443328857422, 0.039221248626708984, 0.03924284744262695, 0.039142303466796875, 0.04066611099243164, 0.03919974517822265, 0.03934003067016602, 0.03927878570556641, 0.03922438430786133, 0.038921215057373046, 0.039096000671386716, 0.0388733139038086, 0.039230751037597655, 
0.03884089660644531, 0.042057727813720705]",tokens/s,25.812917569685215,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1061.236736,912.130048,0.0,509.607936,491.434496,s,1,7.93610986328125,7.93610986328125,0.0,7.93610986328125,7.93610986328125,7.93610986328125,7.93610986328125,[7.93610986328125],,kWh,2.4285686454148463e-05,2.6702019786394506e-06,7.264450256005972e-06,3.422033868879388e-05,,MB,1401.614336,1046.347776,0.0,629.1456,592.24832,s,10,0.262838659286499,0.026283865928649904,0.00033437420198730304,0.026199616432189942,0.0265708646774292,0.026870744228363035,0.02711064786911011,"[0.025845760345458983, 0.02650422477722168, 0.026157344818115234, 0.027170623779296875, 0.02610348892211914, 0.02625484848022461, 0.026118656158447266, 0.02621286392211914, 0.026284479141235353, 0.026186368942260743]",tokens/s,9739.815318451889,kWh,7.652417729057633e-07,8.439221195687993e-08,4.93555514099496e-07,1.3431894989621391e-06,tokens/kWh,190591126.71578142,MB,1441.202176,1061.02784,0.0,643.825664,605.085696,s,10,13.67902734375,1.367902734375,0.004353706976141033,1.3682092895507814,1.3732679443359375,1.3739672241210938,1.3745266479492189,"[1.3710374755859376, 1.3671981201171874, 1.373112548828125, 1.368277099609375, 1.3681414794921876, 1.37466650390625, 1.368664306640625, 1.36397802734375, 1.3588216552734376, 1.365130126953125]",tokens/s,46.05590618165182,kWh,5.1841454566261775e-05,5.7177801905249995e-06,1.8949182262301657e-05,7.650841701908843e-05,tokens/kWh,823438.811762134,,s,630,13.673743951797496,0.021704355479043626,0.0003393185883234052,0.021631551742553712,0.021929119682312013,0.022142153072357177,0.022981150226593017,"[0.024559167861938475, 0.021752191543579102, 0.02149580764770508, 0.021538816452026367, 0.021514240264892577, 0.021462047576904297, 0.021683263778686523, 0.021548959732055666, 0.021474624633789064, 0.021705087661743165, 0.021620319366455077, 0.021545087814331055, 0.021480224609375, 0.021507871627807616, 0.02163920021057129, 0.02208665657043457, 0.021766176223754884, 0.021594623565673828, 0.0218668155670166, 0.021719200134277344, 0.02171494483947754, 0.021734624862670898, 0.021695072174072266, 0.021700895309448243, 0.02152969551086426, 0.021545888900756836, 0.021507360458374022, 0.021507968902587892, 0.021702463150024415, 0.021676992416381834, 0.021542911529541017, 0.02167398452758789, 0.021608064651489258, 0.021897600173950194, 0.02277689552307129, 0.02192902374267578, 0.02194553565979004, 0.022237888336181642, 0.021773920059204102, 0.021566911697387694, 0.021622016906738283, 0.02176348876953125, 0.021606719970703125, 0.02159814453125, 0.02161884880065918, 0.021561248779296875, 0.021643264770507813, 0.02182143974304199, 0.02166761589050293, 0.021899488449096678, 0.021985376358032226, 0.02189673614501953, 
0.02177020835876465, 0.021759647369384766, 0.021810943603515626, 0.021784799575805664, 0.0217096004486084, 0.021657855987548828, 0.021651296615600585, 0.02165305519104004, 0.0218175048828125, 0.021634496688842774, 0.021770559310913085, 0.021354560852050782, 0.02152511978149414, 0.021600255966186522, 0.021755903244018555, 0.021800960540771484, 0.021819391250610352, 0.02173708724975586, 0.021733663558959962, 0.021653600692749023, 0.021608640670776367, 0.02162067222595215, 0.021646303176879884, 0.021580896377563476, 0.021582944869995117, 0.021631839752197266, 0.021632896423339844, 0.021595455169677733, 0.021661951065063478, 0.02171254348754883, 0.021826240539550783, 0.02187923240661621, 0.021925535202026367, 0.02187264060974121, 0.02180940818786621, 0.021613887786865234, 0.021759904861450196, 0.021639711380004884, 0.02162073516845703, 0.0215285758972168, 0.021511808395385742, 0.02147760009765625, 0.021663904190063477, 0.02199295997619629, 0.02259030342102051, 0.022222496032714843, 0.02191974449157715, 0.021723104476928712, 0.021725343704223632, 0.02151750373840332, 0.02162144088745117, 0.021567487716674806, 0.02162892723083496, 0.02179199981689453, 0.021621503829956056, 0.021524288177490233, 0.022069440841674805, 0.02224332809448242, 0.021571584701538086, 0.021589120864868163, 0.021592960357666016, 0.0215285758972168, 0.02165555191040039, 0.02148975944519043, 0.021571424484252928, 0.02157369613647461, 0.02146076774597168, 0.02147555160522461, 0.021583871841430666, 0.021685312271118164, 0.021701568603515624, 0.021669471740722656, 0.0217576961517334, 0.021622848510742188, 0.021284543991088867, 0.02293017578125, 0.02187161636352539, 0.021808128356933593, 0.021803007125854493, 0.02159555244445801, 0.02151817512512207, 0.021662239074707032, 0.021655391693115235, 0.021498239517211915, 0.02153071975708008, 0.022636640548706056, 0.02164873504638672, 0.021623199462890624, 0.02153887939453125, 0.021514240264892577, 0.021507423400878908, 0.021574047088623045, 0.02169267272949219, 0.021606399536132814, 0.021585727691650392, 0.021666048049926757, 0.02163475227355957, 0.02165171241760254, 0.02166579246520996, 0.021605728149414062, 0.021657855987548828, 0.02219379234313965, 0.022186784744262694, 0.021755903244018555, 0.022173696517944336, 0.021557247161865235, 0.021627168655395507, 0.021657567977905273, 0.021753599166870117, 0.021564672470092774, 0.021654272079467775, 0.021612287521362305, 0.02176639938354492, 0.021876735687255858, 0.02238649559020996, 0.02168828773498535, 0.02179852867126465, 0.021712896347045898, 0.021680511474609376, 0.021745664596557617, 0.021715200424194336, 0.021679519653320312, 0.021606176376342774, 0.022358816146850587, 0.022573055267333983, 0.0219814395904541, 0.021792831420898436, 0.022326080322265626, 0.0217445125579834, 0.021657440185546876, 0.021567520141601564, 0.02169664001464844, 0.021589664459228514, 0.022097408294677736, 0.021786495208740233, 0.02206399917602539, 0.021962080001831055, 0.02140140724182129, 0.02187500762939453, 0.021778303146362303, 0.021712896347045898, 0.021661119461059572, 0.021721664428710936, 0.021929983139038087, 0.02158777618408203, 0.022239776611328126, 0.021806751251220703, 0.022013952255249023, 0.021702592849731445, 0.02156265640258789, 0.021580575942993164, 0.021600288391113283, 0.021622528076171876, 0.021530847549438476, 0.02292736053466797, 0.021748928070068358, 0.021598623275756835, 0.021574047088623045, 0.021706911087036134, 0.021649248123168947, 0.02172313690185547, 0.02189334487915039, 0.021720447540283204, 0.021662111282348632, 
0.021579776763916016, 0.021671392440795897, 0.02179724884033203, 0.0217576961517334, 0.02163337516784668, 0.021794912338256835, 0.021676000595092774, 0.02171494483947754, 0.02187468719482422, 0.021579776763916016, 0.021735424041748046, 0.021902559280395507, 0.021576032638549805, 0.021586496353149413, 0.021554208755493163, 0.0215665283203125, 0.021606176376342774, 0.02164121627807617, 0.021622783660888673, 0.02167807960510254, 0.021653408050537108, 0.02156153678894043, 0.021595935821533203, 0.021686399459838867, 0.021651456832885742, 0.02169036865234375, 0.021553152084350585, 0.021536575317382813, 0.021858495712280275, 0.021941951751708984, 0.021769887924194337, 0.021702911376953123, 0.021623199462890624, 0.02165350341796875, 0.021646751403808593, 0.02156368064880371, 0.021283199310302734, 0.02153228759765625, 0.021538400650024415, 0.021508512496948243, 0.021626880645751953, 0.021774112701416017, 0.02163324737548828, 0.021751808166503905, 0.02178860855102539, 0.021852224349975587, 0.021864160537719727, 0.022038015365600586, 0.021844768524169923, 0.021663936614990234, 0.021923168182373047, 0.021699039459228516, 0.021647327423095702, 0.021865856170654296, 0.02174633598327637, 0.021938175201416017, 0.021585920333862304, 0.02159119987487793, 0.021549919128417968, 0.021642751693725586, 0.021464767456054686, 0.02173593521118164, 0.022061376571655272, 0.021934080123901366, 0.021792768478393554, 0.02170172882080078, 0.02167900848388672, 0.021612640380859374, 0.021780384063720702, 0.021760000228881835, 0.021594432830810546, 0.021660831451416014, 0.02218556785583496, 0.021695423126220702, 0.021935808181762696, 0.0216180477142334, 0.021588287353515624, 0.02151487922668457, 0.021689920425415038, 0.02154694366455078, 0.021683712005615235, 0.021584896087646483, 0.021510143280029297, 0.021581823348999024, 0.021592063903808592, 0.021796640396118165, 0.021579135894775392, 0.02155606460571289, 0.021704160690307617, 0.021553695678710936, 0.021581823348999024, 0.021516063690185546, 0.021700960159301758, 0.021776256561279298, 0.02163302421569824, 0.021546239852905272, 0.022497888565063476, 0.021860511779785156, 0.021923360824584962, 0.02124630355834961, 0.021753503799438478, 0.021993728637695314, 0.021576223373413087, 0.021599903106689453, 0.021622848510742188, 0.021647455215454102, 0.0217109432220459, 0.021593280792236328, 0.0216092472076416, 0.02164134407043457, 0.02178656005859375, 0.021698495864868165, 0.021540191650390624, 0.021506591796875, 0.021762304306030274, 0.02191360092163086, 0.021917760848999022, 0.021936063766479493, 0.022169248580932617, 0.02172758483886719, 0.021712095260620116, 0.02162326431274414, 0.021520128250122072, 0.021572160720825195, 0.021503807067871094, 0.02147961616516113, 0.021482528686523436, 0.021466272354125977, 0.021534528732299805, 0.02148940849304199, 0.0214880313873291, 0.021837535858154296, 0.021503488540649415, 0.02163302421569824, 0.021512832641601563, 0.021443647384643556, 0.021910463333129883, 0.022910943984985353, 0.02201193618774414, 0.022165536880493164, 0.021880800247192383, 0.02186604881286621, 0.02163961601257324, 0.021947744369506837, 0.024476320266723632, 0.02298806381225586, 0.02170524787902832, 0.02173766326904297, 0.021720672607421877, 0.021604352951049805, 0.02171945571899414, 0.02152409553527832, 0.021407903671264647, 0.021518304824829103, 0.021428319931030275, 0.021465248107910156, 0.021563392639160156, 0.021699712753295897, 0.021586111068725586, 0.021643455505371095, 0.022964223861694336, 0.024291936874389648, 0.02133843231201172, 0.021587871551513673, 
0.02160451126098633, 0.021581760406494142, 0.021622783660888673, 0.021833728790283204, 0.021594112396240234, 0.02163692855834961, 0.021635135650634765, 0.021620288848876953, 0.02156915283203125, 0.02156470489501953, 0.021556032180786132, 0.021575712203979493, 0.021740255355834962, 0.021579519271850586, 0.021676383972167968, 0.021567487716674806, 0.02188697624206543, 0.024649728775024415, 0.02373980712890625, 0.02185481643676758, 0.022011232376098634, 0.02186204719543457, 0.021834144592285155, 0.021678464889526367, 0.021767776489257814, 0.021676671981811522, 0.021585344314575195, 0.021870975494384767, 0.021669151306152344, 0.021801055908203124, 0.021738304138183593, 0.021663135528564453, 0.02155926322937012, 0.021694944381713866, 0.021592063903808592, 0.021600095748901368, 0.021581119537353515, 0.02147430419921875, 0.021555200576782226, 0.021577472686767577, 0.02153273582458496, 0.02157702445983887, 0.021681024551391603, 0.021589536666870118, 0.021549535751342774, 0.021835775375366212, 0.021757951736450197, 0.021763296127319337, 0.021863199234008788, 0.021589248657226563, 0.02155356788635254, 0.02154070472717285, 0.021458816528320313, 0.02150655937194824, 0.02142630386352539, 0.021482719421386718, 0.021473119735717774, 0.021510271072387694, 0.021549312591552735, 0.0215219841003418, 0.021574848175048827, 0.021211135864257814, 0.021506048202514647, 0.021429920196533205, 0.02146143913269043, 0.021387168884277344, 0.0214138240814209, 0.02153887939453125, 0.021530624389648437, 0.021561311721801757, 0.021644895553588867, 0.021436864852905274, 0.021526527404785157, 0.021406719207763672, 0.021521408081054686, 0.021435808181762696, 0.021582080841064454, 0.02144000053405762, 0.021502368927001952, 0.021535167694091795, 0.02151136016845703, 0.021527360916137696, 0.022164703369140625, 0.02162726402282715, 0.021429887771606447, 0.02144041633605957, 0.021445472717285155, 0.02142620849609375, 0.02145894432067871, 0.021426368713378906, 0.02148067283630371, 0.021516191482543946, 0.021510656356811524, 0.021859519958496092, 0.02155404853820801, 0.021577856063842774, 0.021577247619628905, 0.021430591583251953, 0.0215512638092041, 0.021456575393676756, 0.021438432693481446, 0.021600608825683595, 0.021468416213989257, 0.021457664489746092, 0.02170684814453125, 0.022110111236572267, 0.023874719619750975, 0.022114591598510744, 0.021731903076171875, 0.021958015441894532, 0.02156403160095215, 0.021598207473754884, 0.021739519119262696, 0.021606399536132814, 0.021583839416503905, 0.021633056640625, 0.02182566452026367, 0.021587808609008788, 0.021559328079223634, 0.02201615905761719, 0.0215765438079834, 0.02267033576965332, 0.02205900764465332, 0.021866559982299805, 0.02144339179992676, 0.02180191993713379, 0.021736160278320312, 0.021561344146728514, 0.021532991409301757, 0.021456575393676756, 0.02146303939819336, 0.021559295654296876, 0.021518335342407227, 0.021493759155273438, 0.02150809669494629, 0.02164246368408203, 0.02167875289916992, 0.021599807739257813, 0.021965311050415038, 0.021522495269775392, 0.02152620887756348, 0.021500223159790038, 0.02153267288208008, 0.02160207939147949, 0.02147724723815918, 0.021393760681152344, 0.021520351409912108, 0.02159414482116699, 0.021521951675415037, 0.021506528854370117, 0.021481472015380858, 0.021604352951049805, 0.02149519920349121, 0.0215063362121582, 0.021438112258911134, 0.021504671096801757, 0.02146713638305664, 0.02151801681518555, 0.021637760162353515, 0.021569215774536132, 0.02145894432067871, 0.021497472763061524, 0.021541215896606444, 0.02155027198791504, 
0.021526527404785157, 0.021578912734985353, 0.021444608688354492, 0.021573312759399416, 0.02147737693786621, 0.02167795181274414, 0.021680192947387697, 0.02164726448059082, 0.02158812713623047, 0.02155507278442383, 0.021717119216918945, 0.02152560043334961, 0.021543807983398437, 0.02181532859802246, 0.02147532844543457, 0.021572864532470704, 0.021762815475463868, 0.0214466552734375, 0.0216494083404541, 0.021501951217651367, 0.021581375122070312, 0.021551584243774412, 0.021476320266723633, 0.021174560546875, 0.021547008514404296, 0.02145075225830078, 0.021416032791137695, 0.021536575317382813, 0.02164575958251953, 0.02192367935180664, 0.021759807586669924, 0.021744768142700197, 0.021582719802856445, 0.021518335342407227, 0.021510143280029297, 0.02153891181945801, 0.02151612854003906, 0.021558399200439452, 0.021576736450195314, 0.021903263092041016, 0.021768159866333008, 0.02177462387084961, 0.021662912368774413, 0.021525056838989257, 0.021515392303466798, 0.021828351974487306, 0.02167616081237793, 0.02165555191040039, 0.021669376373291017, 0.021706880569458006, 0.021475711822509767, 0.021583103179931642, 0.021651424407958985, 0.02184886360168457, 0.02170035171508789, 0.021790111541748047, 0.02159814453125, 0.021663679122924804, 0.021582304000854494, 0.021729408264160158, 0.02148384094238281, 0.02173958396911621, 0.021546239852905272, 0.022010623931884767, 0.021597600936889647, 0.02157423973083496, 0.021741567611694337, 0.02159382438659668, 0.021631263732910157, 0.0220731201171875, 0.02166374397277832, 0.021672256469726564, 0.021618688583374023, 0.021718944549560547, 0.02150115203857422, 0.02165353584289551, 0.021630783081054688, 0.021754816055297853, 0.021559072494506837, 0.02173891258239746, 0.021692384719848634, 0.021623743057250976, 0.021620319366455077, 0.022312799453735353, 0.021561824798583984, 0.021991296768188475]",tokens/s,46.07370170312303,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1059.762176,912.130048,0.0,509.607936,491.434496,s,1,7.85234912109375,7.85234912109375,0.0,7.85234912109375,7.85234912109375,7.85234912109375,7.85234912109375,[7.85234912109375],,kWh,2.405287149998306e-05,2.646042185613344e-06,7.292783612000697e-06,3.39916972975971e-05,,MB,1381.920768,1025.376256,0.0,608.17408,592.24832,s,10,0.19996665763854982,0.019996665763854983,0.000131748681444734,0.01998686408996582,0.02008093795776367,0.02019747714996338,0.020290708503723144,"[0.02005504035949707, 0.020047040939331056, 0.02003971290588379, 0.019862207412719726, 0.020009376525878905, 0.020314016342163087, 0.019800159454345705, 0.019929088592529298, 0.019964351654052734, 0.01994566345214844]",tokens/s,12802.13426694031,kWh,5.840438472750217e-07,6.44096236234857e-08,3.6833473911201953e-07,1.016788210010527e-06,tokens/kWh,251773178.99599716,MB,1395.699712,1040.05632,0.0,622.854144,605.085696,s,10,10.51122424316406,1.0511224243164061,0.008739214203431933,1.0486399536132813,1.0635650024414063,1.0646485290527345,1.065515350341797,"[1.057982421875, 1.06332421875, 1.056958984375, 1.044591064453125, 1.0473111572265625, 1.042011962890625, 1.04996875, 1.041940673828125, 1.0414029541015626, 1.0657320556640626]",tokens/s,59.93592995694277,kWh,3.037873132938943e-05,3.350275178322624e-06,1.2106258573885142e-05,4.5835265081597193e-05,tokens/kWh,1374487.523696998,,s,630,10.505954067230212,0.016676117567032103,0.0003330798190631205,0.016607407569885255,0.01693511657714844,0.017105914974212644,0.017864472084045416,"[0.0163786563873291, 0.016881120681762694, 0.01668899154663086, 0.016775775909423828, 0.016633056640625, 0.01662447929382324, 0.016636127471923827, 0.016643808364868163, 0.01698521614074707, 0.01774224090576172, 0.01695555114746094, 0.016858816146850586, 0.016792192459106445, 0.016719871520996094, 0.016769023895263673, 0.016736415863037108, 0.016672607421875, 0.016950592041015625, 0.01675334358215332, 0.016763999938964845, 0.016693727493286133, 0.016664255142211915, 0.016906240463256835, 0.01709337615966797, 0.016959680557250976, 0.016735519409179687, 0.016595487594604493, 0.01662566375732422, 0.016701440811157226, 0.01683443260192871, 0.016754816055297852, 0.016744447708129884, 0.016750848770141602, 0.01660492706298828, 0.01666633605957031, 0.016806175231933593, 0.016600896835327148, 0.01663404846191406, 0.016571392059326173, 0.01667558479309082, 0.01672422409057617, 0.016731647491455077, 0.01665011215209961, 0.01668592071533203, 0.017293088912963866, 0.016687103271484375, 0.01661747169494629, 0.01661952018737793, 0.01659596824645996, 0.01660825538635254, 0.016539167404174805, 0.016727872848510742, 0.016983840942382814, 
0.01690435218811035, 0.01674131202697754, 0.01658233642578125, 0.016640256881713868, 0.01672380828857422, 0.016873472213745116, 0.016955455780029296, 0.017143999099731445, 0.01779871940612793, 0.017024991989135742, 0.016321407318115235, 0.016663679122924803, 0.016652063369750978, 0.017554559707641602, 0.01742732810974121, 0.01802047920227051, 0.016994176864624025, 0.01956662368774414, 0.019191776275634766, 0.016884735107421875, 0.016840927124023436, 0.016883871078491212, 0.016982656478881836, 0.016772544860839844, 0.016773855209350586, 0.01665007972717285, 0.016732160568237304, 0.016969728469848632, 0.016723424911499023, 0.01663030433654785, 0.016652288436889647, 0.016629600524902345, 0.016611488342285156, 0.016600831985473633, 0.016611263275146483, 0.01658310317993164, 0.016635744094848633, 0.01685001564025879, 0.016873504638671873, 0.016642175674438476, 0.01680668830871582, 0.016611328125, 0.016578559875488282, 0.016658079147338866, 0.016631135940551756, 0.016679744720458984, 0.016777088165283203, 0.016630079269409178, 0.016689151763916017, 0.016689151763916017, 0.016660480499267577, 0.016650495529174806, 0.01662950325012207, 0.01691152000427246, 0.016540000915527344, 0.016607776641845703, 0.01663587188720703, 0.01660723114013672, 0.016623264312744142, 0.01694905662536621, 0.01709110450744629, 0.01676288032531738, 0.016744447708129884, 0.01679580879211426, 0.01651696014404297, 0.016636032104492188, 0.01677507209777832, 0.017276575088500976, 0.017144128799438475, 0.017338367462158204, 0.01676873588562012, 0.016812255859375, 0.016664447784423827, 0.016662752151489258, 0.01654537582397461, 0.016786880493164062, 0.016593664169311524, 0.016556032180786134, 0.016614559173583985, 0.016527807235717774, 0.016664608001708985, 0.016679264068603514, 0.016637632369995117, 0.016584672927856446, 0.01661292839050293, 0.020450111389160155, 0.016725311279296873, 0.016951711654663085, 0.016615711212158202, 0.016698848724365233, 0.016542240142822264, 0.016613056182861328, 0.016521055221557616, 0.01682099151611328, 0.016629247665405272, 0.016721887588500975, 0.01672012710571289, 0.01660723114013672, 0.016598335266113283, 0.016898752212524414, 0.016698432922363282, 0.01669526481628418, 0.01662460708618164, 0.016604991912841798, 0.0167521915435791, 0.01664499282836914, 0.016598751068115234, 0.016601119995117188, 0.01656131172180176, 0.016696159362792968, 0.016508544921875, 0.016625152587890626, 0.016493600845336916, 0.01662566375732422, 0.016704639434814452, 0.016630495071411133, 0.016608415603637697, 0.01661628723144531, 0.01677507209777832, 0.01660108757019043, 0.016875455856323242, 0.017057952880859376, 0.016639999389648438, 0.016683008193969725, 0.016866527557373046, 0.016933664321899414, 0.016934911727905275, 0.01732601547241211, 0.01765711975097656, 0.017260576248168947, 0.01680460739135742, 0.016660415649414062, 0.016637664794921875, 0.016678239822387694, 0.01655705642700195, 0.01665023994445801, 0.016229631423950196, 0.016578432083129882, 0.016524192810058593, 0.01663372802734375, 0.016545791625976563, 0.016589088439941405, 0.016598751068115234, 0.016489599227905275, 0.016771039962768554, 0.01647609519958496, 0.01666908836364746, 0.01676950454711914, 0.016848352432250975, 0.016634496688842773, 0.01649072074890137, 0.01647724723815918, 0.01657110404968262, 0.01664204788208008, 0.016773056030273438, 0.01651308822631836, 0.016424736022949218, 0.01664224052429199, 0.016599071502685546, 0.01660108757019043, 0.01664022445678711, 0.016549663543701174, 0.016463872909545898, 0.016338687896728515, 
0.016586175918579103, 0.016578784942626955, 0.01647369575500488, 0.01646080017089844, 0.016416767120361327, 0.016484352111816408, 0.01652511978149414, 0.016410272598266603, 0.016479808807373045, 0.016606176376342773, 0.01674617576599121, 0.016537055969238282, 0.016509855270385742, 0.01653548812866211, 0.01643929672241211, 0.016602783203125, 0.016501087188720703, 0.016809247970581056, 0.016653024673461914, 0.016731967926025392, 0.016678655624389648, 0.016996288299560548, 0.016568832397460938, 0.016605344772338868, 0.016562015533447265, 0.016686880111694335, 0.016464096069335937, 0.016794815063476562, 0.016542528152465822, 0.01652467155456543, 0.01648089599609375, 0.016547679901123047, 0.016478368759155274, 0.01651424026489258, 0.016452415466308594, 0.016229120254516602, 0.01654368019104004, 0.016504032135009766, 0.01663862419128418, 0.016512351989746092, 0.016436031341552734, 0.016436607360839843, 0.01657904052734375, 0.016486400604248046, 0.016527263641357422, 0.016653600692749022, 0.016444223403930664, 0.01651705551147461, 0.01650694465637207, 0.016553983688354493, 0.01656012725830078, 0.016515071868896485, 0.01646940803527832, 0.016577119827270507, 0.016537599563598633, 0.01640003204345703, 0.01670582389831543, 0.016781375885009765, 0.016592159271240234, 0.016548576354980468, 0.016809696197509764, 0.016621856689453124, 0.016721920013427736, 0.016506879806518555, 0.01640415954589844, 0.016419103622436523, 0.016525344848632814, 0.016483808517456056, 0.016797887802124024, 0.016540000915527344, 0.016451520919799803, 0.01640777587890625, 0.016680959701538087, 0.01645244789123535, 0.01662566375732422, 0.016533504486083983, 0.016701440811157226, 0.016558080673217773, 0.01692643165588379, 0.0169800968170166, 0.01688153648376465, 0.01666076850891113, 0.01664112091064453, 0.016722944259643553, 0.016625568389892577, 0.016730112075805666, 0.016760831832885743, 0.017098751068115235, 0.016553983688354493, 0.01644361686706543, 0.01668499183654785, 0.016734048843383788, 0.01680588722229004, 0.016508928298950197, 0.016695295333862305, 0.01664787292480469, 0.016611839294433595, 0.01743244743347168, 0.016620000839233397, 0.01643519973754883, 0.016451456069946288, 0.01672819137573242, 0.016492544174194337, 0.01665023994445801, 0.016778432846069335, 0.016493375778198243, 0.0164454402923584, 0.016736255645751954, 0.016484352111816408, 0.016508127212524416, 0.016466720581054688, 0.016451583862304688, 0.016446847915649414, 0.01653209686279297, 0.016504352569580077, 0.01655855941772461, 0.016490495681762696, 0.01647724723815918, 0.016434112548828126, 0.01653590393066406, 0.01653536033630371, 0.01653936004638672, 0.016455808639526368, 0.01679155158996582, 0.01652908706665039, 0.01655593681335449, 0.01648873519897461, 0.01644761657714844, 0.016459232330322267, 0.01645417594909668, 0.016576671600341798, 0.016559392929077148, 0.016525888442993165, 0.016447231292724608, 0.016656639099121094, 0.01640671920776367, 0.016485343933105467, 0.016564800262451173, 0.01663942337036133, 0.01675948715209961, 0.016565887451171876, 0.01644393539428711, 0.01649564743041992, 0.016436288833618164, 0.016537504196166994, 0.016447488784790038, 0.016504831314086914, 0.016553056716918944, 0.016491424560546874, 0.016504127502441405, 0.016583072662353517, 0.016492832183837892, 0.016622655868530272, 0.01650169563293457, 0.016689151763916017, 0.016488447189331054, 0.01658470344543457, 0.016504831314086914, 0.01655193519592285, 0.016430463790893555, 0.016488544464111327, 0.01623289680480957, 0.017002847671508788, 0.016899744033813478, 
0.016854272842407227, 0.017040063858032226, 0.016982080459594727, 0.01743052864074707, 0.016697536468505858, 0.01674630355834961, 0.01656012725830078, 0.01656947135925293, 0.016509824752807618, 0.01659449577331543, 0.016545984268188478, 0.016498943328857422, 0.016740224838256837, 0.016533279418945314, 0.016584287643432616, 0.01662835121154785, 0.01657206344604492, 0.016562656402587892, 0.0164977912902832, 0.0164401912689209, 0.016504640579223632, 0.016508224487304688, 0.016552831649780272, 0.016496288299560548, 0.016506752014160155, 0.016570848464965822, 0.016478208541870116, 0.01789132881164551, 0.016517120361328123, 0.01699782371520996, 0.016534080505371095, 0.016652288436889647, 0.016515071868896485, 0.01680998420715332, 0.016864383697509765, 0.017021087646484374, 0.016831039428710937, 0.016605344772338868, 0.016592096328735352, 0.01663171195983887, 0.016988895416259767, 0.016583871841430665, 0.0166242561340332, 0.01645724868774414, 0.016457727432250976, 0.016707456588745118, 0.016468255996704102, 0.016515743255615233, 0.016515039443969728, 0.016445472717285158, 0.01651308822631836, 0.01653753662109375, 0.016424095153808594, 0.016964448928833007, 0.016662111282348634, 0.016538015365600588, 0.016475679397583008, 0.01662607955932617, 0.016475488662719726, 0.01668783950805664, 0.01620172882080078, 0.016416767120361327, 0.016566368103027345, 0.01658425521850586, 0.016496992111206053, 0.016561983108520507, 0.016423072814941406, 0.01651430320739746, 0.016607583999633788, 0.016490943908691408, 0.016541439056396483, 0.016462080001831053, 0.016457727432250976, 0.016482559204101563, 0.016568063735961914, 0.016475135803222657, 0.016579584121704103, 0.01662067222595215, 0.01640505599975586, 0.016513343811035155, 0.016673887252807617, 0.016563232421875, 0.016682111740112304, 0.016468671798706053, 0.016615392684936524, 0.016506879806518555, 0.016496736526489256, 0.016438688278198242, 0.016464479446411134, 0.016695295333862305, 0.016408575057983397, 0.016355520248413087, 0.01655900764465332, 0.016456607818603516, 0.016623424530029296, 0.01655027198791504, 0.01674835205078125, 0.016639999389648438, 0.01660326385498047, 0.016535776138305664, 0.016909120559692382, 0.01665119934082031, 0.016490144729614256, 0.016445695877075197, 0.016453279495239257, 0.016511327743530275, 0.01647113609313965, 0.016628000259399416, 0.01655036735534668, 0.016461088180541993, 0.01648115158081055, 0.016528608322143555, 0.016550239562988282, 0.016590816497802734, 0.016500991821289064, 0.016417024612426757, 0.016494815826416015, 0.01654755210876465, 0.01652876853942871, 0.016519840240478516, 0.016611328125, 0.016508928298950197, 0.01652124786376953, 0.016139776229858398, 0.01646214485168457, 0.01649273681640625, 0.016408384323120116, 0.016475679397583008, 0.016463552474975586, 0.016519264221191408, 0.016452287673950194, 0.01644476890563965, 0.016451871871948243, 0.01644697570800781, 0.016479103088378907, 0.016453632354736326, 0.016647327423095704, 0.016449728012084962, 0.016466592788696287, 0.016555648803710937, 0.016613471984863282, 0.016551584243774415, 0.01654438400268555, 0.01641062355041504, 0.016488544464111327, 0.016524864196777345, 0.016507232666015625, 0.01645120048522949, 0.016842815399169923, 0.0164150390625, 0.016365568161010743, 0.01644339179992676, 0.016463872909545898, 0.016475967407226563, 0.016537792205810548, 0.016521215438842773, 0.016483936309814453, 0.01657494354248047, 0.01665567970275879, 0.016703807830810546, 0.01675267219543457, 0.016535839080810546, 0.01641267204284668, 0.016414464950561522, 
0.01649007987976074, 0.016458400726318358, 0.016440832138061523, 0.01656284713745117, 0.01665827178955078, 0.01662156867980957, 0.016527360916137695, 0.01654911994934082, 0.01704012870788574, 0.016517120361328123, 0.016523263931274415, 0.016492319107055665, 0.01659017562866211, 0.016536640167236327, 0.016578176498413085, 0.01652960014343262, 0.01647542381286621, 0.01655062484741211, 0.016615423202514648, 0.01656012725830078, 0.016574464797973632, 0.016519168853759765, 0.016147903442382813, 0.016570911407470704, 0.016889856338500975, 0.016939008712768554, 0.016846111297607422, 0.01676697540283203, 0.0169335994720459, 0.016953344345092772, 0.016893951416015626, 0.016863231658935548, 0.016676864624023437, 0.016633087158203125, 0.016722688674926756, 0.016752096176147462, 0.016800287246704102, 0.01686697578430176, 0.01688438415527344, 0.016848031997680663, 0.016681695938110353, 0.01675872039794922, 0.016697216033935546, 0.016936960220336913, 0.01665843200683594, 0.01666281509399414, 0.01652707290649414, 0.016506879806518555, 0.016816032409667968, 0.016543840408325194, 0.01669081687927246, 0.016519519805908205, 0.01756572723388672, 0.01859584045410156, 0.016750591278076172, 0.017161376953125, 0.01729827117919922, 0.01698918342590332, 0.016763872146606445, 0.016680992126464844, 0.01660723114013672, 0.016705631256103515, 0.016546815872192384, 0.017515424728393555, 0.016747711181640625, 0.017057888031005858, 0.01711177635192871, 0.017575103759765624, 0.017582527160644533, 0.017535200119018556, 0.017660064697265623, 0.017666271209716797, 0.01822287940979004, 0.01725644874572754, 0.016910335540771485, 0.016824512481689452, 0.016786815643310547, 0.01668342399597168, 0.016594879150390623, 0.01661756706237793, 0.016625280380249022, 0.01650726318359375, 0.016664575576782227, 0.016740352630615234, 0.016639999389648438]",tokens/s,59.96599604076628,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, 
in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1516.163072,1350.434816,0.0,981.467136,978.805248,s,1,8.3325400390625,8.3325400390625,0.0,8.3325400390625,8.3325400390625,8.3325400390625,8.3325400390625,[8.3325400390625],,kWh,3.8126878737504434e-05,4.198042305263677e-06,1.1770287193985096e-05,5.409520823675321e-05,,MB,1629.294592,1526.595584,0.0,1109.393408,1064.45312,s,10,0.5448845062255859,0.054488450622558596,0.00035772993408945606,0.05462516784667969,0.054786217880249025,0.05480843601226807,0.0548262105178833,"[0.05375833511352539, 0.054522529602050784, 0.054359710693359375, 0.0538930549621582, 0.05474764633178711, 0.05483065414428711, 0.05478128051757813, 0.054740959167480466, 0.05458499145507813, 0.05466534423828125]",tokens/s,4698.243335515475,kWh,1.560762001715623e-06,1.7212397160919612e-07,1.033411403080306e-06,2.7662973764051256e-06,tokens/kWh,92542472.90386349,MB,1631.002624,1610.481664,0.0,1193.279488,1117.180928,s,10,14.827466064453123,1.4827466064453123,0.0058437859367857035,1.4808502807617185,1.4896680541992187,1.4927709777832032,1.4952533166503907,"[1.4958739013671876, 1.481080810546875, 1.47506494140625, 1.48024267578125, 1.488978515625, 1.4868951416015626, 1.4812662353515624, 1.4769952392578125, 1.4804488525390624, 1.4806197509765624]",tokens/s,42.48871636336712,kWh,3.313050732661795e-05,3.653835636957376e-06,1.6357824732118522e-05,5.314216769569385e-05,tokens/kWh,1185499.2510044887,,s,630,14.824115055084235,0.023530341357276552,0.0005934390800975151,0.023418272018432617,0.023780060386657715,0.023972012901306152,0.02561443174362183,"[0.024063232421875, 0.023460351943969726, 0.02398847961425781, 0.025544704437255858, 0.0341319694519043, 0.023613439559936524, 0.023398399353027344, 0.023440704345703126, 0.02359891128540039, 0.023732864379882812, 0.02342732810974121, 0.023455007553100586, 0.023855104446411132, 0.024265439987182617, 0.02333286476135254, 0.02333638381958008, 0.023290111541748048, 0.023474496841430666, 0.024202463150024413, 0.027228256225585938, 0.023413440704345704, 0.023586559295654296, 0.023318464279174805, 0.023600576400756836, 0.023275680541992187, 0.023243488311767577, 0.023349248886108398, 0.023201791763305665, 0.02331648063659668, 0.02365235137939453, 0.023578367233276366, 0.02343881607055664, 0.023493408203125, 0.023408191680908203, 0.023372224807739258, 0.023265087127685546, 0.0234105281829834, 0.023325023651123048, 0.02346598434448242, 0.02334886360168457, 0.02340083122253418, 0.023736320495605468, 0.023644159317016602, 0.0236440315246582, 0.02339558410644531, 0.02332352066040039, 0.02331648063659668, 0.023371776580810546, 0.023324031829833985, 0.023414527893066406, 0.023360383987426757, 0.023640064239501952, 0.023387584686279297, 0.02326585578918457, 0.023427072525024413, 0.02324803161621094, 
0.02330300712585449, 0.023254751205444336, 0.02326483154296875, 0.02331622314453125, 0.023641056060791015, 0.02333695983886719, 0.02365235137939453, 0.023247072219848633, 0.023308895111083985, 0.023351423263549803, 0.02342092704772949, 0.02327961540222168, 0.023373504638671876, 0.023208255767822265, 0.023640064239501952, 0.023670783996582033, 0.023408639907836915, 0.023404544830322265, 0.023391584396362304, 0.023482847213745116, 0.023533439636230467, 0.023279199600219725, 0.02331622314453125, 0.023286752700805664, 0.023426399230957032, 0.023386783599853515, 0.023620864868164063, 0.02654640007019043, 0.023779136657714844, 0.023750879287719726, 0.024687103271484375, 0.02346188735961914, 0.023523231506347657, 0.02344099235534668, 0.023732736587524415, 0.023676128387451173, 0.02355407905578613, 0.02335206413269043, 0.023453184127807617, 0.023734399795532227, 0.02360972785949707, 0.023301759719848634, 0.02326924705505371, 0.023484256744384764, 0.02345974349975586, 0.02335545539855957, 0.023472831726074218, 0.023289056777954103, 0.023429920196533203, 0.023528640747070312, 0.02327414321899414, 0.023225919723510742, 0.023308576583862303, 0.023466304779052736, 0.023442848205566406, 0.023247295379638672, 0.023292064666748047, 0.023797632217407227, 0.023495935440063478, 0.023583776473999025, 0.023347040176391602, 0.023576576232910155, 0.02328780746459961, 0.023334911346435547, 0.02327756881713867, 0.02332841682434082, 0.023478176116943358, 0.023390432357788087, 0.02332899284362793, 0.023346687316894533, 0.02345369529724121, 0.023371807098388674, 0.023449567794799803, 0.023414783477783203, 0.02326937675476074, 0.023541759490966797, 0.023459840774536132, 0.023375072479248048, 0.023384864807128907, 0.02343731117248535, 0.02331999969482422, 0.023267648696899415, 0.02344576072692871, 0.023388160705566406, 0.023391328811645507, 0.023356319427490235, 0.02341449546813965, 0.023316768646240233, 0.023360767364501954, 0.023380735397338866, 0.023369728088378908, 0.023248895645141602, 0.023275007247924806, 0.023282175064086915, 0.02344988822937012, 0.023303583145141603, 0.02328607940673828, 0.023262336730957033, 0.023763263702392578, 0.023384639739990234, 0.023355072021484374, 0.02335478401184082, 0.023343296051025392, 0.023200223922729492, 0.02332819175720215, 0.02323744010925293, 0.023358495712280273, 0.02338096046447754, 0.02370560073852539, 0.023492416381835936, 0.023283071517944336, 0.024103679656982423, 0.023504192352294923, 0.023923456192016603, 0.02330419158935547, 0.023468032836914062, 0.023347200393676756, 0.02326937675476074, 0.023557952880859375, 0.02326300811767578, 0.02331484794616699, 0.0233570556640625, 0.023892160415649413, 0.023488704681396484, 0.02327347183227539, 0.02332876777648926, 0.023285184860229492, 0.023435392379760743, 0.023392704010009764, 0.02342905616760254, 0.023398271560668947, 0.023510271072387696, 0.023433631896972656, 0.023337535858154297, 0.023594688415527344, 0.023531839370727538, 0.023440607070922853, 0.023300895690917967, 0.02334671974182129, 0.023992799758911134, 0.02368921661376953, 0.023597055435180665, 0.023379072189331055, 0.0233624324798584, 0.02329190444946289, 0.023345151901245118, 0.02331020736694336, 0.023922719955444337, 0.023451744079589845, 0.02334275245666504, 0.023361888885498047, 0.02333407974243164, 0.023341344833374023, 0.023395999908447266, 0.023475296020507814, 0.02350057601928711, 0.02341856002807617, 0.02335366439819336, 0.023533567428588868, 0.023793088912963868, 0.023601728439331053, 0.02326118469238281, 0.023504831314086913, 0.024449087142944335, 
0.023325792312622072, 0.02329884719848633, 0.023423072814941406, 0.02338559913635254, 0.02324720001220703, 0.0233371524810791, 0.023328351974487304, 0.023403072357177736, 0.02359075164794922, 0.023330816268920897, 0.02329599952697754, 0.02328166389465332, 0.023422975540161133, 0.02347007942199707, 0.02343731117248535, 0.023394304275512694, 0.023713760375976563, 0.02385103988647461, 0.024205024719238282, 0.023823040008544922, 0.023889503479003905, 0.02399168014526367, 0.023754911422729494, 0.023505504608154298, 0.023383487701416017, 0.02331216049194336, 0.023239328384399415, 0.023385280609130858, 0.023497343063354492, 0.02326857566833496, 0.02323747253417969, 0.023337024688720703, 0.023562368392944337, 0.026116191864013674, 0.023615007400512696, 0.023603679656982422, 0.02489139175415039, 0.024788639068603516, 0.02366703987121582, 0.02360655975341797, 0.028541631698608398, 0.023597087860107422, 0.023547679901123046, 0.02330406379699707, 0.02332640075683594, 0.023368352890014647, 0.023334720611572265, 0.023417375564575196, 0.02324239921569824, 0.023345151901245118, 0.023265087127685546, 0.023249088287353517, 0.023364959716796876, 0.023368223190307617, 0.02337366485595703, 0.023314720153808595, 0.023257087707519532, 0.023234560012817384, 0.023682687759399416, 0.023590944290161134, 0.02343065643310547, 0.023491424560546877, 0.02351702308654785, 0.023572639465332033, 0.02348624038696289, 0.02331056022644043, 0.023320287704467774, 0.023906591415405274, 0.02441046333312988, 0.023355039596557617, 0.023408639907836915, 0.023455743789672853, 0.023759103775024413, 0.02338707160949707, 0.02345599937438965, 0.02355833625793457, 0.023812480926513672, 0.023675935745239258, 0.023415775299072267, 0.023409696578979493, 0.023430112838745118, 0.023349248886108398, 0.0234715518951416, 0.023427616119384764, 0.023287839889526367, 0.023382015228271484, 0.023348352432250977, 0.02349964714050293, 0.023494335174560548, 0.02343961524963379, 0.02339779281616211, 0.023384735107421874, 0.023473312377929687, 0.023249759674072265, 0.02330828857421875, 0.02352742385864258, 0.02369536018371582, 0.023478271484375, 0.02347417640686035, 0.023842815399169923, 0.02345699119567871, 0.023442207336425783, 0.023599103927612306, 0.023417984008789063, 0.023314559936523437, 0.023419071197509765, 0.023517599105834962, 0.02342032051086426, 0.023495424270629884, 0.023747615814208985, 0.023301088333129882, 0.02326688003540039, 0.024531391143798827, 0.023275360107421875, 0.023349407196044922, 0.023518592834472656, 0.023299936294555665, 0.023790496826171875, 0.023696575164794922, 0.02345235252380371, 0.023314367294311522, 0.023904319763183593, 0.02350284767150879, 0.02331772804260254, 0.02332342338562012, 0.023410335540771484, 0.023375232696533202, 0.02330473518371582, 0.023622079849243163, 0.02371798324584961, 0.02346486473083496, 0.023442432403564452, 0.023352319717407227, 0.023658815383911132, 0.0238721923828125, 0.023952863693237306, 0.02410099220275879, 0.024029600143432618, 0.023760351181030273, 0.02343503952026367, 0.02345417594909668, 0.023279903411865234, 0.023862560272216796, 0.02346575927734375, 0.02339731216430664, 0.023391904830932616, 0.023224672317504882, 0.023299360275268556, 0.02350476837158203, 0.023313247680664062, 0.02327756881713867, 0.023390207290649414, 0.023302143096923827, 0.02517919921875, 0.026559423446655274, 0.023554048538208007, 0.02348441505432129, 0.023453311920166017, 0.02343731117248535, 0.023684383392333985, 0.02346995162963867, 0.023421791076660155, 0.02351513671875, 0.023406015396118165, 
0.023513631820678713, 0.02327350425720215, 0.023221439361572265, 0.023324480056762697, 0.023298336029052735, 0.023321184158325195, 0.02326095962524414, 0.02330611228942871, 0.023261663436889648, 0.023369728088378908, 0.023330623626708985, 0.02328985595703125, 0.023314624786376952, 0.023379968643188476, 0.023481407165527345, 0.023595968246459962, 0.023779199600219725, 0.024022815704345703, 0.023987680435180663, 0.023904800415039062, 0.023596511840820313, 0.023630720138549804, 0.023600736618041993, 0.023499168395996094, 0.023393728256225585, 0.023560768127441407, 0.02331648063659668, 0.02332262420654297, 0.023343103408813477, 0.024182783126831055, 0.025642911911010743, 0.023894111633300782, 0.023516191482543945, 0.023438304901123048, 0.02344927978515625, 0.023314687728881837, 0.023776416778564454, 0.02337593650817871, 0.023419647216796874, 0.023947359085083008, 0.023576543807983397, 0.02339638328552246, 0.023377727508544922, 0.023490623474121095, 0.023431135177612306, 0.023317823410034178, 0.023388128280639648, 0.02347097587585449, 0.023359296798706054, 0.023306432723999022, 0.02327961540222168, 0.023293727874755858, 0.023371360778808595, 0.023414400100708006, 0.023391231536865235, 0.02333286476135254, 0.023367679595947266, 0.02370137596130371, 0.0236627197265625, 0.02346134376525879, 0.023472671508789063, 0.02335673522949219, 0.023392032623291016, 0.0233371524810791, 0.023280351638793946, 0.023394304275512694, 0.023404544830322265, 0.023371135711669922, 0.023304800033569335, 0.023330848693847658, 0.02332057571411133, 0.02348784065246582, 0.02338268852233887, 0.023236608505249022, 0.024130687713623047, 0.023327615737915038, 0.02364355278015137, 0.02337455940246582, 0.023275392532348633, 0.023375871658325196, 0.02332464027404785, 0.02350214385986328, 0.0234237117767334, 0.023398399353027344, 0.023326496124267578, 0.023287839889526367, 0.02331443214416504, 0.023328672409057616, 0.02335526466369629, 0.023376256942749023, 0.023279647827148437, 0.02331648063659668, 0.023443456649780273, 0.023435264587402343, 0.02333078384399414, 0.023449151992797852, 0.023302719116210936, 0.023241727828979493, 0.023268255233764648, 0.02326323127746582, 0.023318527221679687, 0.02336067199707031, 0.023270240783691408, 0.02324412727355957, 0.023255584716796875, 0.023323776245117188, 0.023299072265625, 0.023346656799316405, 0.023276063919067384, 0.02328166389465332, 0.024982976913452148, 0.024568384170532226, 0.02381999969482422, 0.02384515190124512, 0.023414783477783203, 0.023222272872924804, 0.02341231918334961, 0.023490943908691407, 0.023353055953979494, 0.023611711502075194, 0.023853408813476563, 0.023375232696533202, 0.023483200073242186, 0.023820287704467775, 0.02349875259399414, 0.023367328643798826, 0.023400543212890625, 0.023384319305419923, 0.023412736892700195, 0.023331968307495118, 0.023407487869262694, 0.02333695983886719, 0.02330944061279297, 0.023438207626342772, 0.023367679595947266, 0.02342905616760254, 0.02352649688720703, 0.02360371208190918, 0.023514591217041015, 0.02343622398376465, 0.023504831314086913, 0.02324678421020508, 0.02333247947692871, 0.02330067253112793, 0.02345510482788086, 0.02386089515686035, 0.023591232299804688, 0.02347279930114746, 0.023355712890625, 0.02345747184753418, 0.023336767196655273, 0.023496896743774413, 0.023465503692626954, 0.02328828811645508, 0.023422367095947267, 0.02338262367248535, 0.023549951553344727, 0.023379039764404298, 0.023505088806152343, 0.024122079849243163, 0.02371993637084961, 0.023729568481445314, 0.023523935317993162, 0.02348179244995117, 
0.02345631980895996, 0.023406591415405274, 0.02354617691040039, 0.023695039749145507, 0.023629823684692384, 0.023588863372802735, 0.02364959907531738, 0.023562944412231446, 0.023558143615722657, 0.023814144134521483, 0.023555360794067382, 0.023451776504516603, 0.023378528594970704, 0.023369119644165038, 0.023439296722412108, 0.023435199737548828, 0.023556255340576173, 0.02338240051269531, 0.023363744735717774, 0.02327881622314453, 0.023438240051269533, 0.023367679595947266, 0.023330816268920897, 0.023407840728759767, 0.023572639465332033, 0.023597696304321288, 0.023605247497558594, 0.02351923179626465, 0.023433120727539062, 0.023427007675170898, 0.023638175964355468, 0.02349056053161621, 0.023443359375, 0.023384159088134765, 0.023431167602539063, 0.023475456237792968, 0.02357686424255371, 0.023452480316162108, 0.023342655181884765, 0.023867488861083985, 0.023467199325561523, 0.023474336624145508, 0.023433887481689453, 0.023367103576660157, 0.0234968318939209, 0.02354630470275879, 0.02349273681640625, 0.023460960388183592, 0.023642879486083984, 0.023498783111572264, 0.023533279418945313, 0.02378780746459961, 0.02342086410522461, 0.023453760147094726, 0.023363584518432616, 0.02352720069885254, 0.0233842887878418, 0.023396352767944335, 0.02345369529724121, 0.0233670711517334, 0.023426847457885744, 0.023326879501342775, 0.023345279693603515, 0.02336128044128418, 0.02345062446594238, 0.023412511825561522, 0.023367679595947266, 0.023472127914428712, 0.023377920150756838, 0.02341584014892578, 0.0233154239654541, 0.023404544830322265, 0.023390207290649414, 0.023318496704101563, 0.023569728851318358, 0.02356502342224121, 0.023746591567993164, 0.02375267219543457, 0.023948768615722656, 0.02373686408996582, 0.023812095642089845, 0.023916543960571288]",tokens/s,42.49832098975303,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,879.448064,647.888896,0.0,260.046848,258.555392,s,1,7.8527939453125,7.8527939453125,0.0,7.8527939453125,7.8527939453125,7.8527939453125,7.8527939453125,[7.8527939453125],,kWh,1.5225077583333283e-05,1.672210803189071e-06,4.538892519986848e-06,2.1436180906509204e-05,,MB,1310.605312,759.037952,0.0,341.835776,317.950464,s,18,0.1996141767501831,0.011089676486121284,0.00011796392470782542,0.011055184364318849,0.011199401378631593,0.011332230377197265,0.011437722930908204,"[0.010964768409729004, 0.011021056175231934, 0.011035743713378907, 0.01102022361755371, 0.011152447700500489, 0.010991711616516114, 0.011068384170532226, 0.011464096069335938, 0.011044992446899413, 0.010987711906433105, 0.011045056343078613, 0.011077504158020019, 0.011089920043945312, 0.01112700843811035, 0.011065312385559082, 0.011034655570983886, 0.0111146240234375, 
0.0113089599609375]",tokens/s,23084.532747225196,kWh,4.156658072411516e-07,4.583220738520306e-08,1.9724256045298298e-07,6.587405750793376e-07,tokens/kWh,388620360.8593076,MB,1324.593152,786.300928,0.0,369.098752,317.953024,s,18,10.26638409423828,0.5703546719021269,0.002953374153126736,0.5709329223632813,0.5733548583984375,0.5744043029785156,0.5761015637207031,"[0.5730655517578125, 0.5728255004882813, 0.571229736328125, 0.57109521484375, 0.572139404296875, 0.5660000610351562, 0.57652587890625, 0.567683837890625, 0.5707706298828125, 0.5740299072265626, 0.5716837768554688, 0.5695390625, 0.5675003051757812, 0.5672777099609375, 0.5703414916992188, 0.5653462524414062, 0.5724132690429687, 0.56691650390625]",tokens/s,110.4575856105389,kWh,2.214961834083761e-05,2.4426860927560373e-06,7.998725534778504e-06,3.259102996837216e-05,tokens/kWh,1933047.223764886,,s,1134,10.257575246810896,0.009045480817293575,0.0002845528273059231,0.008989727973937989,0.009139334201812744,0.009240865707397461,0.010659195566177383,"[0.008824831962585449, 0.008984576225280762, 0.009033727645874023, 0.009118911743164062, 0.009225312232971192, 0.009236191749572754, 0.009191424369812011, 0.009148415565490722, 0.009139967918395995, 0.009076992034912109, 0.009066495895385742, 0.00902143955230713, 0.009048064231872559, 0.009017120361328125, 0.009055871963500977, 0.009022047996520996, 0.009340415954589844, 0.009185952186584473, 0.009137696266174316, 0.009173024177551269, 0.009058112144470215, 0.009036064147949219, 0.009011391639709473, 0.008996864318847657, 0.009076767921447754, 0.009062432289123535, 0.008977375984191894, 0.008940511703491211, 0.009054112434387206, 0.009009023666381837, 0.00899443244934082, 0.008974656105041504, 0.009012928009033203, 0.00908512020111084, 0.009079551696777344, 0.00908681583404541, 0.009107263565063476, 0.009052160263061524, 0.009202719688415528, 0.009141216278076172, 0.009013248443603515, 0.009506815910339356, 0.00918553638458252, 0.009190272331237793, 0.009091744422912598, 0.00902348804473877, 0.009130111694335937, 0.009134176254272462, 0.009027584075927735, 0.009009311676025391, 0.009107647895812988, 0.009432736396789551, 0.009070591926574707, 0.009091072082519532, 0.009160223960876466, 0.009187552452087402, 0.009032064437866211, 0.00903718376159668, 0.009046079635620118, 0.008970975875854492, 0.009128767967224121, 0.008989567756652833, 0.009017375946044922, 0.008761024475097657, 0.00905401611328125, 0.009236639976501464, 0.009026111602783203, 0.008990559577941894, 0.009113120079040528, 0.00896678352355957, 0.008957344055175781, 0.008970848083496094, 0.00893289566040039, 0.008982848167419434, 0.009015456199645996, 0.009198752403259278, 0.00912384033203125, 0.009040736198425292, 0.00901529598236084, 0.00898812770843506, 0.008962656021118164, 0.009125951766967774, 0.00921340847015381, 0.011413727760314941, 0.010834112167358398, 0.009102751731872558, 0.009097824096679688, 0.009096480369567872, 0.008952639579772948, 0.00891484832763672, 0.009098272323608398, 0.008995648384094238, 0.00901750373840332, 0.008988863945007324, 0.009310015678405762, 0.009037823677062988, 0.008992768287658692, 0.009486335754394531, 0.008916383743286132, 0.008972895622253419, 0.008955488204956055, 0.008944160461425781, 0.008931167602539062, 0.008989855766296387, 0.009034079551696777, 0.008939264297485351, 0.008909600257873535, 0.008935423851013183, 0.009006208419799804, 0.008940480232238769, 0.008918975830078125, 0.008993824005126954, 0.008987615585327149, 0.008951359748840333, 0.008928704261779784, 0.008969504356384278, 
0.008938591957092285, 0.008972512245178223, 0.008937151908874511, 0.009013983726501465, 0.009068544387817384, 0.009104991912841797, 0.009091232299804687, 0.00905023956298828, 0.00898198413848877, 0.008949631690979004, 0.008942079544067384, 0.00899891185760498, 0.008994815826416015, 0.009102592468261719, 0.008919424057006835, 0.008950143814086915, 0.008951264381408692, 0.008978143692016601, 0.008960831642150878, 0.008928288459777832, 0.008936544418334962, 0.008941408157348632, 0.008910304069519042, 0.00890732765197754, 0.00910483169555664, 0.009015872001647949, 0.00890880012512207, 0.008953696250915527, 0.009928416252136231, 0.009596384048461915, 0.010367679595947265, 0.009158944129943848, 0.009119487762451172, 0.009094688415527344, 0.00900547218322754, 0.009076992034912109, 0.009230175971984863, 0.00904582405090332, 0.009050527572631835, 0.008969504356384278, 0.009010016441345215, 0.008953215599060059, 0.008992608070373536, 0.009040543556213379, 0.00899449634552002, 0.00899078369140625, 0.008996992111206055, 0.00897036838531494, 0.008918304443359375, 0.009128671646118164, 0.009087200164794923, 0.008988320350646973, 0.009169024467468262, 0.009011199951171875, 0.009024576187133789, 0.009041152000427245, 0.009049792289733887, 0.008982848167419434, 0.008965824127197266, 0.008950016021728516, 0.009275263786315917, 0.009215456008911133, 0.009009568214416504, 0.008999199867248535, 0.008984064102172852, 0.008978591918945313, 0.00894364833831787, 0.00896617603302002, 0.00912179183959961, 0.00900499153137207, 0.00891443157196045, 0.009015872001647949, 0.008998208045959473, 0.008762751579284668, 0.008957056045532227, 0.008929056167602539, 0.009076576232910156, 0.009037983894348145, 0.010182271957397461, 0.012036479949951173, 0.00903769588470459, 0.009033280372619629, 0.009003583908081054, 0.008968064308166504, 0.00899289608001709, 0.00897436809539795, 0.00893881607055664, 0.008913567543029785, 0.008951807975769043, 0.008986944198608399, 0.008984512329101562, 0.008912639617919922, 0.008949664115905762, 0.008966239929199218, 0.00893449592590332, 0.008977312088012696, 0.009026592254638671, 0.009024479866027831, 0.009007424354553222, 0.008989983558654786, 0.008946080207824707, 0.00909216022491455, 0.009009663581848144, 0.009034175872802735, 0.009006431579589844, 0.009052831649780274, 0.009000672340393066, 0.008943424224853516, 0.00893996810913086, 0.00951097583770752, 0.009012895584106445, 0.008974656105041504, 0.009010687828063964, 0.009020000457763673, 0.009040800094604493, 0.008993184089660645, 0.009009152412414552, 0.008974944114685059, 0.00891808032989502, 0.008958975791931152, 0.008998847961425782, 0.008989919662475586, 0.008979104042053222, 0.009021344184875489, 0.008949983596801757, 0.008973312377929688, 0.0089584321975708, 0.00895849609375, 0.008936736106872558, 0.00899350357055664, 0.008962047576904298, 0.00894156837463379, 0.008933119773864746, 0.009144543647766113, 0.008907936096191406, 0.008962143898010254, 0.008814720153808594, 0.008988544464111329, 0.008971615791320801, 0.008968992233276367, 0.008984576225280762, 0.008964287757873534, 0.00896992015838623, 0.008986432075500489, 0.009668255805969238, 0.00896457576751709, 0.009019359588623047, 0.008980704307556151, 0.008955072402954102, 0.00899078369140625, 0.00894438362121582, 0.008927040100097657, 0.009002495765686035, 0.009011808395385743, 0.00900271987915039, 0.008961440086364746, 0.008956928253173829, 0.009063584327697754, 0.008964351654052734, 0.008974911689758301, 0.009029600143432617, 0.00912816047668457, 0.008976192474365234, 
0.0089169921875, 0.00894057559967041, 0.008974528312683106, 0.008946656227111816, 0.009022624015808105, 0.009222816467285157, 0.009005151748657226, 0.008968095779418945, 0.008965855598449706, 0.008974847793579101, 0.008962016105651855, 0.008949567794799804, 0.008975872039794922, 0.009052672386169434, 0.009010687828063964, 0.009017855644226073, 0.008996864318847657, 0.009043968200683594, 0.008974464416503906, 0.00907260799407959, 0.008953760147094727, 0.00894976043701172, 0.008964159965515136, 0.008941184043884278, 0.009055808067321777, 0.008950336456298828, 0.009115103721618653, 0.009001184463500976, 0.01083033561706543, 0.011249119758605958, 0.009349663734436036, 0.00909721565246582, 0.009121248245239258, 0.00900051212310791, 0.00897532844543457, 0.00894156837463379, 0.008746175765991212, 0.009067328453063964, 0.009004544258117676, 0.009038399696350098, 0.008998496055603027, 0.00898083209991455, 0.008974111557006836, 0.008947104454040527, 0.008989503860473633, 0.008937439918518066, 0.00898256015777588, 0.00899891185760498, 0.008981663703918457, 0.009091456413269043, 0.008997183799743652, 0.008958271980285645, 0.008947104454040527, 0.008985024452209472, 0.008955679893493652, 0.008913472175598144, 0.009120512008666991, 0.008928031921386718, 0.008980607986450196, 0.008965408325195313, 0.008939711570739747, 0.008939200401306153, 0.008940383911132813, 0.008951807975769043, 0.008945792198181153, 0.008900480270385742, 0.008931520462036133, 0.0089967041015625, 0.009021792411804199, 0.008916095733642578, 0.008898752212524414, 0.00893779182434082, 0.008937472343444825, 0.008914079666137695, 0.008954719543457031, 0.00904412841796875, 0.009126815795898437, 0.008999679565429687, 0.009021280288696289, 0.00900051212310791, 0.008991519927978515, 0.009002495765686035, 0.008947903633117676, 0.008900927543640138, 0.008945407867431641, 0.008932607650756836, 0.008926207542419434, 0.008972096443176269, 0.009004575729370117, 0.009034527778625488, 0.008996735572814942, 0.00899721622467041, 0.009176735877990722, 0.008988672256469727, 0.00894156837463379, 0.008962176322937011, 0.008963968276977539, 0.00899071979522705, 0.009021663665771484, 0.008823200225830078, 0.009004447937011719, 0.009206560134887696, 0.008980607986450196, 0.008959327697753906, 0.009006752014160156, 0.009367648124694825, 0.008960607528686524, 0.008998815536499023, 0.009031488418579101, 0.009036128044128417, 0.009025664329528808, 0.009023360252380371, 0.00898812770843506, 0.008980863571166993, 0.009421088218688966, 0.009178655624389648, 0.009238816261291504, 0.00902560043334961, 0.008990880012512206, 0.009031455993652343, 0.00901692771911621, 0.009094911575317383, 0.00898464012145996, 0.009137855529785157, 0.008951040267944336, 0.008967840194702148, 0.008957951545715333, 0.00917199993133545, 0.008948672294616699, 0.008959775924682617, 0.009027839660644531, 0.009220095634460449, 0.009342464447021484, 0.009299712181091309, 0.009506943702697754, 0.009324959754943848, 0.009171456336975097, 0.00909705638885498, 0.009418335914611817, 0.009103808403015137, 0.012152671813964844, 0.010419615745544434, 0.00908694362640381, 0.009067135810852051, 0.009054559707641602, 0.009018943786621094, 0.009036992073059082, 0.00905513572692871, 0.009052063941955566, 0.009015616416931152, 0.00900483226776123, 0.009019392013549805, 0.008951807975769043, 0.009040063858032226, 0.008962047576904298, 0.008974464416503906, 0.008974016189575195, 0.009055232048034668, 0.008973312377929688, 0.008986623764038085, 0.008992192268371581, 0.009108223915100098, 0.009638272285461425, 
0.009500608444213868, 0.00930403232574463, 0.009156448364257812, 0.009042271614074707, 0.009042240142822266, 0.009008831977844238, 0.00903388786315918, 0.009003583908081054, 0.008978207588195801, 0.008979647636413575, 0.009675840377807617, 0.008989760398864746, 0.008978879928588868, 0.008953248023986816, 0.0089649600982666, 0.009021280288696289, 0.008934656143188476, 0.008931903839111329, 0.008957759857177734, 0.008932031631469727, 0.008930560111999511, 0.008931936264038086, 0.008942879676818848, 0.008942208290100097, 0.008912768363952637, 0.009054431915283203, 0.008951807975769043, 0.008919039726257324, 0.008920096397399902, 0.008906911849975586, 0.008950816154479981, 0.008957728385925293, 0.008980480194091797, 0.008953856468200684, 0.008888319969177246, 0.008954943656921387, 0.008958559989929199, 0.008930720329284669, 0.008924096107482911, 0.009006815910339356, 0.008943903923034667, 0.008928256034851074, 0.008922112464904786, 0.008945664405822755, 0.009021408081054687, 0.008898591995239258, 0.008956255912780762, 0.009064127922058105, 0.009021311759948731, 0.009007200241088868, 0.009004287719726563, 0.009089792251586914, 0.008976384162902832, 0.008950976371765137, 0.00892191982269287, 0.008939167976379395, 0.008946016311645507, 0.008933568000793457, 0.008954879760742187, 0.008897279739379883, 0.008973759651184082, 0.008960639953613281, 0.008802304267883301, 0.008951040267944336, 0.00897920036315918, 0.009031167984008789, 0.009181695938110352, 0.00903171157836914, 0.00896735954284668, 0.00897923183441162, 0.008962400436401367, 0.008937312126159667, 0.008985535621643066, 0.008975168228149415, 0.008937536239624023, 0.008927040100097657, 0.008951999664306641, 0.008988287925720215, 0.008980863571166993, 0.008966143608093263, 0.00892518424987793, 0.009028703689575195, 0.00895254421234131, 0.00894153594970703, 0.008887776374816895, 0.008940287590026855, 0.009244671821594238, 0.008957951545715333, 0.011403264045715332, 0.010727423667907715, 0.009027551651000977, 0.008953887939453125, 0.008975839614868164, 0.00905065631866455, 0.008976384162902832, 0.008944992065429688, 0.008948384284973144, 0.008994048118591308, 0.00895257568359375, 0.008875231742858886, 0.008894847869873048, 0.008927935600280762, 0.008912223815917969, 0.008910880088806153, 0.008892800331115723, 0.00888764762878418, 0.008929920196533204, 0.00894105625152588, 0.00888419246673584, 0.009183808326721192, 0.010092576026916504, 0.009052063941955566, 0.008939167976379395, 0.008997247695922852, 0.008920191764831543, 0.008883071899414062, 0.008949407577514648, 0.009001119613647461, 0.008958175659179687, 0.008930815696716308, 0.0090098876953125, 0.008967935562133789, 0.008959903717041015, 0.00893126392364502, 0.009065983772277832, 0.008764575958251954, 0.008928128242492676, 0.008937439918518066, 0.008951680183410644, 0.008943936347961427, 0.008975584030151368, 0.008982303619384766, 0.008936256408691405, 0.008951871871948243, 0.009062335968017579, 0.009081855773925781, 0.00895299243927002, 0.008976192474365234, 0.008978464126586915, 0.008989695549011231, 0.008998944282531738, 0.009013888359069824, 0.009016863822937012, 0.008948224067687988, 0.009128255844116211, 0.009168224334716797, 0.008942208290100097, 0.008939295768737794, 0.00890659236907959, 0.008876447677612304, 0.008950976371765137, 0.008972736358642578, 0.008936991691589355, 0.008972895622253419, 0.00898646354675293, 0.00893619155883789, 0.008947360038757323, 0.00898252773284912, 0.009001248359680176, 0.00906214427947998, 0.010520671844482422, 0.011323007583618164, 
0.010008831977844239, 0.00911580753326416, 0.009051008224487305, 0.009028703689575195, 0.008963040351867676, 0.009255680084228515, 0.009014623641967773, 0.009098048210144044, 0.009037823677062988, 0.009627743721008301, 0.009077919960021972, 0.009865983963012696, 0.00913593578338623, 0.009082431793212891, 0.009043744087219238, 0.00900592041015625, 0.00910540771484375, 0.00899449634552002, 0.009090559959411621, 0.008984992027282715, 0.009000672340393066, 0.00900716781616211, 0.008927871704101562, 0.009064191818237305, 0.008978688240051269, 0.009027104377746582, 0.008763872146606445, 0.008894463539123536, 0.008941696166992188, 0.008990816116333008, 0.008976160049438476, 0.009122879981994628, 0.008956864356994628, 0.00900476837158203, 0.008965408325195313, 0.0092293119430542, 0.00901084804534912, 0.011481311798095703, 0.010864768028259277, 0.009003007888793945, 0.00898252773284912, 0.008976384162902832, 0.008994815826416015, 0.008916768074035644, 0.008913120269775391, 0.00899020767211914, 0.008956064224243164, 0.009100768089294433, 0.009003904342651367, 0.008953632354736328, 0.008941887855529785, 0.008933440208435058, 0.008889792442321777, 0.00901750373840332, 0.008947967529296875, 0.008937439918518066, 0.008906047821044922, 0.00890931224822998, 0.008984800338745116, 0.00905840015411377, 0.008966272354125976, 0.008961824417114258, 0.008972415924072266, 0.009519136428833008, 0.008966079711914062, 0.009150367736816406, 0.008965632438659669, 0.00892569637298584, 0.009193792343139648, 0.00898534393310547, 0.009143424034118653, 0.008926176071166992, 0.009052576065063477, 0.008988351821899413, 0.008931551933288574, 0.008944160461425781, 0.008916255950927734, 0.009183967590332031, 0.009033056259155273, 0.009034399986267089, 0.009080575942993165, 0.00900928020477295, 0.00893660831451416, 0.008913887977600097, 0.009074560165405273, 0.009048031806945802, 0.008964256286621093, 0.008955712318420411, 0.008935711860656738, 0.00870809555053711, 0.008970208168029785, 0.009154751777648925, 0.00903337574005127, 0.008949952125549316, 0.008976223945617676, 0.009023648262023926, 0.009003168106079101, 0.008955103874206543, 0.00916329574584961, 0.008993056297302246, 0.008963904380798339, 0.009027584075927735, 0.009175104141235351, 0.00902348804473877, 0.009000896453857422, 0.009025535583496093, 0.00901529598236084, 0.008965279579162598, 0.009010016441345215, 0.009197216033935546, 0.00908460807800293, 0.009020064353942871, 0.009047231674194335, 0.009030464172363282, 0.009048064231872559, 0.008988767623901368, 0.009010720252990722, 0.009023872375488282, 0.008993087768554687, 0.008984255790710449, 0.00905628776550293, 0.009007072448730469, 0.009084223747253418, 0.008962752342224121, 0.008989983558654786, 0.008962559700012206, 0.009025759696960449, 0.009107456207275391, 0.008982111930847168, 0.00897475242614746, 0.008961088180541992, 0.008921664237976074, 0.008949567794799804, 0.008968159675598145, 0.009095775604248046, 0.009005056381225587, 0.008933600425720214, 0.009045439720153809, 0.009527647972106933, 0.0098853759765625, 0.009118016242980958, 0.009115455627441406, 0.0090066556930542, 0.009064448356628419, 0.008947680473327637, 0.00891750431060791, 0.008892160415649414, 0.00895631980895996, 0.009074687957763672, 0.008929280281066895, 0.008973983764648438, 0.00899897575378418, 0.008844415664672851, 0.009003904342651367, 0.00902348804473877, 0.009000960350036622, 0.009119744300842286, 0.00906611156463623, 0.008969792366027832, 0.008961952209472657, 0.009005184173583985, 0.008962847709655762, 0.0089169921875, 
0.008915200233459472, 0.00897548770904541, 0.009067296028137207, 0.008885951995849609, 0.008915103912353516, 0.008949215888977052, 0.008935968399047851, 0.008904704093933105, 0.008924544334411621, 0.00892512035369873, 0.009097920417785645, 0.009000767707824707, 0.008962176322937011, 0.009011263847351075, 0.008916192054748535, 0.008909536361694335, 0.008922304153442383, 0.008966527938842773, 0.008919551849365234, 0.008965439796447754, 0.008934016227722169, 0.008941632270812988, 0.008994815826416015, 0.009008416175842285, 0.008960736274719239, 0.008891839981079102, 0.008983200073242187, 0.008961695671081543, 0.008972543716430664, 0.00899071979522705, 0.00896121597290039, 0.00893830394744873, 0.009015328407287597, 0.009521120071411133, 0.009004256248474122, 0.00898742389678955, 0.009678848266601562, 0.009066207885742188, 0.009017536163330078, 0.008986399650573731, 0.008980799674987792, 0.008964032173156737, 0.008942815780639648, 0.008952704429626465, 0.009029600143432617, 0.009158656120300293, 0.008994848251342773, 0.009084320068359375, 0.008958239555358887, 0.009144703865051269, 0.009002592086791992, 0.009045503616333007, 0.00869007968902588, 0.008966912269592285, 0.008958208084106445, 0.009034496307373047, 0.008993023872375489, 0.008971487998962403, 0.00901200008392334, 0.009226176261901855, 0.009102751731872558, 0.009045536041259766, 0.009063072204589843, 0.009041952133178712, 0.009003199577331544, 0.008994815826416015, 0.009093119621276855, 0.009004287719726563, 0.009003487586975098, 0.009044256210327149, 0.008960000038146973, 0.008999135971069336, 0.008988448143005371, 0.00894976043701172, 0.008929311752319336, 0.008899871826171874, 0.0088787841796875, 0.00893280029296875, 0.00893836784362793, 0.00910431957244873, 0.008991168022155762, 0.008986944198608399, 0.00901529598236084, 0.008912320137023927, 0.008915648460388184, 0.00888003158569336, 0.009077055931091309, 0.008965567588806152, 0.008933440208435058, 0.009074848175048828, 0.0091843843460083, 0.009114815711975097, 0.009121472358703614, 0.009260640144348145, 0.009039615631103516, 0.00903545570373535, 0.008983712196350097, 0.009059200286865235, 0.009036128044128417, 0.009015904426574708, 0.009033280372619629, 0.009052191734313965, 0.009025312423706055, 0.008941984176635743, 0.008956128120422364, 0.00894156837463379, 0.008943615913391113, 0.00891062355041504, 0.00887551975250244, 0.008956640243530274, 0.008947487831115722, 0.008931424140930176, 0.008929247856140137, 0.008947039604187012, 0.008974176406860351, 0.00883737564086914, 0.009066656112670898, 0.009120767593383788, 0.009184127807617187, 0.009070624351501464, 0.009064576148986817, 0.008966239929199218, 0.008988544464111329, 0.008939711570739747, 0.008933055877685546, 0.009017375946044922, 0.009011296272277832, 0.009005056381225587, 0.00897875213623047, 0.009108799934387207, 0.009075072288513184, 0.00897993564605713, 0.008927840232849121, 0.008927103996276855, 0.00898464012145996, 0.0089683837890625, 0.008959808349609376, 0.009050111770629882, 0.008953856468200684, 0.008918560028076172, 0.00893177604675293, 0.009176671981811524, 0.009113823890686035, 0.009167072296142578, 0.009745951652526855, 0.00907868766784668, 0.009034303665161133, 0.009023839950561524, 0.009004287719726563, 0.00905401611328125, 0.00897056007385254, 0.008985983848571777, 0.00892848014831543, 0.00915014362335205, 0.008945664405822755, 0.0090316801071167, 0.009099264144897461, 0.009058112144470215, 0.008925375938415527, 0.00901046371459961, 0.0089966402053833, 0.009062432289123535, 0.009046624183654785, 
0.009093440055847168, 0.009142271995544434, 0.009105440139770507, 0.009035167694091796, 0.009077312469482422, 0.00903987216949463, 0.009051872253417968, 0.009114944458007812, 0.009048895835876465, 0.009013407707214356, 0.009246303558349609, 0.009034144401550292, 0.009193471908569336, 0.009050111770629882, 0.009025535583496093, 0.008706591606140136, 0.008992480278015138, 0.008982975959777833, 0.008976320266723632, 0.008913023948669434, 0.008904576301574707, 0.008931327819824218, 0.00894268798828125, 0.008929920196533204, 0.008894399642944336, 0.00896025562286377, 0.008922528266906739, 0.008917087554931641, 0.008989279747009277, 0.008960160255432129, 0.008994784355163574, 0.008942655563354492, 0.00898745632171631, 0.008883584022521972, 0.008944607734680176, 0.008929183959960937, 0.008904447555541992, 0.00890060806274414, 0.008921119689941406, 0.00898249626159668, 0.008955936431884765, 0.008933216094970704, 0.009012607574462891, 0.008951744079589844, 0.008905471801757813, 0.008933440208435058, 0.00890454387664795, 0.008963264465332032, 0.008948703765869141, 0.008941247940063476, 0.009007424354553222, 0.0090316801071167, 0.00900438404083252, 0.008923808097839355, 0.009016511917114257, 0.00898953628540039, 0.008975392341613769, 0.008969375610351563, 0.008879903793334962, 0.00892470359802246, 0.008940064430236816, 0.008972224235534667, 0.00899071979522705, 0.008895711898803711, 0.00892188835144043, 0.008927424430847168, 0.009041728019714356, 0.00898252773284912, 0.009041631698608398, 0.009078144073486328, 0.009072768211364745, 0.009007904052734375, 0.009732383728027344, 0.008973888397216797, 0.008937408447265625, 0.00893564796447754, 0.008953184127807616, 0.008976736068725587, 0.00881049633026123, 0.009014464378356934, 0.008989503860473633, 0.00897433567047119, 0.00903104019165039, 0.009011839866638184, 0.009046015739440917, 0.008958239555358887, 0.008937184333801269, 0.009471391677856445, 0.009120351791381836, 0.009108896255493165, 0.00955247974395752, 0.011686240196228027, 0.00911292839050293, 0.009060480117797852, 0.009119359970092774, 0.009084671974182128, 0.008992959976196289, 0.008935104370117188, 0.009060576438903808, 0.008960736274719239, 0.008945664405822755, 0.009012576103210448, 0.009005215644836426, 0.00899839973449707, 0.009081503868103028, 0.009031295776367188, 0.009036224365234375, 0.009068832397460938, 0.009162816047668457, 0.009035327911376952, 0.008922495841979981, 0.009034751892089844, 0.009011199951171875, 0.009294976234436035, 0.00907875156402588, 0.009234463691711426, 0.009284704208374024, 0.008982432365417481, 0.008993791580200196, 0.008987039566040038, 0.009011679649353028, 0.009027584075927735, 0.009003007888793945, 0.008952927589416505, 0.008915871620178222, 0.008947423934936524, 0.009104736328125, 0.008950719833374024, 0.008920991897583008, 0.008939807891845702, 0.008976192474365234, 0.008933695793151855, 0.008958911895751953, 0.008948479652404785, 0.00902143955230713, 0.00905583953857422, 0.009096799850463867, 0.009040191650390625, 0.009044095993041993, 0.00892147159576416, 0.008919039726257324, 0.008717984199523925, 0.008907584190368653, 0.008931327819824218, 0.008945664405822755, 0.008928895950317383, 0.008945759773254394, 0.008988960266113281, 0.00893289566040039, 0.008913375854492188, 0.008949440002441406, 0.008976703643798828, 0.00897433567047119, 0.008907808303833007, 0.008930463790893555, 0.008951616287231446, 0.008980480194091797, 0.008999199867248535, 0.009090784072875977, 0.009136128425598144, 0.009082880020141602, 0.009105536460876465, 0.009150336265563965, 
0.009134336471557617, 0.009151264190673828, 0.009005439758300782, 0.008935392379760742, 0.008935711860656738, 0.008984928131103515, 0.009000224113464355, 0.008995552062988282, 0.009038847923278808, 0.009114720344543458, 0.009035072326660155, 0.009027935981750489, 0.008984224319458008, 0.00895792007446289, 0.008960864067077638, 0.008955360412597656, 0.008993087768554687, 0.008952896118164062, 0.009015263557434082, 0.008971232414245606, 0.009009152412414552, 0.009047103881835937, 0.00892204761505127, 0.008978143692016601, 0.009015168190002441, 0.008986271858215333, 0.008938240051269532, 0.00914579200744629, 0.008988608360290527, 0.009040575981140136, 0.009027520179748534, 0.008955904006958008, 0.008947744369506835, 0.008969311714172363, 0.008954815864562989, 0.008964032173156737, 0.00898252773284912, 0.00894976043701172, 0.008927136421203614, 0.009053471565246583, 0.009020159721374512]",tokens/s,110.55244272788165,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,879.788032,662.56896,0.0,260.046848,258.555392,s,1,7.6580693359375,7.6580693359375,0.0,7.6580693359375,7.6580693359375,7.6580693359375,7.6580693359375,[7.6580693359375],,kWh,1.546157191250283e-05,1.6983004759740771e-06,4.540003632008682e-06,2.169987602048559e-05,,MB,1341.857792,759.037952,0.0,341.835776,317.950464,s,14,0.19417334270477296,0.013869524478912354,0.00010517210179652958,0.013833615779876709,0.01401504602432251,0.014100852346420289,0.01414809329032898,"[0.013838239669799805, 0.013851840019226075, 0.014159903526306153, 0.013826239585876464, 0.013828991889953613, 0.01382083225250244, 0.013797727584838868, 0.014069055557250977, 0.013889023780822754, 0.013791104316711426, 0.013865728378295899, 0.013808032035827637, 0.01384284782409668, 0.01378377628326416]",tokens/s,18457.734465895366,kWh,4.0430467469416875e-07,4.458645456758525e-08,2.1527333319113485e-07,6.641644624528889e-07,tokens/kWh,385446699.5336397,MB,1381.986304,786.300928,0.0,369.098752,317.953024,s,14,10.099852355957031,0.7214180254255023,0.0033747384144426156,0.719999267578125,0.7265133544921875,0.7269929077148437,0.7273353002929688,"[0.7241568603515625, 0.7274208984375, 0.7212330322265625, 0.7190859985351562, 0.726762451171875, 0.7175048828125, 0.7199035034179687, 0.7200950317382813, 0.7180828857421875, 0.7185093994140626, 0.7182341918945313, 0.7239569702148437, 0.71897412109375, 0.72593212890625]",tokens/s,87.32800925350006,kWh,2.0938425898222758e-05,2.309153294804496e-06,7.70897903795113e-06,3.095655823097839e-05,tokens/kWh,2035109.9605432095,,s,882,10.09300582027433,0.011443317256546886,0.00018695391206423247,0.011403183937072753,0.011536511898040771,0.011651961660385131,0.012133155317306516,"[0.011290623664855956, 0.011498527526855469, 0.011461600303649903, 0.011411135673522949, 0.011386879920959473, 0.011376447677612304, 0.01141379165649414, 0.011340000152587891, 0.01142579174041748, 0.011341312408447265, 
0.012181599617004395, 0.012630784034729003, 0.011945631980895996, 0.011491552352905273, 0.011462431907653808, 0.011611392021179199, 0.011551487922668456, 0.011409695625305177, 0.011421407699584961, 0.011526304244995117, 0.011450464248657227, 0.0113918399810791, 0.011434911727905273, 0.011412544250488281, 0.011512255668640137, 0.011666272163391113, 0.011426976203918457, 0.011432448387145995, 0.011413439750671387, 0.011359328269958497, 0.011579456329345704, 0.011827872276306153, 0.011477248191833496, 0.011428000450134278, 0.011437888145446778, 0.01194752025604248, 0.0113951358795166, 0.01137507152557373, 0.011429887771606445, 0.011653120040893555, 0.011425567626953124, 0.011474271774291993, 0.01146348762512207, 0.011358271598815918, 0.011373760223388672, 0.011434528350830078, 0.011416000366210937, 0.011411295890808106, 0.011446271896362305, 0.011376799583435059, 0.011392864227294922, 0.011393152236938476, 0.01138649559020996, 0.011380191802978515, 0.01136515235900879, 0.011400544166564941, 0.011467424392700196, 0.011405055999755859, 0.011351455688476562, 0.011387104034423829, 0.011428480148315429, 0.01143120002746582, 0.01148134422302246, 0.011180255889892578, 0.011547807693481446, 0.01144217586517334, 0.011501536369323731, 0.011446944236755371, 0.01141759967803955, 0.011336000442504882, 0.01142137622833252, 0.011439871788024903, 0.011416959762573241, 0.01155571174621582, 0.01144422435760498, 0.011429887771606445, 0.011392704010009766, 0.01142518424987793, 0.011420576095581055, 0.011444576263427734, 0.011392448425292969, 0.01136473560333252, 0.011433119773864747, 0.011430591583251952, 0.011376543998718262, 0.01139516830444336, 0.011378848075866699, 0.01139846420288086, 0.011554752349853516, 0.011436575889587403, 0.011386688232421876, 0.011501824378967285, 0.011479040145874024, 0.011530240058898926, 0.011503616333007812, 0.011546303749084472, 0.012261759757995606, 0.013305791854858398, 0.013035519599914551, 0.011883968353271485, 0.011532032012939453, 0.011446335792541503, 0.011430656433105469, 0.011413760185241699, 0.011387935638427734, 0.011393535614013671, 0.011405535697937012, 0.011464703559875488, 0.011777855873107911, 0.01152019214630127, 0.011472895622253418, 0.01190841579437256, 0.011489983558654784, 0.011498559951782226, 0.011454751968383789, 0.011399519920349122, 0.012005727767944336, 0.011514047622680663, 0.011587583541870117, 0.011413408279418944, 0.011389120101928711, 0.011368320465087891, 0.01138486385345459, 0.011372320175170899, 0.01143398380279541, 0.011584768295288086, 0.0114585599899292, 0.011587776184082032, 0.011585023880004883, 0.011610431671142578, 0.011542367935180663, 0.011478816032409668, 0.011423871994018555, 0.011479455947875977, 0.011409248352050781, 0.011448320388793945, 0.01145036792755127, 0.011460512161254884, 0.011398880004882813, 0.011405695915222168, 0.01141759967803955, 0.011366399765014648, 0.0113536958694458, 0.011323519706726074, 0.011379103660583496, 0.011479264259338378, 0.011426560401916504, 0.01141648006439209, 0.01212179183959961, 0.011392607688903808, 0.011411647796630859, 0.01146729564666748, 0.011511808395385742, 0.011527392387390136, 0.011567071914672851, 0.011600704193115234, 0.011429951667785645, 0.011415488243103027, 0.011333727836608886, 0.011358112335205077, 0.011378687858581543, 0.0114782075881958, 0.011434783935546875, 0.011421728134155273, 0.011407360076904297, 0.01145644760131836, 0.01139414405822754, 0.011398112297058106, 0.011384991645812989, 0.011392448425292969, 0.011350432395935058, 0.011365792274475098, 0.011321503639221192, 
0.011564991950988769, 0.011371007919311523, 0.0113887996673584, 0.011415264129638673, 0.01137500762939453, 0.011380224227905274, 0.011377152442932128, 0.011425984382629395, 0.011384256362915039, 0.011395456314086914, 0.011378463745117187, 0.011399392127990723, 0.011364192008972167, 0.011428000450134278, 0.01143183994293213, 0.011446368217468262, 0.011593119621276855, 0.011704768180847168, 0.011485088348388671, 0.011405887603759766, 0.011466655731201172, 0.011501055717468261, 0.011385120391845704, 0.01139247989654541, 0.011364895820617676, 0.01136796760559082, 0.01139065647125244, 0.011416352272033692, 0.011771231651306153, 0.011467424392700196, 0.011400287628173827, 0.011363327980041504, 0.011376543998718262, 0.011370400428771972, 0.011362144470214844, 0.011391519546508789, 0.011363295555114747, 0.011391743659973145, 0.011511327743530274, 0.011413984298706055, 0.011409760475158692, 0.011443424224853515, 0.011375040054321288, 0.011397120475769042, 0.0114585599899292, 0.01140121555328369, 0.011414752006530762, 0.011434271812438964, 0.01137657642364502, 0.011376704216003418, 0.011399680137634278, 0.011333439826965332, 0.011331775665283202, 0.011364319801330566, 0.011321727752685546, 0.011450016021728515, 0.0113438720703125, 0.01135206413269043, 0.011310879707336425, 0.011343551635742187, 0.011378527641296387, 0.011372384071350097, 0.011375455856323243, 0.011378687858581543, 0.011421664237976074, 0.011382335662841797, 0.011465184211730957, 0.011390848159790039, 0.0115283203125, 0.011372735977172851, 0.011366208076477051, 0.01132307243347168, 0.011336000442504882, 0.011487232208251954, 0.011363391876220702, 0.011314111709594727, 0.011331583976745606, 0.0113603515625, 0.011355487823486329, 0.01115135955810547, 0.011401311874389648, 0.011544192314147949, 0.01143836784362793, 0.01140940761566162, 0.011425919532775879, 0.011484959602355956, 0.01142416000366211, 0.011423423767089843, 0.012073087692260741, 0.013186559677124024, 0.013435263633728028, 0.011816160202026367, 0.011825920104980469, 0.01168182373046875, 0.011590911865234374, 0.011574015617370606, 0.011759615898132325, 0.011507712364196777, 0.011423968315124511, 0.011435680389404297, 0.011427071571350098, 0.011489248275756836, 0.011530847549438476, 0.011460927963256836, 0.011421695709228515, 0.011423232078552246, 0.011366911888122559, 0.011472064018249512, 0.01152079963684082, 0.01150774383544922, 0.01147475242614746, 0.011447680473327638, 0.011418432235717773, 0.01139635181427002, 0.01189350414276123, 0.011487168312072754, 0.011412863731384277, 0.011375519752502441, 0.011377696037292481, 0.01138764762878418, 0.011374591827392578, 0.01142198371887207, 0.0115032958984375, 0.01137667179107666, 0.011396415710449219, 0.011411456108093262, 0.011350720405578614, 0.011448479652404786, 0.011421536445617676, 0.011402751922607422, 0.011360063552856445, 0.011369152069091797, 0.011384767532348633, 0.011426079750061035, 0.011574560165405274, 0.011483648300170898, 0.011472960472106934, 0.011400320053100587, 0.011369279861450195, 0.01132748794555664, 0.01135427188873291, 0.011324799537658692, 0.011204607963562012, 0.011403264045715332, 0.011370495796203613, 0.011433728218078613, 0.011383296012878418, 0.011447839736938477, 0.011389311790466308, 0.01140892791748047, 0.011405247688293457, 0.011370464324951172, 0.011411871910095215, 0.01140124797821045, 0.01140118408203125, 0.011423744201660157, 0.011409503936767578, 0.011390815734863282, 0.011322688102722168, 0.011377087593078613, 0.01132767963409424, 0.011360511779785157, 0.011351936340332032, 
0.011355584144592284, 0.011358271598815918, 0.011380224227905274, 0.011420672416687011, 0.011389151573181152, 0.011454239845275879, 0.011345919609069824, 0.011381888389587402, 0.011375552177429199, 0.011388575553894043, 0.011415840148925781, 0.011399168014526367, 0.011386879920959473, 0.011347135543823243, 0.011336095809936523, 0.011399231910705566, 0.011364383697509766, 0.011388447761535645, 0.011385248184204102, 0.011339584350585937, 0.011346048355102539, 0.01135865592956543, 0.011400927543640136, 0.011386624336242676, 0.011425888061523438, 0.011338175773620606, 0.01133078384399414, 0.011346719741821288, 0.01140940761566162, 0.011397024154663087, 0.011364447593688964, 0.011462656021118164, 0.0113787202835083, 0.011348095893859864, 0.011378656387329102, 0.011376031875610352, 0.01137507152557373, 0.011368608474731446, 0.011536224365234375, 0.011423744201660157, 0.011320639610290527, 0.011346624374389649, 0.01119372844696045, 0.011350560188293456, 0.011387328147888183, 0.011421279907226562, 0.01137990379333496, 0.011378815650939941, 0.011394816398620606, 0.011453632354736329, 0.01140681552886963, 0.011462719917297363, 0.01143836784362793, 0.011423040390014648, 0.011427712440490723, 0.011381823539733887, 0.01151142406463623, 0.011333696365356445, 0.011429439544677735, 0.011444448471069336, 0.011401503562927247, 0.011382783889770508, 0.011443648338317871, 0.011401791572570801, 0.011337727546691894, 0.011386879920959473, 0.011390975952148438, 0.01188684844970703, 0.011424736022949218, 0.011380576133728027, 0.011413599967956543, 0.011370976448059083, 0.011351872444152832, 0.011579808235168456, 0.011397600173950195, 0.011414752006530762, 0.011479071617126465, 0.011432703971862792, 0.011343328475952149, 0.011361696243286134, 0.011336288452148437, 0.011376864433288574, 0.011317248344421387, 0.011326496124267577, 0.01132857608795166, 0.011392000198364258, 0.011389023780822754, 0.01137718391418457, 0.011366687774658202, 0.011388928413391113, 0.011383872032165527, 0.01134102439880371, 0.011378399848937988, 0.011432095527648926, 0.011491168022155761, 0.011499775886535644, 0.011438143730163575, 0.01151148796081543, 0.01140121555328369, 0.011534336090087891, 0.011472288131713868, 0.011483839988708496, 0.011581119537353515, 0.01153654384613037, 0.011493696212768554, 0.011325663566589355, 0.011477824211120605, 0.011433823585510254, 0.011392704010009766, 0.011419967651367187, 0.011318559646606446, 0.011346303939819335, 0.011362208366394042, 0.011424127578735352, 0.011520064353942872, 0.011534239768981934, 0.011698431968688965, 0.011540608406066894, 0.011460479736328124, 0.011472448348999024, 0.011402591705322266, 0.011414463996887208, 0.011603967666625976, 0.011662816047668457, 0.012089568138122559, 0.011737600326538086, 0.01139187240600586, 0.011346943855285644, 0.011322784423828124, 0.01151039981842041, 0.011386367797851562, 0.011331999778747558, 0.01132748794555664, 0.011338815689086915, 0.011379743576049805, 0.011409184455871583, 0.01146399974822998, 0.0113754243850708, 0.011325440406799316, 0.011390080451965332, 0.011303487777709962, 0.011352383613586425, 0.011520000457763671, 0.011382368087768555, 0.011372960090637207, 0.011345919609069824, 0.01132953643798828, 0.011453503608703614, 0.01138150405883789, 0.01140332794189453, 0.01139129638671875, 0.011417440414428711, 0.011343839645385743, 0.011382783889770508, 0.011339936256408691, 0.01136355209350586, 0.011377280235290527, 0.011363360404968261, 0.011377632141113281, 0.011347968101501465, 0.011428159713745117, 0.011426591873168945, 
0.011495936393737792, 0.011402751922607422, 0.011336895942687988, 0.011370207786560059, 0.011318431854248048, 0.011346816062927246, 0.011187552452087402, 0.011413375854492187, 0.011431903839111328, 0.011393535614013671, 0.01140940761566162, 0.011391072273254395, 0.011347135543823243, 0.011442912101745606, 0.01140940761566162, 0.011374591827392578, 0.011354111671447753, 0.011380224227905274, 0.01135638427734375, 0.011358240127563476, 0.011374848365783691, 0.01141379165649414, 0.011365471839904785, 0.011432191848754884, 0.011350015640258788, 0.011329919815063476, 0.011371935844421387, 0.011290271759033204, 0.011469759941101075, 0.01150921630859375, 0.011551263809204102, 0.01144761562347412, 0.011504608154296875, 0.011409312248229981, 0.01138054370880127, 0.011406847953796387, 0.011388928413391113, 0.011375103950500488, 0.011393247604370117, 0.011347488403320312, 0.011757504463195801, 0.011420096397399902, 0.011376511573791505, 0.011371871948242188, 0.011401887893676757, 0.011393024444580077, 0.01136844825744629, 0.011595199584960938, 0.011370976448059083, 0.011353856086730956, 0.01143228816986084, 0.011367615699768066, 0.011385663986206055, 0.0113438720703125, 0.011355551719665527, 0.011364959716796874, 0.011395071983337402, 0.01135747241973877, 0.011365440368652344, 0.01134284782409668, 0.011412128448486328, 0.011325407981872558, 0.011325152397155761, 0.01137059211730957, 0.01135638427734375, 0.011323295593261718, 0.01136627197265625, 0.011303135871887207, 0.011337375640869141, 0.011173727989196778, 0.01134006404876709, 0.011404416084289552, 0.011369215965270995, 0.01139465618133545, 0.01142416000366211, 0.011431936264038087, 0.011397120475769042, 0.011419648170471192, 0.01135206413269043, 0.01138105583190918, 0.011363360404968261, 0.011387552261352539, 0.011399168014526367, 0.011408960342407227, 0.011847552299499511, 0.011935839653015137, 0.011524543762207032, 0.01137782382965088, 0.011360960006713867, 0.011374784469604491, 0.01136019229888916, 0.011335743904113769, 0.011373920440673828, 0.01135478401184082, 0.011382783889770508, 0.011353568077087402, 0.011372256278991699, 0.011366815567016601, 0.011371935844421387, 0.011393471717834473, 0.011362784385681153, 0.01143331241607666, 0.011332351684570313, 0.01133568000793457, 0.011479040145874024, 0.011411135673522949, 0.011409728050231934, 0.011681119918823242, 0.011471808433532715, 0.01140662384033203, 0.011346112251281739, 0.011354240417480468, 0.011407391548156738, 0.011350111961364746, 0.011382783889770508, 0.011358400344848633, 0.011370304107666016, 0.01136838436126709, 0.011329471588134766, 0.011382911682128906, 0.011358016014099121, 0.01143017578125, 0.011400159835815429, 0.011348544120788574, 0.011337887763977051, 0.011363807678222657, 0.011362208366394042, 0.011405183792114257, 0.011349087715148925, 0.011335200309753417, 0.011325887680053711, 0.011304160118103028, 0.011386591911315918, 0.011411456108093262, 0.01141055965423584, 0.011403424263000488, 0.011414239883422852, 0.011343520164489746, 0.011360608100891113, 0.011388128280639648, 0.011402015686035155, 0.01136415958404541, 0.011362239837646485, 0.011370623588562011, 0.011397279739379883, 0.011323360443115235, 0.011421695709228515, 0.011378687858581543, 0.011429920196533203, 0.011390048027038574, 0.01138368034362793, 0.011396160125732422, 0.011377375602722168, 0.011352160453796386, 0.011337823867797851, 0.011413536071777343, 0.011425951957702636, 0.011423583984375, 0.011419648170471192, 0.011337727546691894, 0.011354047775268555, 0.011448384284973145, 0.011413056373596192, 
0.011460895538330079, 0.011485343933105468, 0.011462656021118164, 0.011393280029296874, 0.011353856086730956, 0.011398528099060059, 0.011411744117736817, 0.01142569637298584, 0.011411904335021972, 0.011370495796203613, 0.011445247650146484, 0.011407615661621094, 0.01141750431060791, 0.011397631645202636, 0.011383456230163574, 0.011327168464660645, 0.011400832176208495, 0.011331968307495117, 0.011354111671447753, 0.011382399559020996, 0.0113503999710083, 0.011384832382202148, 0.011388575553894043, 0.011376480102539062, 0.011360544204711914, 0.011477215766906738, 0.011431936264038087, 0.01133897590637207, 0.01137123203277588, 0.011466815948486328, 0.011411456108093262, 0.011331583976745606, 0.011138079643249511, 0.01137939167022705, 0.01134620761871338, 0.011366399765014648, 0.011390975952148438, 0.013285440444946289, 0.012300095558166503, 0.011496640205383301, 0.011445183753967286, 0.011436320304870606, 0.011412832260131836, 0.01148761558532715, 0.011436032295227052, 0.011429984092712403, 0.011503520011901856, 0.011507648468017578, 0.011677760124206543, 0.011617568016052246, 0.011588319778442383, 0.01148908805847168, 0.011472288131713868, 0.01143887996673584, 0.011419648170471192, 0.011446271896362305, 0.011655167579650879, 0.011386783599853515, 0.011380831718444824, 0.011468352317810059, 0.011372287750244141, 0.011374815940856934, 0.011395680427551269, 0.011478912353515626, 0.011403103828430176, 0.011402655601501464, 0.011424415588378907, 0.011560192108154297, 0.011598688125610351, 0.011454208374023438, 0.011484640121459961, 0.011495488166809082, 0.011399904251098632, 0.011396863937377929, 0.011380895614624024, 0.011438176155090332, 0.01142182445526123, 0.011431039810180664, 0.011410176277160644, 0.01140121555328369, 0.011437088012695313, 0.011412384033203125, 0.011396703720092773, 0.011402048110961915, 0.01136911964416504, 0.011473216056823731, 0.011430591583251952, 0.011517951965332032, 0.011401151657104492, 0.011360608100891113, 0.011433631896972656, 0.011423808097839355, 0.011436032295227052, 0.011388928413391113, 0.011445504188537597, 0.011205344200134278, 0.011454336166381836, 0.0114236478805542, 0.01137001609802246, 0.011391679763793945, 0.011382783889770508, 0.011354304313659669, 0.011390784263610839, 0.011386879920959473, 0.011384223937988281, 0.011405023574829101, 0.011334527969360351, 0.011374239921569823, 0.01138697624206543, 0.011454527854919433, 0.011411647796630859, 0.011405311584472656, 0.011429344177246094, 0.011303456306457519, 0.01144985580444336, 0.011365056037902832, 0.011524160385131836, 0.011422687530517579, 0.011391776084899902, 0.011402560234069823, 0.011357024192810058, 0.011392864227294922, 0.011362591743469238, 0.01140492820739746, 0.011407135963439941, 0.011364064216613769, 0.01136684799194336, 0.011388319969177246, 0.011467359542846679, 0.01134716796875, 0.011362591743469238, 0.011350687980651855, 0.011343008041381835, 0.011363167762756348, 0.01136025619506836, 0.011380736351013183, 0.011386176109313965, 0.011363072395324707, 0.011408351898193359, 0.011518495559692383, 0.01154412841796875, 0.01136729621887207, 0.012017215728759766, 0.011440447807312012, 0.011460063934326171, 0.011456255912780761, 0.011377535820007324, 0.011327168464660645, 0.011440383911132812, 0.011388895988464355, 0.01140544033050537, 0.011371583938598632, 0.011402079582214356, 0.011382880210876465, 0.011397120475769042, 0.011370207786560059, 0.011434271812438964, 0.01140336036682129, 0.011218879699707031, 0.011379008293151855, 0.011460288047790528, 0.011828543663024903, 
0.011530112266540528, 0.01136518383026123, 0.01135580825805664, 0.011445695877075196, 0.011629471778869629, 0.011527584075927735, 0.011506336212158203, 0.011466976165771484, 0.011422752380371094, 0.011563712120056153, 0.011498559951782226, 0.01144108772277832, 0.011476448059082031, 0.011457056045532226, 0.01143398380279541, 0.011402432441711425, 0.011457280158996582, 0.011454431533813477, 0.011929408073425293, 0.01139247989654541, 0.011428544044494629, 0.011408672332763672, 0.011434176445007323, 0.011479328155517578, 0.011538816452026366, 0.011553983688354492, 0.01171504020690918, 0.011503647804260254, 0.011423551559448242, 0.011463232040405273, 0.011395008087158202, 0.01140652847290039, 0.011460831642150879, 0.011509632110595703, 0.011508159637451172, 0.011596063613891601, 0.011497440338134766, 0.011448063850402833, 0.011443519592285156, 0.011371487617492676, 0.011383040428161621, 0.011408960342407227, 0.011436223983764648, 0.011457728385925294, 0.0115513916015625, 0.011970720291137694, 0.012089056015014648, 0.011980416297912598, 0.011600543975830078, 0.011629952430725097, 0.011626463890075683, 0.011558719635009766, 0.011533151626586914, 0.011601087570190429, 0.011489855766296386, 0.01150592041015625, 0.011488639831542968, 0.011425472259521485, 0.011485856056213379]",tokens/s,87.38724773429537,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2228.195328,2558.394368,0.0,2155.872256,2032.413184,s,1,8.8895615234375,8.8895615234375,0.0,8.8895615234375,8.8895615234375,8.8895615234375,8.8895615234375,[8.8895615234375],,kWh,5.174910428333987e-05,5.700924598974701e-06,1.6621957741991267e-05,7.407198662430584e-05,,MB,2251.853824,2835.218432,0.0,2418.016256,2280.154112,s,10,0.9366658630371092,0.09366658630371094,8.042570404870768e-05,0.09365460586547852,0.09377081909179688,0.0937755844116211,0.09377939666748046,"[0.09364790344238282, 0.0937803497314453, 0.09362393951416016, 0.09369862365722656, 0.09375603485107421, 0.09358163452148438, 0.09362413024902344, 0.09352217864990234, 0.09376976013183594, 0.09366130828857422]",tokens/s,2733.098430318877,kWh,2.7832190508332942e-06,3.0675981385820323e-07,1.8559062466286114e-06,4.945885111320109e-06,tokens/kWh,51760199.48665384,MB,2262.106112,2919.104512,0.0,2501.902336,2389.055488,s,10,24.521555908203126,2.4521555908203125,0.006199772254977471,2.4546346435546873,2.4580506835937497,2.458599560546875,2.459038662109375,"[2.456687744140625, 2.445426513671875, 2.4528740234375, 2.456395263671875, 2.4406396484375, 2.451732177734375, 2.4440927734375, 2.456630615234375, 2.4591484375, 
2.4579287109375]",tokens/s,25.691681325541335,kWh,7.129616252875116e-05,7.863884643074794e-06,3.3561677642970646e-05,0.0001127217248147966,tokens/kWh,558898.4741274134,,s,630,24.518189258575436,0.03891776072789752,0.0004411238680858104,0.03881636810302734,0.039373481369018554,0.039591349983215327,0.04075487628936768,"[0.03986227035522461, 0.041564926147460934, 0.039090175628662106, 0.03872972869873047, 0.03890380859375, 0.03897958374023437, 0.03909222412109375, 0.03924102401733399, 0.038668991088867184, 0.038472801208496096, 0.03871414566040039, 0.039236927032470705, 0.039133758544921876, 0.03902489471435547, 0.03899801635742187, 0.038973438262939454, 0.03891788864135742, 0.03939478302001953, 0.03937055969238281, 0.03931990432739258, 0.03948812866210937, 0.03936460876464844, 0.039008255004882815, 0.038937728881835935, 0.03903558349609375, 0.03911494445800781, 0.038808799743652346, 0.03881145477294922, 0.03888188934326172, 0.03884479904174805, 0.03867372894287109, 0.038926078796386716, 0.038672351837158205, 0.038634078979492184, 0.038585792541503905, 0.0388218879699707, 0.038730560302734376, 0.038798526763916014, 0.03886124801635742, 0.03903084945678711, 0.03897183990478516, 0.03884172821044922, 0.03870169448852539, 0.03868467330932617, 0.03870230484008789, 0.03860534286499023, 0.03863577651977539, 0.03912704086303711, 0.03882704162597656, 0.038855327606201175, 0.04078540802001953, 0.03963571166992187, 0.039386432647705076, 0.03892095947265625, 0.038717472076416015, 0.038786464691162106, 0.038711872100830075, 0.038629375457763675, 0.03867238235473633, 0.03873177719116211, 0.03860889434814453, 0.03859251022338867, 0.03856137466430664, 0.03870105743408203, 0.038776832580566405, 0.038520832061767575, 0.038501953125, 0.03849420928955078, 0.03855353546142578, 0.03843689727783203, 0.03846598434448242, 0.038790462493896484, 0.03861699295043945, 0.038529502868652345, 0.038743934631347655, 0.03880393600463867, 0.03846902465820313, 0.03853987121582031, 0.038629375457763675, 0.03889468765258789, 0.03874089431762695, 0.03910627365112305, 0.03893833541870117, 0.038852481842041015, 0.03866284942626953, 0.038618240356445316, 0.038635646820068356, 0.039160575866699215, 0.03853830337524414, 0.038564769744873044, 0.03869084930419922, 0.03869900894165039, 0.03870515060424805, 0.038596607208251955, 0.03877264022827148, 0.03867776107788086, 0.03856399917602539, 0.03878572845458984, 0.03941580963134766, 0.03906953430175781, 0.03880361557006836, 0.0412303352355957, 0.03892633438110352, 0.038830078125, 0.038744190216064456, 0.03877856063842773, 0.03878838348388672, 0.039029151916503906, 0.03865568161010742, 0.03847980880737305, 0.03857497787475586, 0.03875430297851563, 0.03897958374023437, 0.03933184051513672, 0.03888883209228516, 0.03872012710571289, 0.03868262481689453, 0.03863142395019531, 0.03974553680419922, 0.03931241607666015, 0.03876553726196289, 0.038485214233398436, 0.03862134552001953, 0.039242366790771484, 0.03885465621948242, 0.038816993713378906, 0.03861913681030273, 0.03847372817993164, 0.03896441650390625, 0.03863225555419922, 0.03835696029663086, 0.039096351623535155, 0.038561790466308594, 0.03854131317138672, 0.0387583999633789, 0.038807361602783204, 0.039176383972167966, 0.03906969451904297, 0.03884636688232422, 0.038760543823242184, 0.03923503875732422, 0.03877737426757812, 0.03861708831787109, 0.04034969711303711, 0.038572032928466796, 0.038493919372558597, 0.03850883102416992, 0.03874611282348633, 0.0392806396484375, 0.03936665725708008, 0.039802017211914065, 0.0388043212890625, 
0.03885260772705078, 0.03881574249267578, 0.03943423843383789, 0.03935641479492188, 0.04248748779296875, 0.039237056732177734, 0.03924671936035156, 0.038868736267089844, 0.0386868782043457, 0.03867596817016602, 0.03912931060791015, 0.03890966415405273, 0.03897411346435547, 0.03878054428100586, 0.0389615364074707, 0.03915980911254883, 0.038901153564453124, 0.038736480712890625, 0.038553600311279294, 0.03868819046020508, 0.03855203247070312, 0.039110240936279295, 0.038671966552734374, 0.038940799713134765, 0.038929183959960936, 0.038942718505859376, 0.03876851272583008, 0.03885587310791016, 0.038628192901611326, 0.03890358352661133, 0.03851295852661133, 0.03847782516479492, 0.03847564697265625, 0.03884659194946289, 0.03873785781860352, 0.03868371200561523, 0.038849342346191404, 0.038852127075195315, 0.03878883361816406, 0.03856422424316406, 0.03862163162231445, 0.038621185302734375, 0.03854131317138672, 0.03844054412841797, 0.03847721481323242, 0.0383449592590332, 0.03855846405029297, 0.03976380920410156, 0.03890192031860352, 0.03855142211914062, 0.03860281753540039, 0.038639678955078124, 0.03893350219726562, 0.03903180694580078, 0.03864303970336914, 0.0384293441772461, 0.038510238647460934, 0.0412388801574707, 0.038508544921875, 0.03857612609863281, 0.039456256866455076, 0.039559680938720705, 0.0394013442993164, 0.039426174163818356, 0.03886284637451172, 0.03866624069213867, 0.03874611282348633, 0.03926422500610351, 0.0391065902709961, 0.03924124908447266, 0.03907347106933594, 0.038730400085449215, 0.039198848724365236, 0.040235008239746094, 0.039411712646484375, 0.03900620651245117, 0.03904473495483399, 0.039282432556152345, 0.03938777542114258, 0.03926406478881836, 0.039516353607177736, 0.039002113342285157, 0.03864166259765625, 0.03891401672363281, 0.03918441772460937, 0.038766624450683594, 0.03905267333984375, 0.0387672004699707, 0.03910041427612305, 0.038643169403076175, 0.03863401412963867, 0.038629150390625, 0.038713569641113284, 0.03870515060424805, 0.03841164779663086, 0.038588191986083986, 0.039023456573486326, 0.03849123382568359, 0.040680126190185545, 0.040089824676513675, 0.039147518157958985, 0.039065601348876954, 0.038965248107910154, 0.03886796951293945, 0.03871846389770508, 0.03865932846069336, 0.03847449493408203, 0.03841228866577148, 0.03841347122192383, 0.038617919921875, 0.03868374252319336, 0.038884288787841795, 0.038629375457763675, 0.039117950439453125, 0.03901139068603516, 0.03902444839477539, 0.03897507095336914, 0.038599071502685545, 0.038441982269287106, 0.03897651290893555, 0.03861625671386719, 0.03895702362060547, 0.03856675338745117, 0.03857766342163086, 0.03925043106079101, 0.039822784423828125, 0.03877331161499024, 0.03860275268554687, 0.03851260757446289, 0.03849833679199219, 0.0385351676940918, 0.03855155181884766, 0.03845241546630859, 0.03847244644165039, 0.038345985412597657, 0.038674400329589846, 0.0387674560546875, 0.038899711608886715, 0.038330368041992184, 0.0384634895324707, 0.03842867279052734, 0.03840204620361328, 0.0383798713684082, 0.038610591888427734, 0.03829350280761719, 0.03893779373168945, 0.04027884674072266, 0.03924991989135742, 0.038980960845947266, 0.03865238571166992, 0.03859807968139648, 0.03869497680664063, 0.03866847991943359, 0.039062015533447264, 0.038421985626220706, 0.03893708801269531, 0.038445087432861326, 0.038563838958740236, 0.0386701774597168, 0.03866640090942383, 0.03871030426025391, 0.03869795227050781, 0.03861427307128906, 0.03868156814575195, 0.038391807556152346, 0.038327518463134765, 0.038451839447021484, 
0.03839542388916015, 0.03871603012084961, 0.03877248001098633, 0.038727935791015626, 0.03899299240112305, 0.038847007751464845, 0.03925411224365234, 0.03907379150390625, 0.03898780822753906, 0.03904332733154297, 0.038801406860351564, 0.03873747253417969, 0.03901279830932617, 0.038702846527099606, 0.038714942932128904, 0.03880006408691406, 0.03889676666259766, 0.039045631408691404, 0.038975841522216795, 0.03905129623413086, 0.038828033447265625, 0.03866828918457031, 0.03870924758911133, 0.038505630493164064, 0.038495071411132814, 0.038465152740478514, 0.038570369720458984, 0.0389222412109375, 0.03868672180175781, 0.03877801513671875, 0.038961822509765626, 0.038851966857910154, 0.03881452941894531, 0.038973438262939454, 0.03918438339233398, 0.03950387191772461, 0.03931059265136719, 0.038883838653564456, 0.03885696029663086, 0.03877273559570313, 0.03868467330932617, 0.03876764678955078, 0.038958049774169924, 0.038870529174804686, 0.038976001739501956, 0.03908537673950195, 0.03907859039306641, 0.039346176147460936, 0.03925196838378906, 0.039180286407470705, 0.03922700881958008, 0.039725440979003906, 0.03909427261352539, 0.03879731369018555, 0.03857372665405273, 0.03934038543701172, 0.03943833541870117, 0.03911065673828125, 0.03974886322021484, 0.03919247817993164, 0.03922972869873047, 0.038962783813476565, 0.038773151397705076, 0.03880080032348633, 0.03903343963623047, 0.038950912475585936, 0.0389119987487793, 0.0391495361328125, 0.03926019287109375, 0.039188480377197264, 0.03894476699829102, 0.03975958251953125, 0.03863580703735352, 0.03846758270263672, 0.038513729095458984, 0.03858118438720703, 0.03874342346191406, 0.03868940734863281, 0.039198368072509766, 0.0389939193725586, 0.03870896148681641, 0.03861699295043945, 0.038744800567626955, 0.03881974411010742, 0.03871065521240234, 0.03882876968383789, 0.03914547348022461, 0.0389378890991211, 0.03875503921508789, 0.03863552093505859, 0.03855462265014648, 0.038547584533691406, 0.038548351287841794, 0.03848361587524414, 0.03872953414916992, 0.03876099014282226, 0.03857148742675781, 0.0384312629699707, 0.03854070281982422, 0.03846335983276367, 0.03850636672973633, 0.03830662536621094, 0.03872975921630859, 0.038381088256835935, 0.03851516723632813, 0.03874579238891602, 0.03891231918334961, 0.038713088989257814, 0.03858451080322266, 0.038567264556884764, 0.0385830078125, 0.03848969650268555, 0.03905782318115234, 0.039550975799560545, 0.03973868942260742, 0.03876860809326172, 0.038578208923339845, 0.03840480041503906, 0.038578174591064454, 0.038647296905517575, 0.03893299102783203, 0.0388935661315918, 0.03907788848876953, 0.038919456481933595, 0.038677215576171875, 0.03867820739746094, 0.038596927642822264, 0.038674144744873046, 0.03937923049926758, 0.038838367462158206, 0.03869686508178711, 0.0387437744140625, 0.038704769134521484, 0.038847007751464845, 0.03877286529541016, 0.0389343376159668, 0.03918409729003906, 0.039772544860839844, 0.04098467254638672, 0.0388403205871582, 0.0388454704284668, 0.03903539276123047, 0.0393732795715332, 0.03930521774291992, 0.0392163200378418, 0.03901113510131836, 0.03887308883666992, 0.03910351943969727, 0.03908019256591797, 0.03905974578857422, 0.03918819046020508, 0.03895296096801758, 0.03870297622680664, 0.04091990280151367, 0.0390467529296875, 0.03885615921020508, 0.038529407501220705, 0.03864121627807617, 0.039182910919189455, 0.03920732879638672, 0.039235614776611326, 0.039370750427246096, 0.03882393646240234, 0.038877185821533204, 0.03869219207763672, 0.03865871810913086, 0.03862099075317383, 
0.03856320190429687, 0.038449985504150394, 0.0385731201171875, 0.03858051300048828, 0.03847439956665039, 0.03836928176879883, 0.03854336166381836, 0.03845119857788086, 0.03847782516479492, 0.03881488037109375, 0.03955766296386719, 0.039305534362792965, 0.040623390197753906, 0.03938172912597656, 0.039018497467041016, 0.03874784088134765, 0.03886687850952148, 0.03879062271118164, 0.03965801620483399, 0.039378944396972655, 0.039360511779785154, 0.03944243240356445, 0.038934528350830076, 0.03875628662109375, 0.038919681549072264, 0.03909270477294922, 0.039463008880615234, 0.03938070297241211, 0.03948102569580078, 0.039368480682373044, 0.03918316650390625, 0.03903833770751953, 0.03889215850830078, 0.03899094390869141, 0.03897743988037109, 0.038870014190673825, 0.038696704864501955, 0.03871059036254883, 0.03903379058837891, 0.03913113784790039, 0.03986636734008789, 0.03967942428588867, 0.03871977615356445, 0.03866857528686524, 0.038623233795166016, 0.03881574249267578, 0.03890176010131836, 0.03941312026977539, 0.039279232025146486, 0.03878863906860352, 0.038744544982910155, 0.03869203186035156, 0.03875104141235351, 0.03876249694824219, 0.03853017425537109, 0.03843958282470703, 0.038559551239013674, 0.03855606460571289, 0.03853638458251953, 0.040567615509033206, 0.03916185760498047, 0.03916550445556641, 0.03877318572998047, 0.038960990905761717, 0.038768798828125, 0.038981632232666014, 0.03927654266357422, 0.039247039794921876, 0.038849342346191404, 0.039008255004882815, 0.038742015838623044, 0.03879731369018555, 0.03883203125, 0.038860897064208984, 0.03907929611206055, 0.038863487243652343, 0.038991455078125, 0.039438751220703124, 0.03912396621704101, 0.0391929931640625, 0.03904934310913086, 0.03931270217895508, 0.039287166595458986, 0.03936249542236328, 0.03909667205810547, 0.03879638290405273, 0.038561729431152346, 0.039049919128417966, 0.03918057632446289, 0.039216159820556644, 0.0389249267578125, 0.03885408020019531, 0.03887392044067383, 0.038747520446777345, 0.039000064849853515, 0.03902028656005859, 0.03894492721557617, 0.03926713562011719, 0.039174144744873046, 0.039005374908447264, 0.03887772750854492, 0.03901468658447266, 0.03889561462402344, 0.03876812744140625, 0.038628929138183596, 0.03876729583740234, 0.03921920013427734, 0.04048102569580078, 0.03957526397705078, 0.039604511260986325, 0.040205310821533204, 0.039228416442871096, 0.03946656036376953, 0.03937529754638672, 0.039164161682128905, 0.03906480026245117, 0.03878908920288086, 0.03864387130737305, 0.03868918228149414, 0.038539264678955076, 0.03894208145141602, 0.03877747344970703, 0.03878448104858399, 0.03851728057861328, 0.03876435089111328, 0.038863040924072265, 0.03867343902587891, 0.03867500686645508, 0.03881001663208008, 0.03869900894165039, 0.03860425567626953, 0.03851523208618164, 0.038590560913085936, 0.038594463348388675, 0.038547294616699215, 0.03869712066650391, 0.03950592041015625, 0.03907174301147461, 0.0394090576171875, 0.0397092170715332, 0.03950393676757812, 0.039008255004882815, 0.0389304313659668, 0.03877478408813476]",tokens/s,25.695209110095774,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2228.768768,2558.394368,0.0,2155.872256,2032.413184,s,1,8.82915234375,8.82915234375,0.0,8.82915234375,8.82915234375,8.82915234375,8.82915234375,[8.82915234375],,kWh,5.20656411916813e-05,5.735624447842144e-06,1.662556885598243e-05,7.442683449550588e-05,,MB,2281.263104,2835.218432,0.0,2418.016256,2279.563776,s,10,0.7813948440551757,0.07813948440551757,0.0002024961363432146,0.07812495803833008,0.07832449722290039,0.0784661350250244,0.07857944526672364,"[0.07829302215576171, 0.07809327697753907, 0.07799747467041016, 0.07791468811035156, 0.07785638427734375, 0.07807494354248047, 0.07860777282714844, 0.07816614532470703, 0.0782344970703125, 0.0781566390991211]",tokens/s,3276.192592613567,kWh,2.3349306698669654e-06,2.574976728916439e-07,1.5468523485921981e-06,4.139280691350808e-06,tokens/kWh,61846494.37640753,MB,2281.263104,2919.104512,0.0,2501.902336,2389.801984,s,10,18.997174194335937,1.8997174194335937,0.011290641251550229,1.8964168090820313,1.9193546752929689,1.9203277893066406,1.9211062805175783,"[1.919138427734375, 1.9213009033203126, 1.899252685546875, 1.8935809326171875, 1.9007515869140625, 1.8887833251953126, 1.9031004638671876, 1.88919775390625, 1.8914566650390625, 1.8906114501953124]",tokens/s,33.16282693179896,kWh,5.5700861302215156e-05,6.143544489834176e-06,2.862189400860593e-05,9.046629980065526e-05,tokens/kWh,696391.9176402932,,s,630,18.989331668853744,0.030141796299767876,0.0007460634415589727,0.029934239387512204,0.030612844848632813,0.030947160434722898,0.0340253570175171,"[0.030558048248291017, 0.030486528396606444, 0.030330783843994142, 0.030415103912353515, 0.030291967391967774, 0.030265344619750976, 0.030349311828613282, 0.030142208099365235, 0.0301646728515625, 0.03019219207763672, 0.030318592071533205, 0.030785152435302734, 0.03052787208557129, 0.030334943771362304, 0.030236703872680664, 0.030269439697265626, 0.030209888458251954, 0.030352800369262696, 0.030572736740112304, 0.030468671798706055, 0.030631328582763673, 0.03061190414428711, 0.030808191299438476, 0.03056643295288086, 0.030670656204223632, 0.03046623992919922, 0.030414848327636718, 0.03029395294189453, 0.030881120681762696, 0.030269792556762695, 0.030283679962158205, 0.030119808197021484, 0.030146495819091797, 0.030128799438476562, 0.03033462333679199, 0.030409055709838866, 0.03033087921142578, 0.03014656066894531, 0.0302174072265625, 0.03012681579589844, 0.03020956802368164, 0.030210624694824217, 0.03059097671508789, 0.030283584594726562, 0.030430879592895508, 0.03134927940368652, 0.030697471618652345, 0.030457792282104493, 0.03056195259094238, 0.030392608642578124, 0.03062131118774414, 0.030577152252197266, 0.030674495697021485, 0.030312896728515625, 0.030107391357421874, 0.031443199157714846, 0.031112512588500976, 0.03053843116760254, 0.030504959106445313, 0.030357503890991212, 0.030510080337524413, 
0.030745599746704103, 0.030478208541870118, 0.03100057601928711, 0.03058278465270996, 0.03052867126464844, 0.03038809585571289, 0.030105951309204102, 0.03027180862426758, 0.030095680236816406, 0.030476287841796876, 0.030699039459228517, 0.030599647521972657, 0.030474239349365235, 0.030164512634277343, 0.03247766494750977, 0.03461465454101562, 0.030491264343261718, 0.030385503768920897, 0.030507680892944335, 0.030338495254516602, 0.030540000915527343, 0.030255456924438477, 0.03024025535583496, 0.030046144485473634, 0.030282112121582033, 0.031941856384277344, 0.030317535400390627, 0.030105375289916993, 0.029866207122802736, 0.030304224014282226, 0.030746656417846678, 0.030468095779418947, 0.03055001640319824, 0.030521343231201172, 0.030498815536499024, 0.030201087951660155, 0.02996659278869629, 0.030117952346801757, 0.029940160751342773, 0.03031839942932129, 0.029862079620361328, 0.02965488052368164, 0.02975270462036133, 0.029897504806518555, 0.029684959411621095, 0.029585504531860353, 0.030086912155151368, 0.02983622360229492, 0.029822975158691405, 0.033889694213867186, 0.0298702392578125, 0.02979680061340332, 0.02977791976928711, 0.029894367218017578, 0.029942047119140624, 0.02987558364868164, 0.03020044708251953, 0.03027078437805176, 0.03843084716796875, 0.02998124885559082, 0.02987331199645996, 0.02972051239013672, 0.029842336654663085, 0.029906303405761718, 0.02965772819519043, 0.030539072036743165, 0.030067392349243164, 0.029863040924072267, 0.029825439453125, 0.02971696090698242, 0.02974515151977539, 0.029820928573608397, 0.029618175506591796, 0.029769376754760744, 0.029622400283813476, 0.029700063705444337, 0.029915391921997072, 0.0305664005279541, 0.03025676727294922, 0.03024662399291992, 0.034718368530273436, 0.030567903518676758, 0.029991455078125, 0.02996633529663086, 0.029891807556152342, 0.029799200057983397, 0.029931360244750977, 0.029851743698120117, 0.029642208099365235, 0.02978371238708496, 0.029602048873901367, 0.029672128677368164, 0.029572479248046873, 0.029784000396728516, 0.029967039108276368, 0.029841407775878907, 0.03295187377929688, 0.030300607681274416, 0.030060543060302734, 0.03006780815124512, 0.029968832015991213, 0.029945728302001953, 0.029843839645385742, 0.02982524871826172, 0.029715871810913085, 0.029777759552001952, 0.030029632568359374, 0.029797311782836913, 0.02976563262939453, 0.029724224090576172, 0.029661216735839845, 0.029727136611938477, 0.029705343246459962, 0.02977849578857422, 0.03042336082458496, 0.03326342391967774, 0.02993142318725586, 0.029792543411254882, 0.02977952003479004, 0.03130822372436524, 0.031069311141967773, 0.030122880935668946, 0.030050304412841795, 0.029937664031982423, 0.0299289608001709, 0.030128320693969726, 0.029903167724609374, 0.030332927703857423, 0.030072704315185547, 0.029730560302734375, 0.029593984603881837, 0.029714431762695313, 0.029607936859130858, 0.029675519943237305, 0.02968681526184082, 0.02958198356628418, 0.029724000930786133, 0.029572063446044922, 0.029677568435668947, 0.029602975845336915, 0.029620128631591795, 0.029543359756469725, 0.02979840087890625, 0.029937664031982423, 0.030224384307861327, 0.02984454345703125, 0.03368236923217773, 0.030552064895629883, 0.030000320434570314, 0.029845632553100587, 0.030028480529785156, 0.029703968048095702, 0.029722623825073242, 0.029647071838378905, 0.029654592514038087, 0.029716928482055663, 0.02978950309753418, 0.029937952041625977, 0.030103424072265624, 0.029742687225341798, 0.029864896774291994, 0.02964678382873535, 0.029761600494384765, 0.029648895263671874, 
0.029743104934692382, 0.029743104934692382, 0.029808639526367187, 0.03064944076538086, 0.030085599899291993, 0.029847135543823244, 0.029968927383422852, 0.02976790428161621, 0.029726816177368165, 0.02969705581665039, 0.029662176132202147, 0.031163455963134766, 0.029864896774291994, 0.029908992767333983, 0.02999622344970703, 0.029784896850585937, 0.03008633613586426, 0.034921279907226564, 0.030662656784057617, 0.030674943923950194, 0.030443519592285157, 0.03029337692260742, 0.030364288330078124, 0.02976153564453125, 0.029799903869628906, 0.02999760055541992, 0.030093311309814453, 0.030271360397338867, 0.030065439224243165, 0.03009449577331543, 0.02987094306945801, 0.02975129508972168, 0.029822111129760742, 0.029767776489257814, 0.030855712890625, 0.029944032669067384, 0.029759040832519533, 0.030032320022583006, 0.029975616455078125, 0.029862432479858397, 0.03124060821533203, 0.029995008468627928, 0.02994095993041992, 0.029891359329223634, 0.029902687072753908, 0.02993168067932129, 0.030207359313964843, 0.030276224136352538, 0.034631679534912106, 0.030257152557373046, 0.030134271621704102, 0.030023679733276368, 0.030078975677490235, 0.029945535659790037, 0.02981100845336914, 0.029867136001586914, 0.029698944091796874, 0.03011568069458008, 0.029679264068603516, 0.029719039916992186, 0.029792255401611328, 0.02987353515625, 0.029856000900268555, 0.029800832748413084, 0.03079897689819336, 0.029870975494384767, 0.029767679214477538, 0.029781951904296874, 0.029675424575805662, 0.029747360229492186, 0.030117887496948242, 0.02993120002746582, 0.029888832092285156, 0.02970364761352539, 0.029684255599975586, 0.029785728454589842, 0.030837120056152342, 0.029837312698364257, 0.02996633529663086, 0.029929471969604493, 0.029887744903564453, 0.02990070343017578, 0.03264188766479492, 0.03228374481201172, 0.030219167709350587, 0.03019753646850586, 0.03057276725769043, 0.030205343246459963, 0.030073440551757813, 0.029853696823120116, 0.030272256851196288, 0.029853696823120116, 0.029585407257080077, 0.02981888008117676, 0.029593599319458007, 0.029726240158081056, 0.029731296539306642, 0.029683712005615235, 0.029884416580200194, 0.029685760498046877, 0.02962227249145508, 0.029609983444213867, 0.029618175506591796, 0.029566591262817382, 0.02971072006225586, 0.029830432891845702, 0.02989948844909668, 0.02970412826538086, 0.029588991165161133, 0.0296265926361084, 0.02953660774230957, 0.029669376373291017, 0.02955264091491699, 0.03016908836364746, 0.034080768585205076, 0.030248031616210938, 0.02986892890930176, 0.03021548843383789, 0.029946592330932616, 0.029875680923461913, 0.02983580780029297, 0.02989849662780762, 0.029899007797241212, 0.02976972770690918, 0.02979635238647461, 0.029711904525756835, 0.02990742492675781, 0.03021785545349121, 0.030407039642333985, 0.030044160842895507, 0.029904895782470704, 0.029954048156738283, 0.029876224517822264, 0.029809759140014647, 0.029835775375366212, 0.029828895568847658, 0.029893247604370118, 0.029829120635986327, 0.02984457588195801, 0.030028703689575196, 0.029663232803344725, 0.029728160858154298, 0.029774431228637696, 0.029638656616210936, 0.029692928314208986, 0.029730976104736326, 0.02979311943054199, 0.030224384307861327, 0.03025686454772949, 0.034390174865722656, 0.029934911727905272, 0.029807424545288085, 0.02977952003479004, 0.03029257583618164, 0.030517248153686522, 0.029849599838256836, 0.029872127532958984, 0.02963599967956543, 0.029801055908203124, 0.029865535736083984, 0.029807039260864258, 0.029865888595581053, 0.029714527130126952, 0.029874176025390626, 
0.03026464080810547, 0.03030291175842285, 0.030303232192993163, 0.03024176025390625, 0.02988172721862793, 0.029829792022705078, 0.0328675537109375, 0.030268192291259766, 0.030659839630126952, 0.03044620704650879, 0.030227903366088868, 0.030628543853759765, 0.030303552627563478, 0.030281728744506835, 0.03014672088623047, 0.030980640411376954, 0.030894079208374024, 0.03049228858947754, 0.030036352157592774, 0.02977782440185547, 0.029887807846069335, 0.032774398803710934, 0.030758592605590822, 0.030567264556884764, 0.030638080596923828, 0.030551551818847656, 0.030130687713623046, 0.030029823303222656, 0.029871200561523436, 0.02997545623779297, 0.029857791900634766, 0.029868032455444334, 0.029882368087768556, 0.029865440368652345, 0.02977436828613281, 0.029861120223999022, 0.029876991271972655, 0.029724672317504884, 0.02977177619934082, 0.02962339210510254, 0.02965305519104004, 0.029585472106933595, 0.029856544494628906, 0.02978611183166504, 0.029812736511230467, 0.029732864379882814, 0.029671424865722655, 0.029730815887451172, 0.031139839172363282, 0.03176790428161621, 0.030114463806152344, 0.03016214370727539, 0.030884063720703125, 0.03066249656677246, 0.030123455047607422, 0.0300795841217041, 0.02971251106262207, 0.02985478401184082, 0.02988310432434082, 0.02984943962097168, 0.029820768356323243, 0.029727264404296874, 0.0299233283996582, 0.02975948715209961, 0.029855743408203125, 0.029800447463989257, 0.029920736312866212, 0.030126623153686524, 0.03059663963317871, 0.03032431983947754, 0.030071680068969726, 0.03014614486694336, 0.03020947265625, 0.029969375610351564, 0.029841407775878907, 0.029771360397338867, 0.02983772850036621, 0.030299903869628907, 0.030209856033325197, 0.03040300750732422, 0.03025436782836914, 0.029987552642822265, 0.02997248077392578, 0.030089216232299806, 0.029869600296020506, 0.02985990333557129, 0.029722368240356446, 0.029711008071899414, 0.029642751693725586, 0.029722623825073242, 0.029741056442260744, 0.029618175506591796, 0.02977996826171875, 0.029644256591796876, 0.029696224212646484, 0.029750751495361327, 0.029773920059204102, 0.03021286392211914, 0.029642688751220704, 0.02964271926879883, 0.029642047882080077, 0.02962646484375, 0.02961631965637207, 0.029745248794555663, 0.02962428855895996, 0.029630912780761718, 0.0297205753326416, 0.029676576614379883, 0.029899744033813475, 0.0297205753326416, 0.03079167938232422, 0.031522495269775394, 0.030355712890625, 0.030577791213989257, 0.030190528869628905, 0.030562271118164064, 0.030244895935058594, 0.029825023651123047, 0.030257152557373046, 0.029612031936645508, 0.02974883270263672, 0.0297903995513916, 0.029903072357177735, 0.02997657585144043, 0.02989779281616211, 0.029890623092651367, 0.029827871322631837, 0.029859935760498047, 0.029855743408203125, 0.029746496200561523, 0.029835968017578124, 0.02981888008117676, 0.029767679214477538, 0.029845504760742186, 0.02994576072692871, 0.0298374080657959, 0.02963046455383301, 0.029716480255126954, 0.02970147132873535, 0.029683391571044923, 0.029887359619140626, 0.029743200302124025, 0.0298024959564209, 0.029883392333984377, 0.029972799301147462, 0.02999776077270508, 0.02991209602355957, 0.02990959930419922, 0.030001535415649414, 0.03001753616333008, 0.030449663162231445, 0.030089216232299806, 0.029659040451049806, 0.029745248794555663, 0.029896703720092774, 0.030170976638793947, 0.03010985565185547, 0.03019366455078125, 0.029888320922851562, 0.030906240463256837, 0.029890880584716797, 0.029999103546142578, 0.02993356704711914, 0.030291904449462892, 0.029843040466308594, 
0.02970627212524414, 0.030037824630737304, 0.030745216369628906, 0.030298112869262695, 0.02993075180053711, 0.029985504150390627, 0.02987126350402832, 0.030131071090698243, 0.03019366455078125, 0.030125471115112306, 0.0304849910736084, 0.031034912109375, 0.031004703521728516, 0.03079360008239746, 0.030725088119506836, 0.030529535293579102, 0.030757919311523437, 0.03026019287109375, 0.029882272720336913, 0.029922847747802735, 0.030032447814941406, 0.03013222312927246, 0.029954048156738283, 0.029978624343872072, 0.02983078384399414, 0.029862272262573243, 0.029843456268310548, 0.02998886489868164, 0.02988559913635254, 0.029958015441894532, 0.029721471786499025, 0.029765727996826172, 0.029657087326049804, 0.029730335235595703, 0.029622751235961912, 0.029669376373291017, 0.029646848678588866, 0.029969696044921876, 0.02963324737548828, 0.029716032028198242, 0.029706687927246095, 0.030139488220214845, 0.02975619125366211, 0.029977760314941405, 0.029852640151977538, 0.029969823837280272, 0.029782623291015626, 0.030104608535766603, 0.030028127670288087, 0.030590879440307618, 0.030221023559570313, 0.030244768142700194, 0.029892704010009766, 0.03006460762023926, 0.029953311920166016, 0.029944576263427735, 0.02979020881652832, 0.02994790458679199, 0.02976972770690918, 0.02976051139831543, 0.02999193572998047, 0.030690303802490236, 0.029842432022094727, 0.029908992767333983, 0.029775871276855468, 0.02973676872253418, 0.029898944854736327, 0.03137126350402832, 0.029880256652832032, 0.029935680389404296, 0.02995814323425293, 0.02978611183166504, 0.029849599838256836, 0.03016048049926758, 0.030042400360107423, 0.02994803237915039]",tokens/s,33.176523059699036,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2228.256768,2558.394368,0.0,2155.872256,2032.413184,s,1,9.1341005859375,9.1341005859375,0.0,9.1341005859375,9.1341005859375,9.1341005859375,9.1341005859375,[9.1341005859375],,kWh,5.169524065420319e-05,5.695051776205635e-06,1.6650013319990942e-05,7.404030575039976e-05,,MB,2285.228032,2835.218432,0.0,2418.016256,2279.563776,s,10,0.7810281829833985,0.07810281829833984,0.0001284550209585552,0.07809267044067383,0.07828038482666015,0.07829091110229493,0.07829933212280274,"[0.07827804565429687, 0.0779207992553711, 0.07806588745117188, 0.0779493408203125, 0.07820748901367187, 0.0779736328125, 0.078172607421875, 0.07803948974609375, 0.07830143737792969, 0.07811945343017578]",tokens/s,3277.730632230483,kWh,2.3386067667666187e-06,2.579053324702673e-07,1.5470967932318392e-06,4.143608892468724e-06,tokens/kWh,61781892.70355522,MB,2286.727168,2919.104512,0.0,2501.902336,2389.801984,s,10,18.702352172851565,1.8702352172851562,0.14610106509636314,1.9295911865234374,1.9375388061523438,1.9379331604003907,1.9382486437988282,"[1.4416275634765625, 1.8312318115234374, 1.91214794921875, 1.9103414306640625, 1.937451171875, 1.9356417236328125, 1.9276470947265625, 
1.9315352783203126, 1.936400634765625, 1.9383275146484376]",tokens/s,33.685602440665804,kWh,5.5891985026146625e-05,6.164648744589533e-06,2.8758371895569734e-05,9.08150056663059e-05,tokens/kWh,693717.9548442642,,s,630,18.69372937393187,0.029672586307828392,0.0025473643425977205,0.030441247940063476,0.030992185783386233,0.03132554359436036,0.03211103267669678,"[0.022968191146850586, 0.022790048599243166, 0.022491968154907227, 0.022609920501708985, 0.023149919509887696, 0.022624416351318358, 0.02258995246887207, 0.02261759948730469, 0.02261862373352051, 0.022449472427368163, 0.022608575820922853, 0.02262396812438965, 0.025937856674194334, 0.02303830337524414, 0.02274691200256348, 0.022546655654907228, 0.02272051239013672, 0.022632448196411133, 0.022645792007446288, 0.022581504821777343, 0.022741312026977538, 0.022698400497436523, 0.02265292739868164, 0.022640640258789063, 0.022763519287109374, 0.022648832321166993, 0.022637823104858398, 0.022794111251831055, 0.02266761589050293, 0.022569375991821287, 0.02263667106628418, 0.022814720153808594, 0.023029760360717775, 0.022804479598999023, 0.02289254379272461, 0.023178752899169923, 0.02306287956237793, 0.022790176391601562, 0.024536352157592773, 0.02341974449157715, 0.022951936721801756, 0.022863552093505858, 0.02259350395202637, 0.02267526435852051, 0.02273539161682129, 0.02264793586730957, 0.022594432830810546, 0.0224849910736084, 0.023810047149658203, 0.022603776931762694, 0.02265622329711914, 0.022993696212768554, 0.022719520568847656, 0.02317123222351074, 0.02322719955444336, 0.023252864837646485, 0.023024959564208983, 0.02296486473083496, 0.02315283203125, 0.02292259216308594, 0.02266716766357422, 0.022594303131103517, 0.022632448196411133, 0.023195295333862304, 0.022868000030517577, 0.022524192810058595, 0.022667648315429688, 0.022540288925170897, 0.022605823516845702, 0.02262771224975586, 0.022493824005126953, 0.022587392807006838, 0.022687744140625, 0.022822271347045897, 0.030532224655151367, 0.030689279556274415, 0.03074015998840332, 0.0304202880859375, 0.030307327270507813, 0.03037161636352539, 0.030256736755371095, 0.03028646469116211, 0.030263296127319338, 0.030121376037597656, 0.030322912216186524, 0.030402944564819335, 0.030137727737426758, 0.030175199508666994, 0.03006892776489258, 0.03036518478393555, 0.03025404739379883, 0.030193279266357422, 0.030372064590454103, 0.03135222434997559, 0.03010432052612305, 0.03064352035522461, 0.030685344696044923, 0.030133855819702147, 0.030763168334960938, 0.030321279525756837, 0.030152671813964842, 0.030140607833862305, 0.0301977596282959, 0.03020595169067383, 0.03021004867553711, 0.03012784004211426, 0.03031475257873535, 0.030427167892456055, 0.030717952728271485, 0.03089993667602539, 0.0307238712310791, 0.030441984176635743, 0.030180416107177734, 0.030604127883911134, 0.030234176635742186, 0.0302126407623291, 0.030105600357055663, 0.030422624588012696, 0.030207839965820313, 0.030991968154907228, 0.030989023208618165, 0.030348575592041016, 0.030516191482543944, 0.03051238441467285, 0.03028659248352051, 0.03036774444580078, 0.03027235221862793, 0.030351327896118163, 0.030242847442626952, 0.030623552322387695, 0.03019932746887207, 0.030298784255981447, 0.03039232063293457, 0.030164512634277343, 0.030255552291870116, 0.030268831253051756, 0.030351808547973633, 0.03024300765991211, 0.031235136032104493, 0.031046592712402343, 0.030509056091308592, 0.030310400009155275, 0.030480384826660156, 0.030306304931640625, 0.03021548843383789, 0.030223039627075194, 0.030143680572509764, 0.03027846336364746, 
0.030480384826660156, 0.03015679931640625, 0.030135360717773438, 0.030096319198608397, 0.030302207946777345, 0.030746368408203124, 0.03147782325744629, 0.030762624740600587, 0.03059529685974121, 0.030669151306152345, 0.030453216552734374, 0.030278175354003907, 0.030140384674072266, 0.03023174476623535, 0.03008140754699707, 0.03005036735534668, 0.030036384582519532, 0.030064640045166017, 0.030035968780517577, 0.03096575927734375, 0.030058496475219725, 0.03002572822570801, 0.03012393569946289, 0.030056543350219726, 0.03019980812072754, 0.03058892822265625, 0.03056003189086914, 0.030281951904296875, 0.03056537628173828, 0.03021516799926758, 0.030096511840820312, 0.030309055328369142, 0.03035759925842285, 0.030209983825683594, 0.03011123275756836, 0.030560863494873046, 0.03022854423522949, 0.030195487976074218, 0.03017545509338379, 0.03015475273132324, 0.030107072830200195, 0.03019366455078125, 0.030126079559326172, 0.03009916877746582, 0.03008950424194336, 0.030073888778686525, 0.030106592178344726, 0.030297792434692383, 0.03002191925048828, 0.03031657600402832, 0.03010668754577637, 0.030118751525878906, 0.030138463973999025, 0.030076608657836915, 0.030178720474243165, 0.030188032150268555, 0.0306712646484375, 0.030774784088134766, 0.030525951385498046, 0.03058278465270996, 0.03035696029663086, 0.030214208602905274, 0.03031907272338867, 0.0305581111907959, 0.030318592071533205, 0.030275680541992187, 0.030201343536376952, 0.030196128845214845, 0.030144607543945313, 0.030515199661254884, 0.030469951629638673, 0.030408287048339845, 0.030128223419189453, 0.030321151733398437, 0.030252767562866212, 0.03045404815673828, 0.03013795280456543, 0.030163360595703126, 0.03039641571044922, 0.030297439575195314, 0.030300319671630858, 0.030216480255126955, 0.030128063201904295, 0.030118175506591797, 0.03024028778076172, 0.030210527420043945, 0.030074880599975585, 0.03035955238342285, 0.030153919219970703, 0.030845760345458984, 0.03048431968688965, 0.030709503173828125, 0.030391872406005858, 0.030249631881713868, 0.030136032104492186, 0.030292448043823243, 0.030471775054931642, 0.030529951095581053, 0.031692800521850584, 0.03035126495361328, 0.030154848098754884, 0.030130176544189452, 0.03014656066894531, 0.030281728744506835, 0.03044166374206543, 0.030717567443847658, 0.03105830383300781, 0.031008447647094727, 0.030916288375854493, 0.03086809539794922, 0.030507007598876954, 0.030453760147094725, 0.030258975982666015, 0.030238943099975588, 0.030115840911865234, 0.0301711368560791, 0.030168224334716796, 0.0305446720123291, 0.030306367874145507, 0.03023855972290039, 0.030623903274536134, 0.03807436752319336, 0.03138559913635254, 0.030947135925292968, 0.030668991088867188, 0.030453760147094725, 0.030509056091308592, 0.03042918395996094, 0.03029305648803711, 0.03068204879760742, 0.030519296646118164, 0.03058687973022461, 0.03082854461669922, 0.0306296329498291, 0.030506591796875, 0.030450336456298827, 0.030464000701904297, 0.03030531120300293, 0.03087808036804199, 0.030882400512695314, 0.030930431365966796, 0.03058883285522461, 0.0305230712890625, 0.03057551956176758, 0.030440832138061525, 0.030670944213867186, 0.03068332862854004, 0.030615455627441408, 0.030380224227905272, 0.030398687362670897, 0.03038211250305176, 0.030422304153442385, 0.030532384872436525, 0.031190975189208985, 0.03213516616821289, 0.030840831756591795, 0.031068159103393556, 0.03095142364501953, 0.030654016494750976, 0.03044748878479004, 0.030415071487426757, 0.030826688766479492, 0.03044710350036621, 0.03062646484375, 0.030467456817626953, 
0.030732927322387697, 0.030523391723632814, 0.030846975326538087, 0.031070207595825194, 0.03077939224243164, 0.031110431671142576, 0.030903007507324217, 0.030717952728271485, 0.030654464721679688, 0.030406496047973634, 0.03033923149108887, 0.030375072479248047, 0.03064918327331543, 0.0307521915435791, 0.030517824172973634, 0.030509056091308592, 0.030510400772094725, 0.03039507293701172, 0.03057196807861328, 0.0305894718170166, 0.030484415054321288, 0.030423040390014647, 0.03079542350769043, 0.03073811149597168, 0.030544639587402344, 0.03058230400085449, 0.030542303085327148, 0.03035545539855957, 0.0304901123046875, 0.030407072067260742, 0.03062588882446289, 0.030644224166870116, 0.030885663986206055, 0.031096128463745116, 0.03097420883178711, 0.030627647399902345, 0.030559072494506834, 0.030648319244384766, 0.03058687973022461, 0.030875648498535156, 0.03081216049194336, 0.03057459259033203, 0.030810111999511718, 0.03078963279724121, 0.03076300811767578, 0.030586687088012696, 0.03051705551147461, 0.030652799606323243, 0.030489952087402343, 0.03226259231567383, 0.030912607192993165, 0.030940383911132813, 0.030743167877197265, 0.030617887496948243, 0.030674400329589843, 0.03066316795349121, 0.030894111633300782, 0.03059712028503418, 0.030557632446289062, 0.03038470458984375, 0.03082806396484375, 0.031042015075683594, 0.03101900863647461, 0.03099033546447754, 0.03098534393310547, 0.0312801284790039, 0.03072096061706543, 0.030554079055786134, 0.030596607208251952, 0.03041302490234375, 0.030470527648925782, 0.030676992416381835, 0.0307259521484375, 0.030793376922607422, 0.03063222312927246, 0.0308756160736084, 0.030837024688720703, 0.030922752380371094, 0.03075004768371582, 0.030507680892944335, 0.03019161605834961, 0.03055820846557617, 0.030711807250976563, 0.03077939224243164, 0.03085270309448242, 0.030943647384643554, 0.030744575500488282, 0.030740480422973632, 0.03033497619628906, 0.030182624816894533, 0.030118240356445312, 0.030191360473632814, 0.030476703643798828, 0.030236576080322267, 0.030142847061157226, 0.030312095642089844, 0.0303353271484375, 0.030302143096923827, 0.030246463775634766, 0.030202367782592773, 0.030123039245605467, 0.030303199768066405, 0.030430912017822265, 0.030351680755615236, 0.03037183952331543, 0.0304005126953125, 0.03042255973815918, 0.030460384368896483, 0.030408319473266603, 0.030361120223999023, 0.030288415908813475, 0.030442911148071287, 0.031110048294067383, 0.03146956825256347, 0.031204864501953124, 0.030834527969360353, 0.030652959823608397, 0.03051251220703125, 0.030650720596313477, 0.03087558364868164, 0.030802400588989257, 0.031178752899169923, 0.03084649658203125, 0.030586944580078126, 0.030476543426513673, 0.030685344696044923, 0.030515199661254884, 0.03058892822265625, 0.03095142364501953, 0.030830591201782227, 0.030537727355957032, 0.03056025505065918, 0.0304388484954834, 0.030634559631347657, 0.03061299133300781, 0.030330751419067385, 0.030376575469970704, 0.030330623626708984, 0.03035366439819336, 0.03048409652709961, 0.03053811264038086, 0.030795391082763673, 0.030739871978759766, 0.03049478340148926, 0.03075718307495117, 0.03076361656188965, 0.030515199661254884, 0.030365695953369142, 0.03024883270263672, 0.030371807098388673, 0.03025312042236328, 0.03024905586242676, 0.030840831756591795, 0.03057814407348633, 0.030822975158691406, 0.031317983627319336, 0.030801919937133788, 0.03062579154968262, 0.030613088607788087, 0.03066227149963379, 0.03038467216491699, 0.030353471755981444, 0.030326976776123046, 0.03024025535583496, 0.030198272705078126, 
0.03031449508666992, 0.030531583786010744, 0.030482431411743165, 0.03133807945251465, 0.030941600799560546, 0.030298015594482423, 0.03022447967529297, 0.030203903198242187, 0.03027257537841797, 0.030377920150756837, 0.030315231323242188, 0.030540063858032228, 0.032057022094726564, 0.031170879364013672, 0.03077120018005371, 0.03059507179260254, 0.030631935119628906, 0.03060531234741211, 0.03169484710693359, 0.03298822402954102, 0.030659135818481446, 0.03069171142578125, 0.03068623924255371, 0.030704448699951172, 0.03068943977355957, 0.03055001640319824, 0.030629728317260744, 0.030554399490356446, 0.03051091194152832, 0.030767200469970703, 0.03065251159667969, 0.030674144744873046, 0.030657312393188477, 0.030337024688720703, 0.030553695678710937, 0.03047875213623047, 0.03044883155822754, 0.03033375930786133, 0.030444992065429687, 0.03060383987426758, 0.030703615188598633, 0.030627456665039063, 0.030552288055419922, 0.03094281578063965, 0.03109267234802246, 0.03102934455871582, 0.030503456115722655, 0.030455711364746094, 0.03026543998718262, 0.030195711135864257, 0.030160415649414064, 0.030255584716796874, 0.030672096252441407, 0.03061020851135254, 0.030732000350952148, 0.030376224517822265, 0.030150047302246095, 0.030024192810058595, 0.03018044853210449, 0.030077951431274414, 0.03004412841796875, 0.030222368240356446, 0.0304389762878418, 0.030560703277587892, 0.03071311950683594, 0.030527584075927733, 0.03064076805114746, 0.030461919784545897, 0.03030428886413574, 0.030203903198242187, 0.030074880599975585, 0.030066688537597655, 0.030468095779418947, 0.031078399658203124, 0.03138150405883789, 0.031524768829345705, 0.03148195266723633, 0.03145078468322754, 0.03143100738525391, 0.031244255065917968, 0.03152899169921875, 0.03211468887329102, 0.03194870376586914, 0.03132835197448731, 0.03144851112365723, 0.03141279983520508, 0.031913312911987304, 0.031124128341674804, 0.031039424896240234, 0.03179177665710449, 0.03137740707397461, 0.03121971130371094, 0.031322111129760744, 0.03152678489685059, 0.031047807693481446, 0.031106943130493163, 0.03111315155029297, 0.0310581111907959, 0.030838464736938475, 0.030994144439697266, 0.03148041534423828, 0.030943231582641603, 0.03058073616027832, 0.03123404884338379, 0.03342540740966797, 0.03428303909301758, 0.03073276710510254, 0.03060531234741211, 0.03058687973022461, 0.03026697540283203, 0.030359968185424805, 0.030681087493896485, 0.030498815536499024, 0.030459615707397462, 0.0302957763671875, 0.03062022399902344, 0.0305947208404541, 0.030547359466552734, 0.030239648818969726, 0.030253087997436524, 0.030095359802246095, 0.030332160949707032, 0.030185888290405274, 0.030447967529296877, 0.030259199142456054, 0.030338655471801756, 0.03061801528930664, 0.03026265525817871, 0.03031667137145996, 0.030165504455566407, 0.0305166072845459, 0.03065100860595703, 0.03054755210876465, 0.030345632553100587, 0.03015996742248535, 0.0302347526550293, 0.031537952423095705, 0.032102081298828126, 0.030492992401123048, 0.030450815200805663, 0.030352256774902345, 0.030248960494995116, 0.030410751342773438, 0.030199392318725586, 0.03065603256225586, 0.03043724822998047, 0.030257631301879882, 0.030244735717773436, 0.03059779167175293, 0.030480224609375, 0.030756160736083983, 0.030653120040893555]",tokens/s,33.70114049465834,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4294.496256,4886.233088,0.0,4483.710976,4465.672704,s,1,10.7436259765625,10.7436259765625,0.0,10.7436259765625,10.7436259765625,10.7436259765625,10.7436259765625,[10.7436259765625],,kWh,0.00010669827799168464,1.1762235266076216e-05,3.3638638021998846e-05,0.0001520991512797597,,MB,2152.845312,5309.857792,0.0,4892.655616,4837.669376,s,10,1.8159718017578124,0.18159718017578125,0.0004328877022416853,0.18151288604736326,0.1820882095336914,0.18225849075317382,0.18239471572875976,"[0.1809141387939453, 0.18121597290039063, 0.1814171142578125, 0.1812464599609375, 0.18155850219726563, 0.18146726989746093, 0.1816407928466797, 0.18242877197265625, 0.18203240966796874, 0.1820503692626953]",tokens/s,1409.7135195172018,kWh,5.323411342348636e-06,5.870750273224289e-07,3.516093721963396e-06,9.426580091634461e-06,tokens/kWh,27157250.828132786,MB,2157.498368,5477.629952,0.0,5060.427776,5014.227968,s,10,15.907208740234374,1.5907208740234373,0.0014287310827142909,1.5904212646484375,1.592446325683594,1.5929980041503906,1.5934393469238282,"[1.5904366455078125, 1.5901727294921875, 1.5904058837890624, 1.588873779296875, 1.5897474365234374, 1.5935496826171875, 1.5888824462890625, 1.59232373046875, 1.590826416015625, 1.591989990234375]",tokens/s,39.604685541501084,kWh,4.919065063765526e-05,5.425514434583151e-06,3.2807518670239434e-05,8.742368374247784e-05,tokens/kWh,720628.5219641145,,s,630,15.904182754516606,0.025244734530978735,0.0002521348008896871,0.025230415344238283,0.025554061698913576,0.02562706956863403,0.02580793775558472,"[0.025482976913452148, 0.025077856063842774, 0.024987199783325195, 0.024844928741455077, 0.024714656829833984, 0.025843744277954103, 0.02469536018371582, 0.02472256088256836, 0.02476063919067383, 0.02501206398010254, 0.024976032257080078, 0.02500204849243164, 0.025004032135009766, 0.025109888076782227, 0.0251331844329834, 0.025141759872436522, 0.025059328079223633, 0.025067520141601563, 0.02507366371154785, 0.025070655822753907, 0.025012672424316405, 0.025206464767456055, 0.02519500732421875, 0.025221439361572267, 0.02518764877319336, 0.02513580894470215, 0.02516099166870117, 0.025176799774169922, 0.0251144962310791, 0.02527244758605957, 0.025260032653808592, 0.025174016952514647, 0.02513484764099121, 0.025141248703002928, 0.025157152175903322, 0.025148128509521483, 0.025099615097045898, 0.025339584350585937, 0.02538732719421387, 0.025385631561279296, 0.02533171272277832, 0.025311168670654298, 0.025305152893066406, 0.02531648063659668, 0.02530393600463867, 0.02526608085632324, 0.02549884796142578, 0.02537353515625, 0.025319456100463867, 0.025403392791748046, 0.025527776718139647, 0.025518239974975584, 0.025512319564819336, 0.02546073532104492, 0.02547711944580078, 0.025431968688964843, 
0.025378528594970702, 0.02550137519836426, 0.025614463806152343, 0.025596351623535157, 0.025714208602905273, 0.025709152221679688, 0.025587711334228515, 0.025292640686035157, 0.024971424102783205, 0.024938495635986328, 0.024805376052856445, 0.024778623580932618, 0.024937728881835937, 0.025027456283569335, 0.02498684883117676, 0.024942848205566408, 0.024988191604614258, 0.025351360321044923, 0.02497823905944824, 0.024983552932739257, 0.024993215560913086, 0.024977983474731444, 0.02489753532409668, 0.024885248184204102, 0.024895456314086913, 0.025218528747558595, 0.025216991424560548, 0.025366336822509765, 0.025033088684082033, 0.02500239944458008, 0.024993471145629883, 0.02496748733520508, 0.02496672058105469, 0.025219520568847655, 0.025251232147216796, 0.025286304473876954, 0.025205503463745116, 0.02515727996826172, 0.02516022491455078, 0.025161184310913087, 0.02511311912536621, 0.025219072341918947, 0.02522915267944336, 0.02527609634399414, 0.02527609634399414, 0.02546512031555176, 0.025495168685913085, 0.025443199157714844, 0.02534947204589844, 0.025335487365722657, 0.025415903091430665, 0.025475839614868163, 0.025537599563598634, 0.025562047958374023, 0.025484575271606445, 0.025334495544433594, 0.0253798713684082, 0.02552931213378906, 0.02553856086730957, 0.025483264923095703, 0.02548086357116699, 0.02546928024291992, 0.025490848541259766, 0.02543881607055664, 0.025434112548828124, 0.02555904006958008, 0.02553241539001465, 0.02550489616394043, 0.02560089683532715, 0.02557542419433594, 0.025468063354492188, 0.02500079917907715, 0.02502374458312988, 0.024976127624511717, 0.02491596794128418, 0.024852479934692383, 0.024975360870361327, 0.024979455947875977, 0.024987648010253907, 0.024891263961791994, 0.024894880294799804, 0.024922847747802734, 0.02501206398010254, 0.024981664657592773, 0.025126911163330077, 0.025126815795898438, 0.025127008438110353, 0.025025856018066405, 0.025053312301635742, 0.02511929512023926, 0.02531328010559082, 0.025046016693115233, 0.0250644474029541, 0.02505251121520996, 0.025258623123168945, 0.024931520462036134, 0.025319263458251952, 0.025033727645874023, 0.025115999221801757, 0.025170591354370116, 0.02515558433532715, 0.025132768630981444, 0.025131296157836915, 0.0251409912109375, 0.025400768280029296, 0.025518911361694336, 0.025503679275512694, 0.025294912338256835, 0.025257728576660157, 0.025360639572143556, 0.025339296340942383, 0.02523561668395996, 0.025385215759277345, 0.02537081527709961, 0.0253515510559082, 0.02535183906555176, 0.025357088088989257, 0.025319616317749025, 0.025389055252075195, 0.025464832305908205, 0.02549760055541992, 0.025470975875854493, 0.025436159133911132, 0.025405439376831054, 0.025403392791748046, 0.025453983306884767, 0.02555120086669922, 0.025647327423095702, 0.025704063415527344, 0.025686431884765625, 0.02555904006958008, 0.025425920486450194, 0.025413631439208984, 0.02527145576477051, 0.024894304275512695, 0.024864767074584963, 0.024877056121826172, 0.024880800247192383, 0.024797536849975585, 0.024958976745605467, 0.024991743087768553, 0.024995840072631836, 0.024942495346069335, 0.024825536727905273, 0.02494095993041992, 0.02494054412841797, 0.02493440055847168, 0.025014272689819338, 0.02507776069641113, 0.025071264266967774, 0.025071968078613283, 0.025022432327270507, 0.02504297637939453, 0.02518560028076172, 0.02504159927368164, 0.02493846321105957, 0.02506959915161133, 0.02517100715637207, 0.025117631912231445, 0.025016319274902343, 0.025241216659545897, 0.025216640472412108, 0.025479936599731447, 0.025161727905273438, 
0.0250666561126709, 0.025164640426635743, 0.025214975357055663, 0.02515328025817871, 0.025218591690063477, 0.025322080612182617, 0.025349952697753905, 0.025281919479370116, 0.02526470375061035, 0.0252542724609375, 0.025278303146362306, 0.02522947120666504, 0.02546601676940918, 0.025570144653320314, 0.02557855987548828, 0.025502656936645506, 0.025430015563964844, 0.025364479064941405, 0.025272319793701172, 0.025349632263183593, 0.02538947105407715, 0.025421920776367186, 0.025399295806884766, 0.02553036880493164, 0.025683967590332032, 0.02567987251281738, 0.025595008850097658, 0.025540639877319336, 0.025537343978881837, 0.025413312911987306, 0.025413984298706054, 0.025612287521362305, 0.02546659278869629, 0.025020288467407226, 0.024939008712768555, 0.024938880920410158, 0.025333791732788085, 0.024893440246582032, 0.024815616607666017, 0.024766271591186523, 0.024796703338623046, 0.024861343383789064, 0.024893440246582032, 0.02500982475280762, 0.0250412483215332, 0.024997535705566405, 0.024991424560546874, 0.02515011215209961, 0.0251146240234375, 0.02511052894592285, 0.024969215393066405, 0.02490777587890625, 0.025017343521118163, 0.025019392013549805, 0.025010175704956054, 0.0251342716217041, 0.025255775451660155, 0.02516067123413086, 0.02517523193359375, 0.02522604751586914, 0.025202688217163087, 0.025116672515869142, 0.025135103225708007, 0.025157632827758788, 0.02514739227294922, 0.025166879653930663, 0.025319520950317382, 0.025250688552856445, 0.025340959548950194, 0.02531427192687988, 0.025255935668945313, 0.025434112548828124, 0.02549350357055664, 0.025351903915405274, 0.025337343215942384, 0.025393632888793944, 0.02545631980895996, 0.025308832168579102, 0.025289695739746095, 0.025427520751953123, 0.025444799423217774, 0.02545814323425293, 0.025367071151733398, 0.025462656021118163, 0.025490751266479494, 0.025506624221801756, 0.025437503814697265, 0.025415552139282226, 0.025485952377319335, 0.025481407165527343, 0.025392608642578127, 0.025661983489990235, 0.02569215965270996, 0.025633792877197265, 0.025621503829956056, 0.025135103225708007, 0.025229215621948242, 0.025067039489746094, 0.02492678451538086, 0.024816864013671874, 0.024895872116088867, 0.024917984008789064, 0.024957376480102537, 0.024974655151367188, 0.02505081558227539, 0.024972288131713868, 0.024838144302368165, 0.02498883247375488, 0.02498031997680664, 0.024985599517822265, 0.024961023330688475, 0.024888896942138673, 0.025100383758544922, 0.02511702346801758, 0.025392736434936523, 0.025835935592651366, 0.025044992446899415, 0.025038240432739257, 0.02497529602050781, 0.025004703521728514, 0.025100288391113282, 0.02507980728149414, 0.025133056640625, 0.025231359481811523, 0.025219072341918947, 0.025169919967651368, 0.025092096328735353, 0.025169919967651368, 0.02527631950378418, 0.025316864013671874, 0.025279071807861327, 0.02531123161315918, 0.02533692741394043, 0.025332639694213867, 0.025251136779785157, 0.025256351470947267, 0.025342239379882812, 0.025409536361694338, 0.02532044792175293, 0.026850303649902343, 0.025411487579345703, 0.025455839157104494, 0.025553792953491212, 0.025577472686767577, 0.025547775268554687, 0.025739391326904296, 0.025530656814575194, 0.025535072326660156, 0.02550169563293457, 0.025423871994018556, 0.025629791259765625, 0.025611103057861326, 0.02553862380981445, 0.02567532730102539, 0.025682367324829102, 0.025636863708496094, 0.0255283203125, 0.0261079044342041, 0.025251295089721678, 0.025010623931884766, 0.024860511779785155, 0.02483839988708496, 0.024928224563598632, 0.024938528060913085, 
0.024977344512939453, 0.02496054458618164, 0.024916095733642576, 0.02504300880432129, 0.02504710388183594, 0.0250534725189209, 0.025044767379760743, 0.025005535125732423, 0.02494540786743164, 0.024975360870361327, 0.024991104125976563, 0.025070207595825195, 0.02510374450683594, 0.025104448318481444, 0.02514796829223633, 0.02512076759338379, 0.025078912734985352, 0.024963327407836914, 0.025049152374267577, 0.025112447738647462, 0.025202880859375, 0.025114528656005858, 0.025082080841064454, 0.025133440017700195, 0.025113824844360352, 0.025040735244750978, 0.025191360473632813, 0.025159679412841796, 0.025154815673828126, 0.025194847106933593, 0.025450847625732423, 0.025442079544067384, 0.02544041633605957, 0.025393184661865235, 0.02526755142211914, 0.025376640319824218, 0.02534480094909668, 0.025217119216918944, 0.025358335494995117, 0.025403392791748046, 0.02532352066040039, 0.025417728424072264, 0.02546246337890625, 0.02540729522705078, 0.025295295715332032, 0.02537055969238281, 0.02548748779296875, 0.02550579261779785, 0.025448383331298827, 0.02541526412963867, 0.025481184005737303, 0.025577280044555666, 0.025584320068359374, 0.02560550308227539, 0.025563135147094726, 0.025514623641967774, 0.025523296356201174, 0.025169088363647462, 0.02525472068786621, 0.02491939163208008, 0.024828575134277345, 0.024860511779785155, 0.024942495346069335, 0.024971519470214844, 0.024999935150146483, 0.024911392211914064, 0.024979936599731446, 0.024922111511230468, 0.024847423553466797, 0.024941503524780275, 0.025028255462646483, 0.02504870414733887, 0.025022592544555664, 0.024966815948486328, 0.02507257652282715, 0.02508799934387207, 0.025036800384521486, 0.025019968032836914, 0.025102399826049806, 0.02511267280578613, 0.025083648681640626, 0.025133600234985353, 0.025218303680419923, 0.025496320724487306, 0.02508799934387207, 0.025108480453491212, 0.025499296188354493, 0.025191904067993164, 0.0251278076171875, 0.025202688217163087, 0.025196544647216795, 0.025160736083984375, 0.025242591857910158, 0.025341951370239257, 0.025341951370239257, 0.025412960052490233, 0.02540336036682129, 0.025345727920532225, 0.025396223068237304, 0.025397247314453125, 0.025324735641479492, 0.02595680046081543, 0.025637247085571288, 0.02556723213195801, 0.025480384826660155, 0.025518911361694336, 0.025617759704589845, 0.02554323196411133, 0.025438304901123046, 0.025556480407714844, 0.025532928466796875, 0.025556991577148438, 0.025550079345703126, 0.02551612854003906, 0.025696928024291993, 0.025591808319091795, 0.02566511917114258, 0.025713056564331056, 0.025630720138549806, 0.0255283203125, 0.02552217674255371, 0.0252620792388916, 0.02490096092224121, 0.024849056243896484, 0.02487436866760254, 0.02496575927734375, 0.024969215393066405, 0.02495692825317383, 0.024987648010253907, 0.024904767990112306, 0.024888256072998046, 0.024816928863525392, 0.024941280364990236, 0.02503638458251953, 0.0250351676940918, 0.02505308723449707, 0.02508195114135742, 0.024999584197998047, 0.024981376647949218, 0.024979167938232422, 0.025213695526123046, 0.025257984161376954, 0.025173759460449217, 0.02508121681213379, 0.025080320358276367, 0.02506380844116211, 0.025177120208740234, 0.02514838409423828, 0.025204063415527344, 0.025211551666259765, 0.025228288650512694, 0.025175039291381835, 0.025235456466674806, 0.025271392822265624, 0.025318048477172853, 0.02526028823852539, 0.02523750305175781, 0.025384960174560548, 0.025358335494995117, 0.02530508804321289, 0.025376096725463867, 0.025377439498901366, 0.025640960693359374, 0.025225215911865235, 
0.025403392791748046, 0.0255283203125, 0.02548121643066406, 0.025358335494995117, 0.02535580825805664, 0.025356767654418945, 0.025374719619750977, 0.02532147216796875, 0.025392255783081054, 0.02551897621154785, 0.025643007278442383, 0.025617759704589845, 0.02557151985168457, 0.025577472686767577, 0.025553375244140624, 0.0256060791015625, 0.0255795841217041, 0.025623743057250976, 0.025674560546875, 0.025342687606811524, 0.02511824035644531, 0.024986240386962892, 0.024872703552246092, 0.024883455276489257, 0.0249234561920166, 0.024994304656982422, 0.02492844772338867, 0.024893440246582032, 0.024979455947875977, 0.024995840072631836, 0.025019775390625, 0.024969856262207032, 0.025004032135009766, 0.025026559829711914, 0.025064895629882813, 0.024983423233032227, 0.025041568756103517, 0.02508188819885254, 0.02512076759338379, 0.025108480453491212, 0.025024511337280272, 0.02507776069641113, 0.025200639724731445, 0.02508799934387207, 0.025091775894165037, 0.02511289596557617, 0.025116031646728515, 0.02504972839355469, 0.025308256149291993, 0.02548624038696289, 0.025371904373168944, 0.025240320205688477, 0.025285856246948242, 0.025246496200561522, 0.025269983291625976, 0.02523753547668457, 0.025342208862304687, 0.025440256118774415, 0.025472799301147462, 0.025394464492797853, 0.025373632431030274, 0.025411584854125976, 0.025391103744506836, 0.025333759307861328, 0.025341951370239257, 0.025397247314453125, 0.025453760147094728, 0.02547590446472168, 0.026191871643066408, 0.025390975952148436, 0.025468448638916015, 0.02528316879272461, 0.025500959396362304, 0.025498111724853514, 0.025453920364379882, 0.02615180778503418, 0.025391103744506836, 0.0254783992767334, 0.025512704849243163, 0.02561337661743164, 0.025709056854248048, 0.025704896926879883]",tokens/s,39.61222086819188,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,872.648704,601.751552,0.0,199.22944,181.241344,s,1,7.62974609375,7.62974609375,0.0,7.62974609375,7.62974609375,7.62974609375,7.62974609375,[7.62974609375],,kWh,1.5839093170833013e-05,1.735011818051321e-06,4.55639253400808e-06,2.2130497522892415e-05,,MB,1345.232896,658.374656,0.0,241.17248,225.803264,s,13,0.2078934392929077,0.015991803022531365,0.0001516011805989282,0.01594268798828125,0.016125178146362305,0.016220512771606445,0.01633095100402832,"[0.015738271713256837, 0.01612848091125488, 0.016100799560546875, 0.015918496131896973, 0.01594268798828125, 0.01602694320678711, 0.01585247993469238, 0.015911999702453612, 0.01611196708679199, 0.01601443290710449, 0.01591158390045166, 0.01587673568725586, 0.01635856056213379]",tokens/s,16008.201179023616,kWh,3.7532006402102423e-07,4.139127851199241e-08,2.1282364926478213e-07,6.295349917977988e-07,tokens/kWh,406649357.5979411,MB,1385.091072,673.05472,0.0,255.852544,225.805824,s,13,10.247889221191407,0.7882991708608775,0.005680795175853167,0.7881617431640625,0.7948011596679687,0.7981488037109374,0.8018119384765625,"[0.7881617431640625, 0.7883017578125, 0.7817357788085938, 0.7851591186523438, 0.7893953857421875, 0.7831097412109375, 0.7844913940429687, 0.78282275390625, 0.7849459228515625, 0.79509619140625, 0.7883206787109375, 0.8027277221679687, 0.7936210327148437]",tokens/s,79.91889669400466,kWh,1.7405593295293662e-05,1.9193962539178246e-06,6.656185113910894e-06,2.5981174663122382e-05,tokens/kWh,2424832.6265795077,,s,819,10.23985273742676,0.012502872695270768,0.00030264183369250086,0.012435551643371581,0.012743321800231933,0.012822761726379393,0.013601171455383289,"[0.01198198413848877, 0.012350208282470703, 0.012357215881347656, 0.012525312423706055, 0.012591967582702637, 0.012636128425598145, 0.012521408081054687, 0.012443648338317872, 0.012475839614868164, 0.012512160301208495, 0.01245967960357666, 0.012568767547607423, 0.012465248107910156, 0.012393183708190918, 0.012388352394104005, 0.012414400100708007, 0.012417599678039551, 0.012373151779174805, 0.012397727966308593, 0.012549823760986328, 0.013082847595214844, 0.012875679969787598, 0.012486751556396485, 0.012463680267333984, 0.01237007999420166, 0.01239244842529297, 0.01235750389099121, 0.012382399559020997, 0.012383968353271484, 0.01239641571044922, 0.012435872077941895, 0.012693056106567383, 0.01236950397491455, 0.01252233600616455, 0.012355584144592285, 0.01243068790435791, 0.012581536293029785, 0.012406720161437988, 0.012914752006530762, 0.012416255950927735, 0.012354304313659668, 0.012879103660583495, 0.013402175903320313, 0.013077088356018066, 0.012494943618774413, 0.012539487838745117, 0.01256220817565918, 0.012573311805725098, 0.012411999702453613, 0.012369983673095703, 0.012523776054382324, 
0.012497504234313965, 0.01243785572052002, 0.012424863815307618, 0.012392607688903809, 0.012433247566223144, 0.012406975746154784, 0.012400575637817383, 0.012435327529907227, 0.012314623832702636, 0.012435456275939942, 0.012507136344909669, 0.01252137565612793, 0.01205020809173584, 0.012647520065307618, 0.01250387191772461, 0.01244371223449707, 0.012386528015136719, 0.012518783569335938, 0.01240550422668457, 0.012389472007751465, 0.012396991729736328, 0.012387999534606934, 0.012345888137817384, 0.012447584152221679, 0.012392319679260255, 0.012370304107666015, 0.012495743751525878, 0.01257369613647461, 0.012445695877075195, 0.012509471893310547, 0.012457183837890625, 0.012392992019653321, 0.012380127906799316, 0.012402688026428223, 0.012548095703125, 0.012506560325622558, 0.012390975952148437, 0.012438559532165527, 0.012380288124084473, 0.012352352142333984, 0.012331007957458496, 0.012430879592895508, 0.012313055992126464, 0.012360960006713868, 0.012410847663879395, 0.012399392127990723, 0.012389856338500977, 0.012607935905456544, 0.013056096076965331, 0.012377728462219238, 0.012519359588623047, 0.012484576225280762, 0.012462559700012208, 0.012428544044494628, 0.012460415840148925, 0.012402336120605468, 0.012370911598205567, 0.012379903793334961, 0.012404831886291504, 0.01258892822265625, 0.012490367889404297, 0.012485024452209472, 0.01246003246307373, 0.01238374423980713, 0.012538528442382813, 0.012413824081420898, 0.012609631538391113, 0.01248863983154297, 0.013198271751403808, 0.01374617576599121, 0.012725983619689941, 0.013637920379638672, 0.01246399974822998, 0.012441439628601074, 0.012467904090881348, 0.011947744369506836, 0.012431615829467774, 0.012388383865356446, 0.012335264205932617, 0.012379008293151856, 0.012375007629394532, 0.012504351615905763, 0.012352224349975585, 0.012310015678405761, 0.012316351890563964, 0.01232102394104004, 0.012603615760803223, 0.012357888221740722, 0.01238003158569336, 0.01244745635986328, 0.012798463821411133, 0.012468447685241699, 0.012482336044311523, 0.012364864349365234, 0.012431839942932128, 0.012329376220703125, 0.012426688194274902, 0.0123439359664917, 0.012463583946228027, 0.012333503723144531, 0.012527711868286133, 0.012285056114196778, 0.012400832176208496, 0.012364319801330566, 0.012406944274902344, 0.012470272064208985, 0.01243945598602295, 0.012404831886291504, 0.012349504470825196, 0.012328895568847656, 0.012364928245544433, 0.0122990083694458, 0.012333344459533692, 0.012348992347717285, 0.0123602876663208, 0.012390080451965333, 0.012373344421386719, 0.01239311981201172, 0.012290047645568849, 0.012354592323303222, 0.012340512275695801, 0.012369600296020508, 0.012363776206970215, 0.01237606430053711, 0.012375519752502442, 0.012403231620788574, 0.012656736373901366, 0.013035264015197754, 0.012396639823913574, 0.012400704383850098, 0.012448927879333497, 0.012395359992980958, 0.012395808219909668, 0.012421759605407715, 0.01232630443572998, 0.01235424041748047, 0.012347359657287598, 0.012360960006713868, 0.012040191650390625, 0.012465920448303223, 0.012430784225463867, 0.012524352073669434, 0.012565631866455079, 0.012648832321166993, 0.012497152328491211, 0.01239641571044922, 0.012401056289672852, 0.012384415626525879, 0.012385791778564453, 0.012456255912780762, 0.01258512020111084, 0.012426176071166993, 0.012389280319213868, 0.012421119689941406, 0.012419072151184082, 0.012377823829650879, 0.012541536331176759, 0.012391103744506835, 0.01236787223815918, 0.012369919776916503, 0.012416095733642578, 0.013433759689331054, 0.012470272064208985, 
0.012374015808105468, 0.012428959846496583, 0.012351840019226075, 0.012361536026000977, 0.012376511573791504, 0.012356512069702149, 0.012403072357177735, 0.012435935974121093, 0.012574720382690429, 0.012476415634155273, 0.012593152046203614, 0.012371968269348145, 0.01248799991607666, 0.012351615905761719, 0.012485183715820313, 0.012474176406860351, 0.012464320182800293, 0.012384384155273438, 0.012949376106262208, 0.01244985580444336, 0.012508416175842285, 0.0125632963180542, 0.012422464370727538, 0.012536191940307617, 0.012392607688903809, 0.01237600040435791, 0.012347455978393554, 0.012373215675354004, 0.012362367630004882, 0.012327072143554687, 0.012405920028686523, 0.012548959732055665, 0.012506848335266113, 0.01236201572418213, 0.012373408317565919, 0.01238096046447754, 0.012397919654846191, 0.012429408073425293, 0.01209062385559082, 0.012446240425109863, 0.012421343803405762, 0.012705151557922362, 0.013056639671325684, 0.01234227180480957, 0.012391424179077149, 0.012569952011108398, 0.012458975791931153, 0.012455615997314453, 0.012404735565185548, 0.012370047569274902, 0.012369536399841308, 0.012392224311828614, 0.01241254425048828, 0.012555007934570312, 0.012435551643371581, 0.012465600013732911, 0.012341407775878907, 0.012667296409606933, 0.01392198371887207, 0.014188672065734863, 0.012578047752380372, 0.012529664039611817, 0.012485055923461915, 0.01247929573059082, 0.013117312431335449, 0.012392255783081054, 0.012378111839294433, 0.012503264427185058, 0.012474143981933594, 0.012464320182800293, 0.012558239936828614, 0.012507040023803711, 0.012459168434143067, 0.012395135879516602, 0.01238588809967041, 0.0123439359664917, 0.012848544120788574, 0.012433535575866699, 0.012525856018066406, 0.012359871864318847, 0.012333056449890138, 0.012638208389282226, 0.012373344421386719, 0.012395327568054199, 0.012341216087341308, 0.01239452838897705, 0.012380000114440917, 0.012412927627563476, 0.01256595230102539, 0.012439488410949707, 0.012503680229187011, 0.01257203197479248, 0.012336031913757324, 0.01240617561340332, 0.01237980842590332, 0.012381055831909179, 0.01240236759185791, 0.012615679740905762, 0.012312671661376954, 0.012469951629638671, 0.012458304405212402, 0.012097023963928222, 0.012489695549011231, 0.012529664039611817, 0.012504927635192871, 0.012435615539550781, 0.012480511665344238, 0.012426912307739258, 0.012424991607666016, 0.012509056091308594, 0.012417599678039551, 0.012445183753967285, 0.012429951667785644, 0.01242521572113037, 0.012429311752319335, 0.012382080078125, 0.012406975746154784, 0.012472576141357421, 0.012529472351074218, 0.012404000282287598, 0.012425824165344239, 0.013084671974182128, 0.012445792198181153, 0.012339327812194824, 0.012392064094543458, 0.012490912437438965, 0.012369888305664063, 0.012341279983520508, 0.012411935806274414, 0.012477408409118653, 0.012443648338317872, 0.012394495964050293, 0.012337151527404786, 0.012380160331726075, 0.01251097583770752, 0.0123721923828125, 0.012469280242919921, 0.012345696449279785, 0.012425248146057128, 0.012378687858581544, 0.012384415626525879, 0.012384160041809082, 0.012359999656677246, 0.012330047607421875, 0.01236838436126709, 0.012391584396362305, 0.012348480224609375, 0.012387359619140625, 0.01234774398803711, 0.012601887702941895, 0.012492735862731934, 0.012468223571777343, 0.01241215991973877, 0.012388575553894042, 0.012420895576477051, 0.012456704139709472, 0.012404159545898437, 0.01236032009124756, 0.012312576293945313, 0.01242262363433838, 0.012316512107849122, 0.012348095893859863, 0.012390399932861328, 
0.01232425594329834, 0.011966464042663574, 0.012464127540588378, 0.012406592369079589, 0.01242950439453125, 0.01243558406829834, 0.012589119911193847, 0.01244745635986328, 0.012472415924072265, 0.012381919860839844, 0.012403264045715331, 0.01227952003479004, 0.012461855888366699, 0.01234556770324707, 0.012629471778869628, 0.01249728012084961, 0.01254319953918457, 0.012483872413635255, 0.012379615783691407, 0.01243391990661621, 0.012358783721923828, 0.012354111671447754, 0.012373503684997558, 0.012335616111755371, 0.012431488037109376, 0.01240447998046875, 0.012488191604614257, 0.012479104042053223, 0.01235267162322998, 0.012341535568237305, 0.0123472318649292, 0.012337663650512695, 0.012356224060058594, 0.012434207916259765, 0.012366880416870117, 0.01234886360168457, 0.012428704261779786, 0.01318179225921631, 0.012441439628601074, 0.012470687866210937, 0.012346240043640136, 0.012413920402526856, 0.012333056449890138, 0.012289600372314453, 0.012327615737915038, 0.012370847702026367, 0.012435808181762695, 0.012545791625976563, 0.012711968421936035, 0.012839551925659179, 0.012743807792663575, 0.012546431541442872, 0.012628704071044923, 0.012443455696105957, 0.012447168350219726, 0.012483200073242188, 0.012361984252929687, 0.012418815612792969, 0.01234124755859375, 0.012480511665344238, 0.012388511657714844, 0.012514240264892578, 0.012380352020263672, 0.012417695999145507, 0.012034175872802734, 0.012362624168395996, 0.012402624130249023, 0.012726271629333496, 0.01242521572113037, 0.012435392379760742, 0.012388511657714844, 0.01242841625213623, 0.01232975959777832, 0.012339200019836426, 0.012421279907226563, 0.01274454402923584, 0.012443231582641602, 0.012421536445617675, 0.012434720039367676, 0.012405471801757812, 0.012476448059082032, 0.012442655563354492, 0.012856063842773437, 0.012635968208312988, 0.012689791679382324, 0.012889920234680175, 0.012621376037597656, 0.012472543716430664, 0.012447584152221679, 0.012363327980041503, 0.01236620807647705, 0.012333024024963378, 0.0123155517578125, 0.012400480270385742, 0.012381504058837891, 0.01233897590637207, 0.01233199977874756, 0.012375935554504395, 0.012357760429382324, 0.012311391830444336, 0.012315008163452148, 0.012357695579528809, 0.012431936264038086, 0.012328895568847656, 0.012303584098815918, 0.012526432037353515, 0.012400992393493652, 0.012332703590393066, 0.012346464157104492, 0.012321696281433106, 0.0123571195602417, 0.012357695579528809, 0.0123788480758667, 0.012336864471435546, 0.012269344329833985, 0.012646592140197754, 0.012388447761535644, 0.012285887718200683, 0.012297727584838868, 0.012348256111145019, 0.01245353603363037, 0.012312543869018555, 0.01231868839263916, 0.012293184280395508, 0.012526592254638673, 0.01234329605102539, 0.01242031955718994, 0.012142335891723633, 0.012440064430236816, 0.012351327896118165, 0.012417183876037598, 0.012394399642944335, 0.01246009635925293, 0.01237564754486084, 0.01234716796875, 0.01252406406402588, 0.012423295974731446, 0.01239459228515625, 0.012359328269958496, 0.012367424011230468, 0.012454560279846191, 0.012345439910888672, 0.012435392379760742, 0.012482239723205566, 0.012555680274963378, 0.012452768325805665, 0.012326911926269531, 0.012380000114440917, 0.012437248229980468, 0.012560000419616699, 0.01244883155822754, 0.01254371166229248, 0.01242473602294922, 0.012425248146057128, 0.012514816284179688, 0.012465087890625, 0.012496607780456543, 0.012428863525390625, 0.012536255836486817, 0.01372755241394043, 0.012492704391479491, 0.012683327674865724, 0.012730879783630371, 
0.0124203519821167, 0.012413824081420898, 0.012405887603759765, 0.012423935890197755, 0.012609888076782226, 0.01234124755859375, 0.012311200141906738, 0.012355680465698243, 0.012358816146850585, 0.012406240463256836, 0.012311840057373047, 0.012346367835998535, 0.01235968017578125, 0.012593024253845215, 0.012540032386779785, 0.01235919952392578, 0.012388128280639649, 0.012427552223205567, 0.012376128196716308, 0.012362079620361328, 0.01237007999420166, 0.012600192070007324, 0.012446687698364258, 0.012355584144592285, 0.01238755226135254, 0.01234819221496582, 0.0123689603805542, 0.012066816329956055, 0.012415167808532714, 0.01240764808654785, 0.012814847946166993, 0.012362208366394043, 0.012438976287841796, 0.012398816108703614, 0.012359007835388184, 0.012412128448486328, 0.012456959724426269, 0.012380767822265625, 0.012363967895507812, 0.012351263999938964, 0.012412672042846679, 0.012567008018493652, 0.012586336135864257, 0.012580703735351563, 0.012563263893127442, 0.01274454402923584, 0.01285750389099121, 0.012555520057678222, 0.012511391639709473, 0.012446304321289063, 0.012460224151611328, 0.012483615875244141, 0.012436351776123046, 0.012629311561584472, 0.012546848297119141, 0.012430399894714355, 0.01234934425354004, 0.012349632263183594, 0.012316320419311523, 0.012376928329467773, 0.012458144187927246, 0.012589056015014649, 0.012431360244750977, 0.01236086368560791, 0.012566975593566895, 0.012355744361877441, 0.012536064147949218, 0.012334303855895996, 0.012551967620849609, 0.01245900821685791, 0.012705792427062988, 0.012416095733642578, 0.012432064056396484, 0.012496800422668456, 0.012446016311645507, 0.012439680099487305, 0.013960800170898437, 0.01851011276245117, 0.01386291217803955, 0.012566783905029296, 0.012526975631713868, 0.012537535667419433, 0.01245206356048584, 0.01247110366821289, 0.012453408241271972, 0.01243926429748535, 0.01234985637664795, 0.012412927627563476, 0.01235324764251709, 0.012560928344726562, 0.012049504280090332, 0.012434335708618164, 0.0124552001953125, 0.012558783531188965, 0.012468480110168456, 0.012455967903137207, 0.012430527687072753, 0.01250387191772461, 0.012515328407287597, 0.012404959678649902, 0.012535584449768067, 0.012417311668395997, 0.01250483226776123, 0.012506272315979004, 0.012605600357055664, 0.012694432258605956, 0.012780575752258301, 0.012594176292419433, 0.012662495613098144, 0.012453887939453125, 0.012578911781311035, 0.012451904296875, 0.012504735946655273, 0.012556480407714845, 0.012494239807128906, 0.012515935897827148, 0.012404352188110352, 0.012353664398193359, 0.012383551597595214, 0.012486783981323243, 0.012448575973510742, 0.01238755226135254, 0.012450592041015625, 0.013008128166198731, 0.012403136253356933, 0.012335424423217773, 0.012385600090026856, 0.012407232284545898, 0.012345600128173827, 0.01229759979248047, 0.012320927619934082, 0.0123721923828125, 0.012536064147949218, 0.012478464126586914, 0.01239247989654541, 0.01247555160522461, 0.012343680381774903, 0.012423359870910644, 0.012390656471252441, 0.01246003246307373, 0.012372096061706543, 0.012810111999511719, 0.013035039901733398, 0.012484064102172851, 0.012399104118347168, 0.012531455993652343, 0.012688128471374512, 0.012619168281555175, 0.012730208396911621, 0.01277340793609619, 0.012781791687011719, 0.012584959983825684, 0.01248044776916504, 0.012170656204223633, 0.012594079971313477, 0.012591296195983887, 0.012846495628356934, 0.01294927978515625, 0.012888799667358399, 0.012767231941223145, 0.012572064399719238, 0.012510848045349122, 0.012544992446899415, 
0.012613632202148438, 0.012512991905212403, 0.012462080001831055, 0.012531999588012696, 0.012526944160461425, 0.012632736206054688, 0.012545408248901367, 0.01249788761138916, 0.012649727821350097, 0.01250761604309082, 0.01267296028137207, 0.012690848350524902, 0.01270025634765625, 0.01269375991821289, 0.01278649616241455, 0.01271395206451416, 0.012806400299072266, 0.012778207778930665, 0.012811264038085938, 0.01280016040802002, 0.012729439735412598, 0.012756287574768066, 0.012756447792053222, 0.012760031700134277, 0.012775424003601075, 0.012928159713745117, 0.012810976028442382, 0.012744480133056641, 0.012701375961303711, 0.012744799613952636, 0.012743200302124023, 0.012785696029663085, 0.012672991752624511, 0.01276473617553711, 0.012771391868591308, 0.012790176391601562, 0.012785663604736328, 0.012726271629333496, 0.012793919563293458, 0.012732064247131348, 0.0127675199508667, 0.012904064178466797, 0.012864224433898926, 0.012707488059997558, 0.012781567573547363, 0.01276518440246582, 0.013930144309997559, 0.012804448127746582, 0.012775424003601075, 0.01277337646484375, 0.012769280433654785, 0.01275276756286621, 0.012785856246948241, 0.01240764808654785, 0.012931296348571777, 0.012926719665527343, 0.0128123197555542, 0.012771295547485352, 0.012758655548095703, 0.012631839752197265, 0.012459936141967774, 0.012958399772644043, 0.01264185619354248, 0.012540351867675782, 0.012685312271118163, 0.012688672065734864, 0.012628288269042968, 0.01282089614868164, 0.01264579200744629, 0.012569184303283692, 0.012548095703125, 0.012546175956726075, 0.012697471618652343, 0.012709888458251953, 0.012650208473205566, 0.012514911651611327, 0.012486656188964844, 0.012636863708496093, 0.012638208389282226, 0.012576416015625, 0.012582240104675292, 0.012741632461547851, 0.012640255928039551, 0.012716032028198243, 0.012645471572875976, 0.012681632041931153, 0.012702431678771973, 0.012621600151062012, 0.012693535804748536, 0.012718048095703125, 0.01269257640838623, 0.012671903610229492, 0.012639455795288086, 0.012585247993469238, 0.012549792289733887, 0.012604224205017089, 0.012625951766967774, 0.012533760070800782, 0.01241215991973877, 0.01236832046508789, 0.012371520042419433, 0.012569472312927247, 0.013086591720581055, 0.012331007957458496, 0.012343328475952148, 0.012349120140075684, 0.012310336112976075, 0.012265952110290527, 0.012406784057617188, 0.012350912094116212, 0.012379743576049805, 0.012403200149536133, 0.012489184379577637, 0.012383359909057617, 0.012323712348937988, 0.01234329605102539]",tokens/s,79.98161897451388,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1060.069376,912.130048,0.0,509.607936,491.434496,s,1,8.07005810546875,8.07005810546875,0.0,8.07005810546875,8.07005810546875,8.07005810546875,8.07005810546875,[8.07005810546875],,kWh,2.5786774854153314e-05,2.8370980970060674e-06,8.215839906011713e-06,3.68397128571711e-05,,MB,1384.59136,1046.347776,0.0,629.1456,592.24832,s,10,0.33173187255859377,0.033173187255859375,0.0003151173575147231,0.03304300880432129,0.0334689956665039,0.033747505950927736,0.0339703141784668,"[0.034026016235351564, 0.0334071044921875, 0.03305481719970703, 0.03294736099243164, 0.033019775390625, 0.03303120040893555, 0.03298323059082031, 0.03316534423828125, 0.032916191101074216, 0.03318083190917969]",tokens/s,7717.075782484023,kWh,9.6787689624711e-07,1.0673981451579888e-07,5.570635435165666e-07,1.6316802542794755e-06,tokens/kWh,156893484.08095163,MB,1424.1792,1061.02784,0.0,643.825664,605.085696,s,10,18.10406884765625,1.8104068847656252,0.005429275622437041,1.8084608154296875,1.8154996826171874,1.8190633178710935,1.8219142260742187,"[1.814707763671875, 1.813886962890625, 1.807641357421875, 1.80636328125, 1.8037791748046874, 1.8055064697265626, 1.822626953125, 1.8134154052734375, 1.8068612060546876, 1.8092802734375]",tokens/s,34.79880712459617,kWh,5.2695541612919345e-05,5.811851654905267e-06,1.9231618954084674e-05,7.77390122219093e-05,tokens/kWh,810403.9168926387,,s,630,18.09740370750428,0.02872603763095916,0.0006436883955033683,0.028597968101501465,0.0290340030670166,0.029228936004638672,0.031079838600158723,"[0.028856319427490236, 0.02893824005126953, 0.028479007720947264, 0.028586143493652343, 0.02841971206665039, 0.028465856552124025, 0.02851020812988281, 0.028449087142944335, 0.028401344299316407, 0.028771392822265623, 0.028488576889038084, 0.028407871246337892, 0.028592159271240234, 0.02882671928405762, 0.030316991806030272, 0.028801471710205077, 0.028763359069824218, 0.028560319900512696, 0.02849500846862793, 0.028749759674072266, 0.02843942451477051, 0.028458240509033204, 0.02890220832824707, 0.02910927963256836, 0.028936416625976562, 0.02875062370300293, 0.02860540771484375, 0.02850918388366699, 0.028806976318359375, 0.02895427131652832, 0.02870694351196289, 0.02937875175476074, 0.028769920349121094, 0.028545440673828124, 0.02859187126159668, 0.028544479370117188, 0.0285579833984375, 0.028442752838134765, 0.02859212875366211, 0.028487680435180664, 0.02867795181274414, 0.031322111129760744, 0.028657855987548827, 0.028550912857055664, 0.028571903228759767, 0.02868412780761719, 0.028619935989379883, 0.028543264389038085, 0.029229440689086915, 0.030486688613891602, 0.02882374382019043, 0.028817407608032225, 0.02875775909423828, 
0.028606719970703125, 0.028665855407714845, 0.028600032806396485, 0.02874332809448242, 0.028514944076538085, 0.028858367919921874, 0.028786687850952147, 0.029605535507202147, 0.028895584106445313, 0.0290119686126709, 0.028492191314697265, 0.028786687850952147, 0.029297920227050783, 0.028754688262939452, 0.028588031768798827, 0.028531776428222657, 0.028425216674804688, 0.028570655822753907, 0.028579839706420897, 0.028507232666015625, 0.028591712951660155, 0.028532960891723632, 0.028528608322143555, 0.028614591598510743, 0.02866419219970703, 0.028537759780883788, 0.02852128028869629, 0.02895427131652832, 0.02862508773803711, 0.028614816665649415, 0.0286167049407959, 0.028546655654907226, 0.0286231689453125, 0.028558975219726564, 0.02851443290710449, 0.03210505676269531, 0.02871072006225586, 0.028539840698242187, 0.028455360412597656, 0.02854355239868164, 0.0285883846282959, 0.028683935165405273, 0.028665855407714845, 0.028709184646606444, 0.028611648559570314, 0.028572288513183594, 0.028859872817993164, 0.02940310478210449, 0.02883030319213867, 0.02852249526977539, 0.02858336067199707, 0.028471519470214843, 0.028618240356445314, 0.028523040771484376, 0.028449087142944335, 0.028666112899780275, 0.029159231185913084, 0.02853059196472168, 0.028598207473754883, 0.028635040283203125, 0.028538240432739257, 0.029078304290771486, 0.03334348678588867, 0.029075199127197266, 0.028958080291748046, 0.028519264221191408, 0.028645408630371093, 0.02855116844177246, 0.028591487884521486, 0.02845350456237793, 0.02842835235595703, 0.028464895248413086, 0.028963008880615235, 0.028922143936157226, 0.028877887725830078, 0.028854751586914064, 0.02922831916809082, 0.029172639846801757, 0.028635135650634767, 0.028528640747070313, 0.02910335922241211, 0.028804191589355467, 0.028663455963134767, 0.02847551918029785, 0.028489599227905272, 0.028597728729248047, 0.028437023162841798, 0.02840403175354004, 0.028638687133789063, 0.028877023696899415, 0.028462080001831053, 0.028516351699829103, 0.028480512619018555, 0.028475391387939454, 0.028521919250488283, 0.02850668716430664, 0.028467456817626954, 0.02842198371887207, 0.02839241600036621, 0.028479583740234377, 0.028477567672729492, 0.028427167892456053, 0.028493312835693358, 0.0285863037109375, 0.02842748832702637, 0.028468000411987306, 0.02840985679626465, 0.02854105567932129, 0.02838435173034668, 0.028392032623291017, 0.0284202880859375, 0.02870204734802246, 0.028815328598022463, 0.028792736053466796, 0.028801952362060547, 0.028638784408569335, 0.029481279373168946, 0.02904800033569336, 0.0289370231628418, 0.02878086471557617, 0.02878825569152832, 0.028639392852783205, 0.028690431594848635, 0.028819456100463867, 0.02894339179992676, 0.02866419219970703, 0.02849967956542969, 0.028511104583740236, 0.0285732479095459, 0.028774431228637695, 0.028610143661499023, 0.028805952072143554, 0.028639232635498047, 0.029879423141479493, 0.028610431671142578, 0.029089056015014648, 0.02823776054382324, 0.028576032638549805, 0.028370431900024414, 0.029018688201904296, 0.028543392181396485, 0.02860406494140625, 0.028636735916137697, 0.02847750473022461, 0.028418079376220703, 0.028477472305297853, 0.02863580894470215, 0.028435871124267577, 0.0284432315826416, 0.02914080047607422, 0.028592319488525392, 0.028729343414306642, 0.029081600189208984, 0.02874982452392578, 0.02858415985107422, 0.02853071975708008, 0.028413951873779295, 0.02857382392883301, 0.02859993553161621, 0.028516416549682618, 0.02848953628540039, 0.02851238441467285, 0.028520448684692383, 0.02854707145690918, 
0.02854252815246582, 0.028510175704956054, 0.028439008712768554, 0.028552223205566406, 0.028559680938720702, 0.028580511093139648, 0.02850201606750488, 0.028649471282958985, 0.02853215980529785, 0.02849385643005371, 0.028436895370483398, 0.028454303741455078, 0.028393184661865235, 0.028861440658569337, 0.029095039367675782, 0.029944192886352538, 0.029112831115722656, 0.02910207939147949, 0.02894438362121582, 0.028692384719848633, 0.02920479965209961, 0.02860598373413086, 0.028723455429077147, 0.028840160369873045, 0.028802976608276368, 0.028860416412353516, 0.02844086456298828, 0.028569183349609374, 0.028624191284179687, 0.02954310417175293, 0.028704767227172853, 0.028405023574829102, 0.028510047912597657, 0.028494016647338867, 0.02850681686401367, 0.028075904846191407, 0.028477184295654295, 0.02859657669067383, 0.028476160049438478, 0.02854185676574707, 0.028391424179077147, 0.028672159194946287, 0.02843836784362793, 0.0285614070892334, 0.028894432067871095, 0.0286046085357666, 0.0284835205078125, 0.028553407669067384, 0.028475872039794924, 0.02853068733215332, 0.028659711837768553, 0.02882476806640625, 0.028754751205444337, 0.028626943588256838, 0.029394176483154295, 0.028564287185668946, 0.02859017562866211, 0.028716896057128908, 0.02857164764404297, 0.028790143966674803, 0.028617055892944335, 0.028569120407104492, 0.028664575576782227, 0.028487104415893555, 0.028531007766723633, 0.02847932815551758, 0.028613407135009764, 0.028735456466674806, 0.028870304107666014, 0.028784543991088866, 0.028721408843994142, 0.028509504318237306, 0.028954944610595702, 0.028585664749145506, 0.028647968292236328, 0.028596223831176756, 0.02858393669128418, 0.02849897575378418, 0.028467519760131836, 0.028484512329101562, 0.028494720458984376, 0.028406784057617186, 0.02842166328430176, 0.028666208267211914, 0.02866111946105957, 0.02865545654296875, 0.028649824142456055, 0.028691232681274412, 0.02846067237854004, 0.029126688003540038, 0.028464384078979492, 0.028678815841674806, 0.028642847061157228, 0.028594751358032227, 0.028544384002685545, 0.02870457649230957, 0.02862115287780762, 0.028975584030151366, 0.028079263687133788, 0.028518367767333984, 0.028532672882080078, 0.028375936508178712, 0.028724319458007814, 0.028652639389038087, 0.028611967086791992, 0.028600992202758788, 0.028623008728027345, 0.0284932804107666, 0.028393760681152343, 0.02844870376586914, 0.028766239166259765, 0.02840928077697754, 0.028668447494506834, 0.028518655776977538, 0.02848863983154297, 0.02841881561279297, 0.028387104034423828, 0.02834454345703125, 0.028472448348999025, 0.028467296600341797, 0.028523359298706055, 0.028479488372802734, 0.028552928924560548, 0.02981507110595703, 0.028966848373413085, 0.02918729591369629, 0.02921558380126953, 0.029021568298339843, 0.028670879364013673, 0.028388320922851564, 0.028643295288085936, 0.02889193534851074, 0.02854707145690918, 0.028422143936157225, 0.028459007263183594, 0.029563936233520507, 0.028570592880249022, 0.02855673599243164, 0.028598848342895507, 0.02951116752624512, 0.030081695556640625, 0.02880905532836914, 0.02862054443359375, 0.028565759658813476, 0.028483680725097656, 0.028771520614624024, 0.028471935272216798, 0.02838947105407715, 0.02866387176513672, 0.028578880310058594, 0.0284671688079834, 0.028391839981079102, 0.028382848739624024, 0.02850099182128906, 0.028444543838500976, 0.028417152404785158, 0.02853772735595703, 0.028487680435180664, 0.029210687637329102, 0.02856153678894043, 0.02843219184875488, 0.02822867202758789, 0.02851081657409668, 0.028440832138061523, 
0.028648799896240234, 0.02884000015258789, 0.032350688934326174, 0.03720451354980469, 0.028966592788696288, 0.02878463935852051, 0.028500160217285155, 0.02855507278442383, 0.02927129554748535, 0.036795135498046874, 0.028704639434814452, 0.028563583374023437, 0.028524192810058593, 0.028432512283325197, 0.028729888916015626, 0.028370208740234375, 0.028618656158447265, 0.028557823181152343, 0.028493824005126952, 0.02836070442199707, 0.02836419105529785, 0.02833417510986328, 0.02842265510559082, 0.028377023696899414, 0.028469600677490235, 0.02839727973937988, 0.028318912506103515, 0.028973888397216797, 0.02837513542175293, 0.02842399978637695, 0.028672063827514648, 0.028659168243408202, 0.02872332763671875, 0.02881955146789551, 0.028620704650878907, 0.02862758445739746, 0.028728607177734376, 0.028852672576904298, 0.028604415893554686, 0.028687616348266602, 0.02861961555480957, 0.028766559600830077, 0.02875935935974121, 0.028619104385375977, 0.028536352157592773, 0.028799423217773436, 0.028618783950805665, 0.028513504028320313, 0.028660512924194335, 0.02861836814880371, 0.028596607208251953, 0.028525856018066405, 0.028564064025878907, 0.028489376068115236, 0.028471296310424804, 0.028481151580810545, 0.028527456283569334, 0.028624895095825196, 0.028653312683105468, 0.028612863540649413, 0.02836534309387207, 0.029236671447753906, 0.028820032119750975, 0.028762111663818358, 0.02875596809387207, 0.028906911849975587, 0.02890713691711426, 0.02875859260559082, 0.028809503555297853, 0.028790271759033204, 0.02887718391418457, 0.028944416046142576, 0.02898147201538086, 0.028909759521484377, 0.028700735092163084, 0.03034316825866699, 0.02883967971801758, 0.02865679931640625, 0.028867136001586913, 0.028555551528930665, 0.028534048080444335, 0.028531328201293945, 0.02868003273010254, 0.028532384872436523, 0.028632959365844726, 0.028816095352172853, 0.02870681571960449, 0.029063167572021483, 0.02895408058166504, 0.028705024719238283, 0.028846208572387694, 0.02906947135925293, 0.0289814395904541, 0.028725055694580077, 0.028752960205078126, 0.028576704025268556, 0.029212671279907225, 0.028643007278442382, 0.028350784301757814, 0.028661760330200195, 0.028722272872924805, 0.028638111114501954, 0.02841427230834961, 0.028572799682617188, 0.028801599502563478, 0.028504064559936523, 0.02871286392211914, 0.02903183937072754, 0.029122655868530273, 0.029053312301635742, 0.028897504806518554, 0.028497343063354493, 0.02870265579223633, 0.028639871597290038, 0.02917100715637207, 0.02869219207763672, 0.02862998390197754, 0.028638879776000978, 0.028527967453002928, 0.028476415634155275, 0.02855116844177246, 0.028565536499023436, 0.028450143814086913, 0.028219392776489258, 0.028499807357788086, 0.028700864791870118, 0.028669919967651367, 0.028646400451660156, 0.028496896743774414, 0.02884988784790039, 0.02912838363647461, 0.028910335540771485, 0.02847327995300293, 0.028565343856811524, 0.028591455459594725, 0.028579647064208985, 0.02856643295288086, 0.028630239486694336, 0.028680992126464844, 0.02849990463256836, 0.02843244743347168, 0.028931520462036134, 0.029557376861572265, 0.029308544158935548, 0.02874729537963867, 0.028500320434570313, 0.030367807388305666, 0.028555648803710938, 0.028624895095825196, 0.028525856018066405, 0.029020896911621095, 0.028692447662353515, 0.029439552307128906, 0.028895391464233398, 0.028693952560424806, 0.02862499237060547, 0.02852739143371582, 0.028457056045532225, 0.028593887329101564, 0.02863942337036133, 0.028669952392578125, 0.028651935577392578, 0.028788320541381834, 
0.02854537582397461, 0.02917340850830078, 0.02849184036254883, 0.028513696670532225, 0.02840630340576172, 0.028564863204956055, 0.028762271881103516, 0.028532575607299805, 0.02860723114013672, 0.028706687927246094, 0.028406015396118166, 0.02838844871520996, 0.02842822456359863, 0.0284150390625, 0.0283787841796875, 0.02865100860595703, 0.028590719223022462, 0.02850931167602539, 0.02840447998046875, 0.028360799789428712, 0.02850966453552246, 0.028428735733032225, 0.028398752212524414, 0.028092096328735352, 0.02849065589904785, 0.028409664154052734, 0.028385568618774414, 0.02866713523864746, 0.02909257507324219, 0.02880512046813965, 0.02876367950439453, 0.029028863906860353, 0.02881328010559082, 0.028620800018310546, 0.02840118408203125, 0.02909811210632324, 0.028530431747436524, 0.028557504653930664, 0.02832633590698242, 0.02849168014526367, 0.02853455924987793, 0.028504671096801756, 0.028390304565429687, 0.02993414306640625, 0.028532991409301756, 0.028372896194458007, 0.028411680221557618, 0.028553375244140623, 0.028636831283569336, 0.028758527755737305, 0.02866486358642578, 0.028744287490844726, 0.028752256393432617, 0.028458784103393555, 0.028604000091552735, 0.028438272476196288, 0.028661663055419923, 0.03275465774536133, 0.029657087326049804, 0.029032447814941405, 0.02854707145690918, 0.02852454376220703, 0.028593215942382812, 0.02850422477722168, 0.02878544044494629, 0.02856755256652832, 0.02857516860961914, 0.028592319488525392, 0.028479616165161134, 0.02849363136291504, 0.02917344093322754, 0.028527360916137695, 0.02835251235961914, 0.028588031768798827, 0.028473344802856446, 0.028642751693725585, 0.028612863540649413, 0.02856118392944336, 0.028755840301513673, 0.028474016189575194, 0.028447839736938478, 0.02845372772216797, 0.02848092842102051, 0.029218463897705077, 0.028666879653930662, 0.028564544677734376]",tokens/s,34.81162326830142,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1584.603136,1604.190208,0.0,1201.668096,1189.151232,s,1,8.3048056640625,8.3048056640625,0.0,8.3048056640625,8.3048056640625,8.3048056640625,8.3048056640625,[8.3048056640625],,kWh,3.692426027498641e-05,4.0654078609166135e-06,1.1942787332006044e-05,5.293245546790907e-05,,MB,1615.224832,1799.225344,0.0,1382.023168,1351.367168,s,10,0.49314117813110353,0.04931411781311035,0.0005387807762566702,0.049204910278320316,0.049985800933837886,0.05012710914611816,0.05024015571594238,"[0.04842873764038086, 0.04926041412353516, 0.04927507019042969, 0.049923168182373044, 0.049954399108886716, 0.04914940643310547, 0.04890201568603516, 0.04907807922363281, 0.048901470184326175, 
0.05026841735839844]",tokens/s,5191.21118561187,kWh,1.4354283868464054e-06,1.5830207674527977e-07,9.477077298430566e-07,2.5414381934347417e-06,tokens/kWh,100730366.23960437,MB,1621.979136,1841.168384,0.0,1423.966208,1407.328256,s,10,18.014220581054687,1.8014220581054687,0.11444368025304653,1.820716064453125,1.8762670776367187,1.8825643859863281,1.8876022326660156,"[1.4675819091796876, 1.8641611328125, 1.8888616943359375, 1.87486767578125, 1.838138671875, 1.818370361328125, 1.82248828125, 1.8098170166015626, 1.81894384765625, 1.810989990234375]",tokens/s,34.97237069821175,kWh,4.036273550898625e-05,4.451640172260774e-06,1.8804446960756822e-05,6.361882264200383e-05,tokens/kWh,990272.9629957776,,s,630,18.008997428894048,0.028585710204593717,0.001989006076888536,0.02893305587768555,0.030049209213256836,0.030249720191955567,0.030651383075714113,"[0.02265011215209961, 0.022601760864257813, 0.02278873634338379, 0.02324643135070801, 0.022796159744262696, 0.022604671478271485, 0.02279408073425293, 0.022865215301513673, 0.022794336318969727, 0.022706687927246092, 0.02291916847229004, 0.02286796760559082, 0.02290278434753418, 0.02261142349243164, 0.02266374397277832, 0.023689184188842773, 0.02314854431152344, 0.02401840019226074, 0.022813024520874022, 0.022729183197021486, 0.0224105281829834, 0.022411712646484373, 0.022675455093383787, 0.022388608932495117, 0.022132287979125975, 0.02227872085571289, 0.022597631454467772, 0.02227516746520996, 0.022006752014160157, 0.022217920303344726, 0.02214374351501465, 0.022173696517944336, 0.022240320205688478, 0.022150079727172853, 0.02214067268371582, 0.021976543426513673, 0.02209071922302246, 0.02183353614807129, 0.021964479446411132, 0.022126911163330078, 0.021864288330078124, 0.022024351119995116, 0.022040672302246093, 0.022219776153564453, 0.022071712493896483, 0.022347776412963868, 0.022342144012451173, 0.02226585578918457, 0.022329343795776366, 0.021989280700683594, 0.022024160385131837, 0.021956735610961915, 0.021960704803466798, 0.02208355140686035, 0.02402921676635742, 0.02899279975891113, 0.02926665687561035, 0.02896294403076172, 0.02873740768432617, 0.02873916816711426, 0.028931583404541016, 0.02870710372924805, 0.028756607055664064, 0.028702816009521483, 0.02884329605102539, 0.028699359893798827, 0.0313666877746582, 0.029974016189575195, 0.029049407958984374, 0.0288734073638916, 0.028779455184936523, 0.029757728576660158, 0.028756479263305663, 0.028827775955200197, 0.028634016036987304, 0.02878121566772461, 0.02930713653564453, 0.02882975959777832, 0.02872319984436035, 0.028680416107177736, 0.02900559997558594, 0.029061119079589845, 0.02915328025817871, 0.029276159286499022, 0.029212127685546874, 0.029297183990478516, 0.029329408645629884, 0.029378559112548826, 0.029393056869506835, 0.029712223052978517, 0.03275775909423828, 0.029708288192749024, 0.03022643280029297, 0.03021824073791504, 0.03008665657043457, 0.030023328781127928, 0.030151647567749025, 0.030216064453125, 0.030088415145874025, 0.030407455444335936, 0.030228479385375977, 0.030354463577270507, 0.030049087524414063, 0.02994764709472656, 0.029981088638305665, 0.02988764762878418, 0.030133087158203124, 0.02996428871154785, 0.030050304412841795, 0.02998271942138672, 0.029898591995239258, 0.0294169921875, 0.029592191696166992, 0.02934783935546875, 0.029163520812988283, 0.029278207778930664, 0.030191104888916017, 0.02930956840515137, 0.029383871078491212, 0.029211328506469725, 0.029717632293701172, 0.02951603126525879, 0.029495296478271486, 0.0294017276763916, 0.029470975875854493, 
0.029283967971801758, 0.02921945571899414, 0.029323135375976563, 0.029362464904785158, 0.02957107162475586, 0.029535871505737304, 0.029561216354370118, 0.02958336067199707, 0.030336959838867188, 0.02967558479309082, 0.03028201675415039, 0.030117055892944337, 0.030258720397949218, 0.030116640090942382, 0.030433727264404298, 0.030367519378662108, 0.030657535552978517, 0.030284799575805665, 0.030311744689941408, 0.030104288101196287, 0.030130144119262695, 0.030045696258544922, 0.029970943450927736, 0.030238719940185548, 0.029953983306884764, 0.029767391204833984, 0.029686111450195313, 0.030154304504394533, 0.029790304183959962, 0.029698400497436522, 0.029603296279907227, 0.029703840255737305, 0.02941222381591797, 0.030410751342773438, 0.029454336166381836, 0.029490304946899415, 0.02926595115661621, 0.029725568771362305, 0.02980246353149414, 0.02966752052307129, 0.02985148811340332, 0.030590944290161133, 0.030535680770874023, 0.02961408042907715, 0.029884544372558594, 0.029750688552856445, 0.029470592498779296, 0.0300795841217041, 0.029898752212524415, 0.02996019172668457, 0.029609983444213867, 0.02996019172668457, 0.030009248733520507, 0.03013033676147461, 0.03028780746459961, 0.030302463531494142, 0.03049616050720215, 0.03007535934448242, 0.030530656814575195, 0.030101696014404298, 0.0301712646484375, 0.030861087799072266, 0.030296031951904296, 0.030669536590576172, 0.029436159133911132, 0.029691648483276368, 0.029728639602661134, 0.030183551788330078, 0.02944233512878418, 0.029394752502441408, 0.029543968200683595, 0.029551231384277343, 0.02941961669921875, 0.029433504104614257, 0.029239295959472656, 0.02958758354187012, 0.030535551071166993, 0.030160287857055663, 0.029657472610473634, 0.02963657569885254, 0.02969011116027832, 0.029447839736938475, 0.029644832611083985, 0.030266752243041994, 0.02953926467895508, 0.029634559631347656, 0.029691904067993165, 0.02966691207885742, 0.03018364715576172, 0.030021408081054687, 0.029940128326416016, 0.030007295608520508, 0.03006233596801758, 0.03063632011413574, 0.030176416397094726, 0.03014956855773926, 0.030044000625610353, 0.029988895416259764, 0.030583967208862306, 0.02975984001159668, 0.029727231979370116, 0.02964630317687988, 0.029564512252807616, 0.02964371109008789, 0.02948841667175293, 0.029952543258666992, 0.02999519920349121, 0.029845855712890626, 0.02973654365539551, 0.029499616622924805, 0.029535903930664062, 0.029583616256713866, 0.02944812774658203, 0.029505344390869142, 0.029411327362060546, 0.02928223991394043, 0.02961408042907715, 0.029554943084716796, 0.02939084815979004, 0.029454336166381836, 0.029499391555786132, 0.029768863677978517, 0.02990937614440918, 0.029728256225585937, 0.02978096008300781, 0.029773120880126954, 0.03001532745361328, 0.029785663604736327, 0.030091712951660157, 0.030108768463134764, 0.030458784103393553, 0.030066688537597655, 0.029910400390625, 0.02981337547302246, 0.03001491165161133, 0.029626943588256835, 0.02965888023376465, 0.02953446388244629, 0.02954854393005371, 0.029551776885986328, 0.029563743591308592, 0.029583583831787108, 0.029300512313842772, 0.02920857620239258, 0.029388799667358398, 0.02919785690307617, 0.029178335189819337, 0.029243392944335936, 0.02910207939147949, 0.02907939147949219, 0.029062463760375978, 0.029063711166381834, 0.029002304077148437, 0.02901750373840332, 0.02919590377807617, 0.028934879302978514, 0.02894438362121582, 0.028814464569091796, 0.028683135986328125, 0.028717056274414062, 0.02935398483276367, 0.028945760726928713, 0.028723583221435547, 0.02885251235961914, 
0.029390752792358397, 0.02856150436401367, 0.02856345558166504, 0.028631103515625, 0.02867807960510254, 0.02902835273742676, 0.028733440399169922, 0.02898739242553711, 0.02866579246520996, 0.028565248489379882, 0.028601055145263673, 0.028699583053588867, 0.028488351821899415, 0.02856483268737793, 0.028652351379394533, 0.02865135955810547, 0.028548831939697265, 0.028698720932006837, 0.029235328674316406, 0.029184064865112304, 0.029401151657104493, 0.02944607925415039, 0.029220863342285155, 0.029212415695190428, 0.02949760055541992, 0.029308063507080078, 0.028859264373779298, 0.02896940803527832, 0.028927711486816405, 0.02942630386352539, 0.029124607086181642, 0.02888672065734863, 0.0322808952331543, 0.028999679565429686, 0.028763200759887694, 0.0284902400970459, 0.02838547134399414, 0.02902412796020508, 0.02868467140197754, 0.028614112854003906, 0.028731136322021483, 0.028789695739746095, 0.028736799240112305, 0.028703264236450195, 0.028667552947998047, 0.028677919387817382, 0.02856188774108887, 0.02874166488647461, 0.02877663993835449, 0.028834911346435548, 0.029218784332275392, 0.029223615646362305, 0.029008031845092774, 0.0290645751953125, 0.029147775650024414, 0.02900979232788086, 0.02891983985900879, 0.02882147216796875, 0.028792287826538084, 0.02839414405822754, 0.028315135955810547, 0.02857187271118164, 0.02867363166809082, 0.028728256225585936, 0.029321151733398436, 0.02898841667175293, 0.029159679412841796, 0.030002912521362304, 0.029348352432250976, 0.028584287643432616, 0.028476991653442384, 0.028465599060058595, 0.028344352722167968, 0.02858799934387207, 0.02885840034484863, 0.029418943405151367, 0.028586528778076173, 0.028596223831176756, 0.02858559989929199, 0.028582271575927735, 0.02854832077026367, 0.028773151397705077, 0.028477439880371092, 0.028374528884887694, 0.028534879684448244, 0.0286429443359375, 0.028402271270751952, 0.028512319564819335, 0.029301887512207032, 0.028208927154541017, 0.0287457275390625, 0.02876416015625, 0.02846886444091797, 0.028389280319213867, 0.028506591796875, 0.02849715232849121, 0.02853494453430176, 0.028893184661865235, 0.02878838348388672, 0.028470207214355468, 0.02842624092102051, 0.02831167984008789, 0.02828620719909668, 0.02830761528015137, 0.02853731155395508, 0.028827423095703124, 0.028889312744140624, 0.02933945655822754, 0.02909382438659668, 0.029112447738647462, 0.028971136093139647, 0.029198112487792968, 0.02917398452758789, 0.029155231475830077, 0.02914518356323242, 0.029970495223999024, 0.029388511657714844, 0.029081823348999024, 0.029337600708007814, 0.029509632110595704, 0.029061151504516602, 0.02919215965270996, 0.029019968032836914, 0.028983007431030272, 0.028994016647338867, 0.029440031051635743, 0.02901968002319336, 0.02884886360168457, 0.02903830337524414, 0.02898739242553711, 0.029056896209716798, 0.02894041633605957, 0.030424863815307616, 0.030468544006347655, 0.029101856231689455, 0.029020320892333983, 0.028906463623046875, 0.028812416076660158, 0.028796672821044922, 0.028665727615356445, 0.0287642879486084, 0.028881919860839843, 0.02902876853942871, 0.029014623641967774, 0.02874367904663086, 0.02865974426269531, 0.02863920021057129, 0.028684288024902343, 0.028661535263061522, 0.028733055114746095, 0.028578079223632813, 0.028625215530395508, 0.028395168304443358, 0.02859791946411133, 0.02854707145690918, 0.028635200500488282, 0.028476255416870117, 0.02845414352416992, 0.028679935455322266, 0.028642303466796876, 0.028559263229370118, 0.028524511337280272, 0.02853696060180664, 0.028583744049072265, 0.028649375915527343, 
0.028542751312255858, 0.02924345588684082, 0.028753440856933595, 0.028666528701782226, 0.028681631088256835, 0.02864828872680664, 0.028548831939697265, 0.02852236747741699, 0.028574111938476563, 0.028579551696777342, 0.02864566421508789, 0.028846368789672852, 0.028739295959472656, 0.028734464645385743, 0.02864963150024414, 0.028618976593017577, 0.028568511962890626, 0.02857542419433594, 0.02859212875366211, 0.02871023941040039, 0.028701343536376954, 0.02885856056213379, 0.028630847930908202, 0.02858393669128418, 0.02854934310913086, 0.02858576011657715, 0.02858982467651367, 0.028817087173461913, 0.028861152648925782, 0.028538719177246093, 0.028497919082641602, 0.028520448684692383, 0.02853887939453125, 0.028721151351928712, 0.02899760055541992, 0.028827680587768554, 0.029150239944458006, 0.028857215881347657, 0.028917856216430664, 0.0287457275390625, 0.028717056274414062, 0.028626943588256838, 0.028628959655761718, 0.02843177604675293, 0.028486303329467774, 0.028757984161376954, 0.028807008743286133, 0.029018272399902345, 0.03161894416809082, 0.029021568298339843, 0.028649248123168946, 0.02859657669067383, 0.028634336471557616, 0.028856800079345702, 0.028518592834472656, 0.02879692840576172, 0.028741823196411134, 0.028794143676757814, 0.028936128616333007, 0.028842592239379884, 0.02878463935852051, 0.02910771179199219, 0.02888502311706543, 0.028842464447021484, 0.029083648681640626, 0.029490591049194336, 0.028951135635375977, 0.028903423309326173, 0.02915123176574707, 0.029142431259155274, 0.030067615509033203, 0.029081247329711915, 0.028972288131713868, 0.029000480651855468, 0.028876447677612306, 0.02907935905456543, 0.02896067237854004, 0.02887295913696289, 0.028934528350830078, 0.028911615371704103, 0.029865856170654296, 0.02911177635192871, 0.02917558479309082, 0.028986240386962892, 0.029108224868774416, 0.02903654479980469, 0.0285631046295166, 0.028452863693237306, 0.028387903213500976, 0.02973673629760742, 0.02873139190673828, 0.028848127365112306, 0.028628992080688476, 0.028366847991943358, 0.02836070442199707, 0.028512256622314453, 0.02880512046813965, 0.028391103744506836, 0.02835852813720703, 0.02853638458251953, 0.028625791549682617, 0.028661792755126952, 0.029001792907714843, 0.028892063140869142, 0.02899398422241211, 0.02918252754211426, 0.028846080780029298, 0.028659711837768553, 0.028686336517333984, 0.028460607528686524, 0.028757984161376954, 0.028587583541870118, 0.028777376174926757, 0.03031644821166992, 0.029130655288696287, 0.028821599960327147, 0.02883635139465332, 0.02851820755004883, 0.028452287673950194, 0.02835772705078125, 0.028337024688720704, 0.028308256149291992, 0.02836275291442871, 0.02840166473388672, 0.02843235206604004, 0.02853891181945801, 0.028358655929565428, 0.028435583114624022, 0.02837388801574707, 0.028740991592407228, 0.028430976867675782, 0.028553216934204102, 0.02851020812988281, 0.028499967575073244, 0.028575872421264647, 0.0288275203704834, 0.02842732810974121, 0.02841619110107422, 0.029102848052978515, 0.029045888900756836, 0.028642175674438476, 0.028753919601440428, 0.02879852867126465, 0.028707584381103515, 0.028434112548828126, 0.02845635223388672, 0.02842508888244629, 0.028716768264770508, 0.02868400001525879, 0.028725536346435546, 0.029018272399902345, 0.02913865661621094, 0.02923734474182129, 0.028819583892822267, 0.028831872940063476, 0.028516128540039064, 0.02855311965942383, 0.02846691131591797, 0.028604799270629883, 0.02860188865661621, 0.028758079528808593, 0.029149343490600586, 0.028812768936157227, 0.02873321533203125, 
0.029248384475708007, 0.02875539207458496, 0.028758880615234374, 0.028778335571289063, 0.028892799377441405, 0.02891404724121094, 0.028647424697875977, 0.029398752212524415, 0.028539007186889648, 0.029167295455932617, 0.02894428825378418, 0.029682432174682617]",tokens/s,34.98251374000497,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4296.548352,4886.233088,0.0,4483.710976,4465.672704,s,1,10.7379111328125,10.7379111328125,0.0,10.7379111328125,10.7379111328125,10.7379111328125,10.7379111328125,[10.7379111328125],,kWh,0.00010794066158750865,1.1899220575926714e-05,3.3526693487995196e-05,0.00015336657565143057,,MB,2153.914368,5309.857792,0.0,4892.655616,4841.339904,s,10,1.9641685333251955,0.19641685333251954,0.00045467831309295995,0.1965788803100586,0.1968275390625,0.19697163848876953,0.19708691802978515,"[0.19561196899414063, 0.19575485229492187, 0.19661724853515625, 0.1961144256591797, 0.19654051208496093, 0.19622055053710938, 0.19679551696777345, 0.19711573791503906, 0.1967484130859375, 0.19664930725097657]",tokens/s,1303.3504796383766,kWh,5.775228835375177e-06,6.369021732562968e-07,3.8252427116076765e-06,1.023737372023915e-05,tokens/kWh,25006413.46069954,MB,2159.026176,5477.629952,0.0,5060.427776,5012.931584,s,10,18.77374719238281,1.877374719238281,0.004906811295751002,1.8758689575195313,1.88391943359375,1.8859052124023437,1.8874938354492186,"[1.8726434326171875, 1.8878909912109374, 1.8749144287109376, 1.876823486328125, 1.8713726806640625, 1.8744427490234374, 1.8804154052734374, 1.8775953369140626, 1.8834781494140624, 1.8741705322265625]",tokens/s,33.55749885966366,kWh,7.074144245045491e-05,7.802359644343153e-06,4.5506157864592e-05,0.00012404995995939005,tokens/kWh,507859.89790423284,,s,630,18.770692888259887,0.029794750616285535,0.00037116582118075714,0.029753759384155275,0.030097993278503418,0.03028514232635498,0.03113627298355103,"[0.030451520919799805, 0.030077024459838866, 0.02974224090576172, 0.02972271919250488, 0.029774751663208008, 0.029726463317871092, 0.029574880599975584, 0.029427263259887697, 0.02961916732788086, 0.02982268714904785, 0.02953411293029785, 0.02962371253967285, 0.029535200119018553, 0.029655040740966795, 0.029577215194702147, 0.02956883239746094, 0.029625984191894533, 0.029628992080688477, 0.029456384658813478, 0.029851648330688478, 0.029808639526367187, 0.029652992248535157, 0.029505151748657227, 0.02956326484680176, 0.029564416885375977, 0.029444608688354492, 0.02944371223449707, 0.029444480895996095, 0.029538272857666015, 0.029595680236816406, 0.02983065605163574, 0.029809312820434572, 0.02974857521057129, 0.029651456832885743, 0.029663135528564453, 0.029526111602783203, 0.02977382469177246, 0.029940927505493164, 0.0297357120513916, 0.02964406394958496, 0.029512447357177736, 0.029896703720092774, 0.02977164840698242, 0.02992140769958496, 0.029978624343872072, 0.029702144622802733, 0.029655040740966795, 0.029480960845947264, 0.029708288192749024, 0.02981068801879883, 0.029728096008300783, 0.029713056564331056, 0.029667327880859375, 0.029861312866210937, 0.0300118408203125, 0.030111007690429688, 
0.0299036808013916, 0.02976361656188965, 0.02967523193359375, 0.0298621768951416, 0.030006784439086914, 0.02992793655395508, 0.02983024024963379, 0.030214879989624025, 0.030154176712036133, 0.03013222312927246, 0.030087039947509765, 0.029792959213256837, 0.029921279907226563, 0.029576416015625, 0.02973161506652832, 0.029823999404907226, 0.030225408554077147, 0.0306845760345459, 0.02991551971435547, 0.0301592960357666, 0.030307775497436525, 0.030259552001953124, 0.029855743408203125, 0.02994790458679199, 0.02999295997619629, 0.03094438362121582, 0.029907840728759766, 0.029861888885498046, 0.029650495529174804, 0.02968783950805664, 0.02969171142578125, 0.029614208221435546, 0.03096419143676758, 0.029658271789550782, 0.029588319778442382, 0.030035968780517577, 0.029874176025390626, 0.029908607482910157, 0.02983318328857422, 0.029792671203613282, 0.030027776718139648, 0.03065353584289551, 0.029746080398559572, 0.029972032546997072, 0.029847999572753907, 0.02993561553955078, 0.02995814323425293, 0.029929279327392578, 0.02979158401489258, 0.029772640228271485, 0.0299683837890625, 0.029996639251708986, 0.02986844825744629, 0.029827199935913085, 0.02984284782409668, 0.02985526466369629, 0.029881280899047853, 0.029945600509643556, 0.030030080795288086, 0.029839359283447265, 0.029903968811035155, 0.029901727676391602, 0.029890560150146486, 0.02976972770690918, 0.029880319595336914, 0.029903936386108398, 0.029930431365966795, 0.029840768814086913, 0.029840000152587892, 0.030158975601196288, 0.030060543060302734, 0.029801599502563475, 0.029610368728637697, 0.029413728713989257, 0.02972457695007324, 0.02952422332763672, 0.02957107162475586, 0.030496768951416016, 0.02962188720703125, 0.029571456909179686, 0.029521184921264647, 0.02949612808227539, 0.029475967407226564, 0.029455135345458985, 0.029286399841308593, 0.029497472763061524, 0.029441919326782227, 0.02956492805480957, 0.029462528228759766, 0.029787519454956054, 0.029807231903076173, 0.029698175430297853, 0.029674848556518554, 0.029626688003540038, 0.02950681686401367, 0.029466943740844728, 0.02955721664428711, 0.029579456329345704, 0.029552160263061525, 0.029708063125610352, 0.029674175262451172, 0.02957017517089844, 0.02959654426574707, 0.029628416061401368, 0.02954035186767578, 0.029873376846313478, 0.029901023864746093, 0.029686336517333985, 0.029906944274902345, 0.029749248504638674, 0.02969584083557129, 0.029659296035766603, 0.03000054359436035, 0.02992188835144043, 0.02978611183166504, 0.02973526382446289, 0.030025375366210937, 0.030103551864624024, 0.030130176544189452, 0.030164831161499022, 0.03007913589477539, 0.029937664031982423, 0.029816831588745117, 0.029767679214477538, 0.02998476791381836, 0.029947359085083006, 0.02977846336364746, 0.029918880462646485, 0.029882720947265625, 0.029784063339233398, 0.029868032455444334, 0.03020185661315918, 0.03075391960144043, 0.030172767639160155, 0.031145952224731446, 0.029937440872192383, 0.02979033660888672, 0.02955753517150879, 0.029660160064697266, 0.029587839126586916, 0.029597343444824217, 0.029395679473876953, 0.029341440200805664, 0.02944175910949707, 0.029544607162475586, 0.029607295989990234, 0.02963929557800293, 0.029481311798095704, 0.029593503952026368, 0.02936025619506836, 0.02956492805480957, 0.02951148796081543, 0.029773439407348633, 0.029676095962524414, 0.029460479736328125, 0.02977382469177246, 0.029913087844848633, 0.02995769691467285, 0.029710784912109375, 0.030050304412841795, 0.030668800354003906, 0.029863199234008788, 0.02981961631774902, 0.029777088165283204, 
0.029754175186157226, 0.02977791976928711, 0.029457504272460938, 0.03021446418762207, 0.029641311645507814, 0.029849599838256836, 0.02980659294128418, 0.029736000061035155, 0.029589567184448242, 0.02948931121826172, 0.029782751083374023, 0.029709791183471678, 0.029585792541503907, 0.029531648635864258, 0.029514400482177735, 0.02949337577819824, 0.02965872001647949, 0.029591840744018556, 0.029687231063842773, 0.029880800247192384, 0.031954944610595705, 0.0297762565612793, 0.02963987159729004, 0.02993174362182617, 0.029835552215576173, 0.02990083122253418, 0.029939552307128907, 0.02994806480407715, 0.02977903938293457, 0.029891679763793946, 0.02984940719604492, 0.029982048034667967, 0.030318464279174805, 0.029675039291381836, 0.029469152450561524, 0.02939084815979004, 0.02930892753601074, 0.02932326316833496, 0.029269760131835937, 0.029393152236938478, 0.0295280647277832, 0.029553951263427733, 0.029360864639282228, 0.0293703670501709, 0.029419519424438476, 0.02945852851867676, 0.029456287384033202, 0.02948409652709961, 0.029397951126098634, 0.029281696319580077, 0.029415231704711914, 0.029512479782104493, 0.02957107162475586, 0.02981827163696289, 0.031202943801879882, 0.029865215301513672, 0.030199520111083983, 0.02958745574951172, 0.03022435188293457, 0.02967705535888672, 0.029757984161376955, 0.029684032440185547, 0.02973459243774414, 0.02996566390991211, 0.029581024169921876, 0.02948601531982422, 0.029595232009887694, 0.029536991119384765, 0.029605567932128905, 0.029894559860229493, 0.02972598457336426, 0.029577695846557617, 0.02962214469909668, 0.03001388740539551, 0.02981603240966797, 0.02993577575683594, 0.029811359405517577, 0.029959199905395507, 0.02987923240661621, 0.029930912017822265, 0.029721216201782228, 0.029789407730102538, 0.029815584182739257, 0.029691904067993165, 0.029634559631347656, 0.029788127899169924, 0.030115167617797853, 0.02976838493347168, 0.029684736251831056, 0.02972774314880371, 0.029741056442260744, 0.029738431930541993, 0.02957779121398926, 0.029638656616210936, 0.029961919784545897, 0.03045452880859375, 0.029914880752563478, 0.029591583251953126, 0.030306079864501952, 0.02952851104736328, 0.029800447463989257, 0.029378559112548826, 0.029489152908325194, 0.029368064880371095, 0.02955904006958008, 0.02939289665222168, 0.02957926368713379, 0.029470720291137696, 0.02951372718811035, 0.02950156784057617, 0.029532032012939455, 0.029562496185302736, 0.02964518356323242, 0.029825023651123047, 0.029878271102905272, 0.02979430389404297, 0.02963046455383301, 0.029849599838256836, 0.029535232543945314, 0.02948761558532715, 0.02938070487976074, 0.029598432540893553, 0.02970355224609375, 0.029775327682495117, 0.030311264038085938, 0.029786176681518554, 0.029793632507324218, 0.02956319999694824, 0.02960598373413086, 0.029835424423217773, 0.029689119338989257, 0.029552480697631837, 0.029644927978515624, 0.02972537612915039, 0.029663328170776368, 0.029878175735473633, 0.029507680892944334, 0.029739007949829102, 0.02976563262939453, 0.02978755187988281, 0.02955120086669922, 0.029673471450805664, 0.029747200012207032, 0.029751232147216797, 0.029769311904907225, 0.030021440505981444, 0.030075551986694336, 0.02977702331542969, 0.02967635154724121, 0.029752511978149414, 0.029854591369628907, 0.029898752212524415, 0.030105600357055663, 0.03066249656677246, 0.030097503662109375, 0.029964384078979493, 0.029877504348754882, 0.030017791748046876, 0.030410816192626953, 0.03043132781982422, 0.030050048828125, 0.02992665672302246, 0.029911359786987304, 0.0297807674407959, 
0.02971116828918457, 0.029671455383300783, 0.02962326431274414, 0.029599456787109374, 0.029626495361328126, 0.029603103637695312, 0.029473663330078125, 0.03091632080078125, 0.030005279541015624, 0.029617792129516603, 0.029864576339721678, 0.030310400009155275, 0.02958745574951172, 0.029807872772216797, 0.029683488845825196, 0.029955039978027342, 0.029459808349609377, 0.02951750373840332, 0.029555679321289063, 0.029467744827270506, 0.029901311874389647, 0.03214080047607422, 0.030055328369140624, 0.029902847290039062, 0.029847360610961913, 0.029698240280151368, 0.029752511978149414, 0.029576000213623048, 0.029659135818481445, 0.029684864044189452, 0.029655935287475586, 0.029521568298339844, 0.02965679931640625, 0.029528671264648438, 0.029460512161254882, 0.02953215980529785, 0.02951372718811035, 0.029628416061401368, 0.029568735122680663, 0.02973651123046875, 0.029945920944213868, 0.02991375923156738, 0.029994176864624023, 0.030020416259765623, 0.02975334358215332, 0.029661184310913087, 0.03003968048095703, 0.030404319763183595, 0.02998134422302246, 0.029868032455444334, 0.029761280059814453, 0.029783359527587892, 0.029819328308105467, 0.029763967514038085, 0.029775999069213868, 0.029941055297851564, 0.03014463996887207, 0.030188703536987306, 0.02997228813171387, 0.029716512680053712, 0.029573087692260743, 0.029501440048217774, 0.029432863235473634, 0.029485376358032226, 0.029481632232666016, 0.029607936859130858, 0.029503488540649415, 0.029529184341430665, 0.029500320434570314, 0.029504608154296875, 0.029420448303222657, 0.029453407287597655, 0.02946345520019531, 0.029480960845947264, 0.029609983444213867, 0.029577215194702147, 0.02954035186767578, 0.029708288192749024, 0.029575040817260742, 0.029800575256347658, 0.02978748893737793, 0.029877119064331055, 0.029613855361938477, 0.029533824920654296, 0.029585792541503907, 0.029620159149169923, 0.029576351165771484, 0.029650911331176758, 0.029821887969970703, 0.02978201675415039, 0.029515775680541992, 0.030021631240844726, 0.030039072036743164, 0.029999616622924805, 0.030063072204589845, 0.029872352600097657, 0.029880096435546875, 0.02999075126647949, 0.02984976005554199, 0.02973401641845703, 0.02956991958618164, 0.029652000427246094, 0.029997152328491213, 0.030102399826049803, 0.029851520538330078, 0.02971660804748535, 0.03019913673400879, 0.03356444931030273, 0.029950847625732423, 0.029882368087768556, 0.029790111541748047, 0.029833311080932616, 0.02960380744934082, 0.02965711975097656, 0.030078880310058592, 0.030035808563232423, 0.029851743698120117, 0.029792224884033203, 0.029761728286743165, 0.03000444793701172, 0.029831647872924805, 0.029684032440185547, 0.029402336120605468, 0.029504255294799806, 0.02959516716003418, 0.029691551208496095, 0.029550815582275392, 0.029475263595581055, 0.029401023864746093, 0.029427967071533202, 0.029476608276367187, 0.029474815368652343, 0.02953379249572754, 0.030188095092773436, 0.03111257553100586, 0.03023232078552246, 0.029704927444458008, 0.029587135314941407, 0.02951945686340332, 0.02972336006164551, 0.029636608123779298, 0.029900447845458984, 0.029997247695922852, 0.029643968582153322, 0.02980963134765625, 0.0297259521484375, 0.02968828773498535, 0.029544736862182615, 0.029593183517456056, 0.029858207702636717, 0.029661184310913087, 0.029558784484863283, 0.029677183151245116, 0.030226816177368165, 0.030082239151000976, 0.0297828483581543, 0.02950454330444336, 0.02981177520751953, 0.03013532829284668, 0.03038640022277832, 0.03315155029296875, 0.030478431701660157, 0.030383167266845704, 
0.03030726432800293, 0.030218175888061524, 0.029898143768310546, 0.029880992889404295, 0.02978816032409668, 0.02978927993774414, 0.02961497688293457, 0.02997865676879883, 0.02984899139404297, 0.029644895553588867, 0.029829280853271484, 0.030103231430053713, 0.029920223236083985, 0.02978771209716797, 0.029980703353881834, 0.029871807098388672, 0.029780384063720702, 0.030135520935058592, 0.030087648391723634, 0.030168544769287108, 0.03038822364807129, 0.02995814323425293, 0.029380607604980468, 0.029398719787597657, 0.029281696319580077, 0.02931337547302246, 0.02935398483276367, 0.029452863693237304, 0.0295731201171875, 0.031870975494384765, 0.030136320114135744, 0.0297259521484375, 0.029816831588745117, 0.029601951599121094, 0.02952569580078125, 0.029489664077758788, 0.029445600509643555, 0.02947372817993164, 0.02935398483276367, 0.030045536041259764, 0.029745439529418945, 0.029796735763549805, 0.029647136688232423, 0.02946019172668457, 0.029452287673950195, 0.02941654396057129, 0.029772703170776366, 0.029693183898925782, 0.029686527252197267, 0.02949734306335449, 0.029574527740478515, 0.02947270393371582, 0.029582048416137697, 0.02943791961669922, 0.029476127624511718, 0.029557472229003907, 0.02959974479675293, 0.029800512313842772, 0.029781951904296874, 0.029794208526611327, 0.02969766426086426, 0.029542400360107423, 0.02976755142211914, 0.029958751678466795, 0.030061824798583985, 0.029835712432861327, 0.02995750427246094, 0.029878944396972657, 0.02985398483276367, 0.02978611183166504, 0.029902847290039062, 0.029859647750854493, 0.029966623306274413, 0.029898111343383788, 0.03003228759765625, 0.029958272933959963, 0.029767679214477538, 0.029673471450805664, 0.0298591365814209, 0.029948415756225585, 0.0299946231842041, 0.029961824417114258, 0.029915199279785157]",tokens/s,33.56295922320656,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7436.419072,8048.738304,0.0,7646.216192,7627.584,s,1,12.9949267578125,12.9949267578125,0.0,12.9949267578125,12.9949267578125,12.9949267578125,12.9949267578125,[12.9949267578125],,kWh,0.00017278734016250232,1.9052442514780857e-05,5.561560004800625e-05,0.00024745538272528944,,MB,1761.652736,8732.409856,0.0,8315.20768,8191.863296,s,10,3.3556708984375003,0.33556708984375005,0.0007634867049992896,0.3355626220703125,0.3364967987060547,0.33655421600341795,0.3366001498413086,"[0.333852294921875, 0.33557867431640626, 0.3357731018066406, 0.33551687622070314, 0.33551300048828125, 0.3364840393066406, 0.33474517822265626, 0.33554656982421877, 0.33661163330078125, 
0.3360495300292969]",tokens/s,762.8876840073953,kWh,9.832132973056104e-06,1.0843030367387353e-06,6.513440395933268e-06,1.742987640572811e-05,tokens/kWh,14687424.85838103,MB,1764.708352,9046.982656,0.0,8629.78048,8480.067584,s,10,26.989277099609374,2.6989277099609374,0.004195169949303004,2.6994720458984376,2.703811279296875,2.7050911865234375,2.7061151123046874,"[2.69077001953125, 2.694811767578125, 2.696556884765625, 2.700678955078125, 2.697007080078125, 2.6997451171875, 2.699198974609375, 2.7006103515625, 2.70352685546875, 2.70637109375]",tokens/s,23.342603719056935,kWh,7.891598056610822e-05,8.704517592640453e-06,5.248802347186665e-05,0.0001401085216306153,tokens/kWh,449651.45065261883,,s,630,26.980206951141366,0.042825725319272,0.0004004493700450249,0.042804304122924804,0.043337841796875,0.04346133193969726,0.04374581111907959,"[0.04240771102905273, 0.042219070434570315, 0.042070785522460935, 0.042178558349609374, 0.04206310272216797, 0.04201958465576172, 0.04237107086181641, 0.04257791900634766, 0.04242432022094727, 0.042246143341064454, 0.04215398406982422, 0.04219465637207031, 0.04242665481567383, 0.04238950347900391, 0.04224518585205078, 0.04222246551513672, 0.042381374359130856, 0.04242227172851563, 0.042280384063720707, 0.04236899185180664, 0.042566177368164065, 0.04250636672973633, 0.042418113708496095, 0.042635265350341796, 0.04259430313110352, 0.04259443283081055, 0.042610561370849606, 0.04264700698852539, 0.04296121597290039, 0.04274537658691406, 0.04264006423950195, 0.04254719924926758, 0.04248358535766601, 0.04252278518676758, 0.04274774551391602, 0.042790912628173826, 0.04273984146118164, 0.04274892807006836, 0.0433746566772461, 0.04310931015014648, 0.04274380874633789, 0.04256959915161133, 0.04288524627685547, 0.04296278381347656, 0.04287408065795899, 0.04304991912841797, 0.04287887954711914, 0.042789985656738284, 0.042857185363769534, 0.04292809677124024, 0.043254112243652346, 0.043312160491943356, 0.043162559509277346, 0.04325750350952148, 0.043178337097167965, 0.04310220718383789, 0.043173824310302734, 0.043460224151611326, 0.04332774353027344, 0.04303683090209961, 0.04307462310791016, 0.04327318572998047, 0.04311651229858399, 0.042435073852539064, 0.042298881530761716, 0.04208051300048828, 0.04196537780761719, 0.04196099090576172, 0.042298271179199216, 0.04242009735107422, 0.042178688049316404, 0.04229254531860351, 0.04231180953979492, 0.042262527465820314, 0.042248321533203126, 0.04245139312744141, 0.04242227172851563, 0.04222873687744141, 0.04240876770019531, 0.04237740707397461, 0.04234684753417969, 0.04270431900024414, 0.042524574279785156, 0.04245945739746094, 0.042323200225830075, 0.04249174499511719, 0.042500255584716796, 0.042746719360351564, 0.04267804718017578, 0.042559616088867186, 0.04258201599121094, 0.042493568420410154, 0.04278656005859375, 0.04296768188476562, 0.042957855224609376, 0.04270956802368164, 0.042759681701660154, 0.043088798522949216, 0.04310630416870117, 0.04295065689086914, 0.042827617645263674, 0.042870849609375, 0.042842208862304686, 0.042799102783203126, 0.043201633453369144, 0.04315228652954101, 0.042967041015625, 0.04294655990600586, 0.04312659072875977, 0.043090110778808595, 0.04284332656860351, 0.043125343322753903, 0.04317001724243164, 0.043358207702636715, 0.043342910766601565, 0.04317033767700195, 0.04304732894897461, 0.04306534576416016, 0.04315039825439453, 0.04342879867553711, 0.04333772659301758, 0.04315955352783203, 0.04314041519165039, 0.04320230484008789, 0.04335308837890625, 0.0437037124633789, 0.042369022369384765, 
0.041901920318603514, 0.04215964889526367, 0.04257228851318359, 0.0425546875, 0.042427455902099606, 0.042313472747802734, 0.04232806396484375, 0.04237721633911133, 0.042194942474365234, 0.042272384643554685, 0.042291038513183596, 0.042433185577392576, 0.042649375915527345, 0.04272544097900391, 0.04242416000366211, 0.042514240264892575, 0.04264774322509766, 0.04262639999389648, 0.042574687957763674, 0.04255894470214844, 0.04255104064941406, 0.042681121826171876, 0.042633216857910154, 0.042700801849365234, 0.04271916961669922, 0.04269472122192383, 0.04259430313110352, 0.04254105758666992, 0.04255942535400391, 0.042547264099121095, 0.04259430313110352, 0.04328243255615234, 0.043186176300048826, 0.04310812759399414, 0.042948833465576174, 0.04282572937011719, 0.04287680053710938, 0.04331270217895508, 0.043186912536621096, 0.043122528076171875, 0.042915969848632815, 0.04287638473510742, 0.04303094482421875, 0.04308374404907227, 0.04295068740844726, 0.04307958221435547, 0.043057247161865236, 0.04295897674560547, 0.04302630233764648, 0.04309302520751953, 0.04291683197021484, 0.04291788864135742, 0.043030529022216796, 0.043194366455078126, 0.043165023803710935, 0.04339926528930664, 0.04328422546386719, 0.04307846450805664, 0.04302643203735351, 0.04318342590332031, 0.04339321517944336, 0.04330339050292969, 0.042619102478027346, 0.04248828887939453, 0.04230963134765625, 0.04223590469360351, 0.04265574264526367, 0.042409984588623044, 0.04216169738769531, 0.04206991958618164, 0.042388031005859375, 0.04230553436279297, 0.04241766357421875, 0.04264585494995117, 0.04273920059204102, 0.04260019302368164, 0.043015071868896484, 0.042727039337158206, 0.042469215393066403, 0.04227519989013672, 0.04239580917358399, 0.0435233268737793, 0.04235728073120117, 0.042494174957275394, 0.04251443099975586, 0.042409854888916014, 0.042565185546875, 0.04257984161376953, 0.04263187026977539, 0.04302643203735351, 0.043087871551513675, 0.04292812728881836, 0.04275814437866211, 0.04269875335693359, 0.04262838363647461, 0.04272732925415039, 0.042808128356933595, 0.04286640167236328, 0.04333596801757812, 0.0433144645690918, 0.043026943206787106, 0.04320892715454101, 0.043511520385742186, 0.043327136993408205, 0.04269939041137695, 0.042839969635009766, 0.04288441467285156, 0.042769184112548826, 0.0428851203918457, 0.042874622344970706, 0.043014400482177736, 0.043251361846923825, 0.043102558135986326, 0.043284481048583984, 0.04344591903686523, 0.04327452850341797, 0.04312070465087891, 0.04308329772949219, 0.043149856567382815, 0.043274177551269534, 0.04350566482543945, 0.04337254333496094, 0.043218944549560545, 0.04356300735473633, 0.04374937438964844, 0.042342784881591794, 0.04211097717285156, 0.0421124153137207, 0.042418174743652344, 0.042154590606689454, 0.042376766204833986, 0.042393409729003906, 0.04249049758911133, 0.04253286361694336, 0.042379264831542966, 0.04237107086181641, 0.04228300857543945, 0.042336254119873046, 0.042684417724609375, 0.0428851203918457, 0.04269465637207031, 0.04245209503173828, 0.042261375427246096, 0.042280960083007815, 0.042254337310791014, 0.04271430587768555, 0.04253984069824219, 0.04249980926513672, 0.04244425582885742, 0.0435552978515625, 0.04278822326660156, 0.04263625717163086, 0.04271513748168945, 0.04315955352783203, 0.04315750503540039, 0.04274585723876953, 0.04258611297607422, 0.04291161727905273, 0.042825344085693356, 0.042816001892089846, 0.043038719177246096, 0.04288716888427734, 0.04273152160644531, 0.04272537612915039, 0.04259244918823242, 0.04270880126953125, 0.04294041442871094, 
0.04296054458618164, 0.04320403289794922, 0.04308060836791992, 0.04304828643798828, 0.04300457763671875, 0.042943710327148436, 0.04293916702270508, 0.04327423858642578, 0.043319297790527345, 0.04331520080566406, 0.04312675094604492, 0.04319798278808594, 0.043347648620605465, 0.0432393913269043, 0.043131328582763674, 0.043318878173828124, 0.04339590454101563, 0.04333363342285156, 0.0430709457397461, 0.0430720329284668, 0.043112449645996094, 0.04311286544799805, 0.04261273574829102, 0.042522209167480465, 0.042449310302734376, 0.04232137680053711, 0.04220163345336914, 0.04226662445068359, 0.04228300857543945, 0.042270721435546874, 0.04277414321899414, 0.042707328796386716, 0.04253900909423828, 0.0424466552734375, 0.04227910232543945, 0.04228656005859375, 0.042408481597900394, 0.04233343887329102, 0.04238425445556641, 0.04247539138793945, 0.042643455505371096, 0.04248735809326172, 0.042463104248046876, 0.042453567504882814, 0.04274486541748047, 0.042707935333251956, 0.042600704193115235, 0.042813182830810544, 0.0429315185546875, 0.04288166427612305, 0.04269574356079102, 0.04296102523803711, 0.04258671951293945, 0.042754337310791014, 0.04272518539428711, 0.04293807983398437, 0.042996192932128904, 0.0430571517944336, 0.04285619354248047, 0.04290768051147461, 0.04296051025390625, 0.04291814422607422, 0.04288499069213867, 0.0428834228515625, 0.04297331237792969, 0.04298342514038086, 0.04332748794555664, 0.0432988166809082, 0.04284521484375, 0.04313113784790039, 0.043133663177490233, 0.04277388763427734, 0.043287105560302734, 0.04349292755126953, 0.04322537612915039, 0.04310860824584961, 0.04312675094604492, 0.04310220718383789, 0.043441215515136716, 0.04345951843261719, 0.04435968017578125, 0.043308704376220704, 0.04334175872802734, 0.0435142707824707, 0.04244851303100586, 0.04234073638916016, 0.04207526397705078, 0.042115966796875, 0.04218483352661133, 0.04245008087158203, 0.04247625732421875, 0.04237516784667969, 0.04279283142089844, 0.04273574447631836, 0.042417728424072265, 0.04227734375, 0.04233827209472656, 0.04233420944213867, 0.04246527862548828, 0.042659423828125, 0.042644992828369144, 0.04232080078125, 0.04234444808959961, 0.04250624084472656, 0.04273561477661133, 0.04264128112792969, 0.0422872314453125, 0.04241183853149414, 0.04280339050292969, 0.042774528503417966, 0.042729022979736325, 0.04272377777099609, 0.04268191909790039, 0.042866657257080075, 0.04269836807250976, 0.04281180953979492, 0.043014591217041015, 0.043109825134277344, 0.04290208053588867, 0.04377990341186523, 0.043165889739990235, 0.042962944030761716, 0.0427724494934082, 0.0426475830078125, 0.04272332763671875, 0.043157054901123044, 0.043800289154052735, 0.043068126678466795, 0.042842079162597656, 0.04282371139526367, 0.042854080200195314, 0.04309171295166016, 0.043141246795654294, 0.04307398223876953, 0.0433389778137207, 0.04318288040161133, 0.04296089553833008, 0.043044864654541014, 0.04333542251586914, 0.04331695938110352, 0.04364751815795898, 0.043503200531005856, 0.04346223831176758, 0.04326892852783203, 0.043112449645996094, 0.04337660980224609, 0.043591297149658204, 0.04244307327270508, 0.042485759735107424, 0.04243865585327149, 0.04234598541259765, 0.042111328125, 0.0422762565612793, 0.04238582229614258, 0.042326366424560544, 0.042434558868408204, 0.042590206146240234, 0.04271020889282227, 0.0425841293334961, 0.0426580810546875, 0.042750175476074216, 0.042893566131591794, 0.0425467529296875, 0.042158527374267576, 0.0424035530090332, 0.04242051315307617, 0.042426368713378904, 0.0424161262512207, 
0.04300185775756836, 0.042782016754150394, 0.042527423858642575, 0.04250624084472656, 0.04246262359619141, 0.04246384048461914, 0.04341955184936523, 0.042815582275390625, 0.04286259078979492, 0.04316719818115235, 0.04317033767700195, 0.042987518310546875, 0.042713024139404296, 0.04248912048339844, 0.042752799987792967, 0.043030529022216796, 0.04306284713745117, 0.04300598526000977, 0.04330486297607422, 0.04306175994873047, 0.04259625625610351, 0.042573760986328125, 0.04274192047119141, 0.04321484756469727, 0.043431934356689454, 0.043055103302001956, 0.04297318267822266, 0.04287491226196289, 0.04274991989135742, 0.04308377456665039, 0.043565055847167966, 0.043407360076904294, 0.04319641494750977, 0.0433388786315918, 0.04337696075439453, 0.04353286361694336, 0.04390911865234375, 0.04368384170532227, 0.04330464172363281, 0.043225120544433594, 0.043216705322265625, 0.04313516616821289, 0.042603038787841795, 0.04217484664916992, 0.042440769195556644, 0.04225574493408203, 0.042420799255371094, 0.04244614410400391, 0.04237382507324219, 0.042780670166015625, 0.04251238250732422, 0.04247296142578125, 0.04255385589599609, 0.042665985107421874, 0.042618881225585936, 0.04301824188232422, 0.042618881225585936, 0.042518527984619144, 0.04293427276611328, 0.0426126708984375, 0.042548576354980466, 0.042363616943359376, 0.04243046569824219, 0.04250944137573242, 0.04253168106079101, 0.042590240478515624, 0.042807422637939456, 0.0426638069152832, 0.04250419235229492, 0.04263484954833984, 0.042662303924560545, 0.0424917106628418, 0.04287916946411133, 0.04290313720703125, 0.04284867095947266, 0.043237377166748046, 0.04328208160400391, 0.04319881439208984, 0.04294246292114258, 0.04327340698242187, 0.042942272186279294, 0.04298649597167969, 0.043207809448242186, 0.0430208625793457, 0.043009471893310544, 0.04302495956420899, 0.04299177551269531, 0.043182239532470704, 0.04319612884521484, 0.04361568069458008, 0.04312697601318359, 0.04331587219238281, 0.04323328018188476, 0.04323942565917969, 0.04330691146850586, 0.0433419189453125, 0.04311964797973633, 0.04314726257324219, 0.04344249725341797, 0.043423999786376954, 0.04350812911987305, 0.04400128173828125, 0.04373708724975586, 0.04334796905517578, 0.04335340881347656, 0.04265878295898438, 0.042186561584472655, 0.0422360954284668, 0.04237311935424805, 0.04224777603149414, 0.042068382263183594, 0.04221747207641602, 0.04250598526000977, 0.0425432014465332, 0.04283523178100586, 0.04269964981079102, 0.04290755081176758, 0.04257596969604492, 0.0424447021484375, 0.04248601531982422, 0.042604385375976564, 0.04265372848510742, 0.04262499237060547, 0.04293222427368164, 0.04271068954467774, 0.04244278335571289, 0.04241027069091797, 0.04254646301269531, 0.042695423126220704, 0.042807201385498046, 0.04280521774291992, 0.042528606414794924, 0.04307766342163086, 0.04312086486816406, 0.04293593597412109, 0.04266377639770508, 0.0427088623046875, 0.04273788833618164, 0.043199008941650394, 0.043254913330078124, 0.04320703887939453, 0.04351155090332031, 0.04292665481567383, 0.04290063858032227, 0.043119583129882816, 0.04314931106567383, 0.04319846343994141, 0.04310630416870117, 0.043450366973876955, 0.04346006393432617, 0.04305148696899414, 0.043044769287109375, 0.04299792098999024, 0.04323728179931641, 0.043251808166503904, 0.043162689208984376, 0.043315265655517576, 0.04346537780761719, 0.04338217544555664, 0.043536865234375, 0.0434378547668457, 0.04367190551757812, 0.0435219841003418, 0.043143455505371096, 0.04331427383422851, 0.04370524978637695, 0.04372889709472656, 
0.043862014770507815]",tokens/s,23.35045098582347,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,11044.757504,12553.4208,0.0,12150.898688,12116.742656,s,1,16.163751953125,16.163751953125,0.0,16.163751953125,16.163751953125,16.163751953125,16.163751953125,[16.163751953125],,kWh,0.0002646348572291724,2.9180462749588036e-05,8.516951258001615e-05,0.00037898483255877655,,MB,2008.809472,13513.916416,0.0,13096.71424,12892.965376,s,10,6.359149841308594,0.6359149841308593,0.0009729047114947553,0.6356495971679688,0.6363471740722657,0.6374991180419922,0.6384206732177734,"[0.635283203125, 0.6349862060546875, 0.6352772827148437, 0.6359114990234375, 0.6356328735351563, 0.635577880859375, 0.6356663208007812, 0.6386510620117187, 0.6360911865234375, 0.6360723266601562]",tokens/s,402.56953584745224,kWh,1.857343602916804e-05,2.0483146219586413e-06,1.2327058472750424e-05,3.294880912387711e-05,tokens/kWh,7769628.305457746,MB,2015.424512,13870.432256,0.0,13453.23008,13237.236736,s,10,43.336995117187506,4.33369951171875,0.004127981912586184,4.33511865234375,4.3372568359375,4.338267578125,4.339076171875,"[4.32694677734375, 4.32575927734375, 
4.33499462890625, 4.3370322265625, 4.33584033203125, 4.33420068359375, 4.33615087890625, 4.33524267578125, 4.3392783203125, 4.33154931640625]",tokens/s,14.537233102950907,kWh,0.00012665167289833352,1.3970375263824311e-05,8.414712981765007e-05,0.0002247691779798079,tokens/kWh,280287.5401611318,,s,630,43.31354483032221,0.068751658460829,0.00046645941135002854,0.0687289924621582,0.06930681762695312,0.06947806243896484,0.06990054779052735,"[0.06900150299072266, 0.0679708480834961, 0.06777446746826171, 0.06792806243896485, 0.06832745361328126, 0.06807753753662109, 0.06795574188232421, 0.0680212173461914, 0.067866943359375, 0.06803533172607422, 0.06792697906494141, 0.06813286590576172, 0.06796288299560548, 0.06793011474609376, 0.06854412841796875, 0.06831494140625, 0.06813954925537109, 0.06860189056396485, 0.06823677062988281, 0.06833821105957032, 0.06845442962646485, 0.06852191925048828, 0.06842121887207031, 0.06817836761474609, 0.06834381103515624, 0.06830694580078125, 0.06814080047607422, 0.06834611511230469, 0.06874111938476563, 0.06886768341064453, 0.06855651092529297, 0.06863737487792969, 0.06863193511962891, 0.06858812713623047, 0.0692408676147461, 0.06886144256591797, 0.07063385772705078, 0.06901475524902344, 0.06863311767578124, 0.0685141143798828, 0.06879238128662109, 0.0687586898803711, 0.06892015838623047, 0.06902694702148438, 0.06896473693847656, 0.06862019348144531, 0.06882284545898437, 0.06882969665527344, 0.06937423706054688, 0.06930636596679687, 0.06890812683105468, 0.06907766723632812, 0.06889702606201172, 0.06913999938964843, 0.06901193237304687, 0.06892704010009766, 0.0690588150024414, 0.06920006561279297, 0.06919487762451172, 0.06914669036865234, 0.06928623962402344, 0.06912617492675781, 0.06936815643310547, 0.06848687744140625, 0.06805117034912109, 0.06773395538330078, 0.06842982482910157, 0.06884146881103516, 0.06815948486328124, 0.06769398498535156, 0.0677894058227539, 0.06840729522705079, 0.06807091522216797, 0.06793472290039063, 0.06805020904541016, 0.06808444976806641, 0.0684150390625, 0.0683687973022461, 0.06818000030517578, 0.06818102264404297, 0.06865814208984375, 0.06851993560791016, 0.06814431762695312, 0.06774867248535156, 0.06827152252197266, 0.06812662506103516, 0.0681561279296875, 0.06851785278320313, 0.06819840240478516, 0.06847283172607421, 0.06843392181396485, 0.06858342742919922, 0.06861798095703125, 0.06859801483154297, 0.06886399841308594, 0.06894796752929687, 0.06872064208984376, 0.06867353820800781, 0.06870995330810546, 0.06860598754882813, 0.06873744201660156, 0.06865510559082032, 0.06865296173095703, 0.06882723236083985, 0.06872179412841797, 0.06866323089599609, 0.06855961608886718, 0.06898889923095704, 0.06894818878173828, 0.06908646392822265, 0.0691119384765625, 0.06890354919433593, 0.06919577789306641, 0.06872064208984376, 0.06905401611328126, 0.0689176025390625, 0.06895574188232421, 0.06934783935546875, 0.06928787231445313, 0.0692484130859375, 0.06893647766113281, 0.06931238555908203, 0.06918544006347656, 0.06943513488769532, 0.0694889907836914, 0.06956646728515625, 0.06836873626708985, 0.0683067855834961, 0.06799298858642579, 0.06827196502685547, 0.0684942398071289, 0.06832128143310547, 0.06821887969970702, 0.06790060424804688, 0.06796781158447265, 0.06789849853515625, 0.06833987426757812, 0.06801261138916016, 0.06817340850830078, 0.06838329315185547, 0.06833913421630859, 0.06809164428710937, 0.06848185729980469, 0.06867967987060547, 0.06888253021240234, 0.06856285095214844, 0.06855248260498047, 0.06840956878662109, 0.06871858978271485, 
0.06849104309082031, 0.06861993408203125, 0.06849513244628906, 0.06861103820800782, 0.06855023956298828, 0.06871036529541015, 0.0685835189819336, 0.06843750762939453, 0.06878224182128906, 0.06898944091796876, 0.06884352111816407, 0.06927897644042968, 0.06995795440673828, 0.06898323059082032, 0.06870130920410156, 0.06923715209960937, 0.06869001770019531, 0.06884390258789062, 0.06879225921630859, 0.06903814697265626, 0.06886195373535156, 0.06895206451416015, 0.06907289886474609, 0.06895359802246094, 0.06897270202636718, 0.06933744049072266, 0.06988390350341797, 0.06974463653564453, 0.06895820617675781, 0.06906674957275391, 0.06895410919189453, 0.06908313751220703, 0.06896640014648438, 0.06936486053466796, 0.06918643188476563, 0.06898178863525391, 0.06958732604980469, 0.06956502532958984, 0.06937548828125, 0.06923699188232423, 0.06836224365234375, 0.06808370971679688, 0.06827740478515625, 0.0680684814453125, 0.06812812805175782, 0.06876604461669922, 0.06833331298828126, 0.06835225677490234, 0.06800498962402343, 0.06823004913330079, 0.0679927978515625, 0.06807154846191406, 0.06817820739746094, 0.06831251525878906, 0.06854732513427735, 0.06856924438476562, 0.06848886108398437, 0.0685202865600586, 0.06849641418457031, 0.0685696029663086, 0.06873545837402344, 0.06858265686035156, 0.06885247802734375, 0.06863040161132812, 0.06867366027832031, 0.06842569732666015, 0.06838393402099609, 0.06865408325195313, 0.06867692565917968, 0.06933353424072265, 0.06871654510498047, 0.06878396606445313, 0.06876585388183594, 0.06885990142822265, 0.06889881896972656, 0.06874317169189453, 0.06895820617675781, 0.06894950103759766, 0.06915737915039062, 0.06877369689941407, 0.06862595367431641, 0.0685700454711914, 0.06905213165283203, 0.06929612731933593, 0.06912000274658203, 0.06944678497314453, 0.06921100616455078, 0.06887750244140625, 0.06923875427246094, 0.06911673736572266, 0.0692734375, 0.06932294464111328, 0.06950895690917969, 0.06939478302001953, 0.06895996856689453, 0.06938390350341797, 0.06927123260498047, 0.06921222686767578, 0.06947264099121093, 0.06940287780761718, 0.06955785369873047, 0.0692161636352539, 0.06962226867675782, 0.06849472045898437, 0.06810598754882813, 0.0683795166015625, 0.06817791748046875, 0.06851961517333985, 0.06822319793701172, 0.06814115142822266, 0.06847452545166016, 0.06825196838378907, 0.06862806701660157, 0.06831887817382812, 0.06800259399414063, 0.06807347106933594, 0.06819580841064453, 0.06793679809570312, 0.06825984191894531, 0.0685025634765625, 0.0684299545288086, 0.06828050994873047, 0.06809900665283203, 0.06832495880126953, 0.06875762939453126, 0.06868694305419921, 0.06863302612304688, 0.0684712677001953, 0.06844825744628906, 0.06845219421386718, 0.06895222473144531, 0.06860582733154297, 0.0689210205078125, 0.06907743835449219, 0.06864691162109375, 0.06883123016357422, 0.06891315460205077, 0.069010498046875, 0.06877689361572266, 0.06982963562011718, 0.06894627380371093, 0.0690121307373047, 0.06929759979248047, 0.06887216186523437, 0.0689889907836914, 0.06891574096679688, 0.06903504180908203, 0.06913260650634766, 0.06897731018066407, 0.0690851821899414, 0.06886809539794922, 0.06901484680175782, 0.06904694366455078, 0.07095094299316407, 0.0689459228515625, 0.0689760284423828, 0.06923734283447265, 0.06898051452636719, 0.06931417846679687, 0.06914012908935546, 0.06935228729248047, 0.06937760162353515, 0.06915740966796875, 0.06899017333984375, 0.06915129852294923, 0.0695031967163086, 0.0684229736328125, 0.06814176177978516, 0.06821485137939454, 0.06809532928466797, 
0.06822108459472656, 0.06821727752685547, 0.06864876556396485, 0.06803465270996094, 0.0682558364868164, 0.06806118774414062, 0.06832947540283203, 0.06843145751953125, 0.06798582458496094, 0.06803014373779297, 0.06841580963134766, 0.06870425415039062, 0.06844153594970703, 0.06833200073242188, 0.06845843505859375, 0.06862025451660156, 0.06870384216308593, 0.06826863861083984, 0.06831094360351563, 0.06828998565673829, 0.06844892883300781, 0.06867743682861328, 0.06861590576171875, 0.0687491226196289, 0.068776611328125, 0.06897782135009765, 0.068698974609375, 0.06900531005859376, 0.06883897399902343, 0.06901395416259766, 0.06877798461914063, 0.0689991683959961, 0.06881206512451171, 0.06856902313232421, 0.0686702423095703, 0.06901103973388673, 0.06931088256835938, 0.0688348159790039, 0.06877776336669922, 0.06894870758056641, 0.06890086364746094, 0.0692608642578125, 0.06916953277587891, 0.06922386932373047, 0.06907062530517578, 0.0693331527709961, 0.06904083251953125, 0.06882508850097656, 0.06896367645263672, 0.06909404754638672, 0.06918915557861328, 0.06898121643066406, 0.0696995849609375, 0.06929571533203124, 0.0692166748046875, 0.06954803466796874, 0.06918962860107422, 0.06962790679931641, 0.06949795532226563, 0.06900339508056641, 0.06843424224853516, 0.06827225494384766, 0.06840115356445313, 0.06854860687255859, 0.06808707427978515, 0.06846937561035156, 0.06816563415527344, 0.06855484771728515, 0.06818201446533204, 0.06847801971435546, 0.06839961242675781, 0.06825414276123047, 0.06829670715332031, 0.06833561706542969, 0.06841548919677734, 0.06836771392822266, 0.06899164581298828, 0.06870582580566406, 0.06850812530517578, 0.06822911834716797, 0.0685301742553711, 0.06847644805908203, 0.06853167724609376, 0.06869078063964844, 0.06832720184326171, 0.06866767883300781, 0.06876761627197266, 0.06866710662841796, 0.06845449829101563, 0.06891718292236328, 0.06900121307373047, 0.06890306854248048, 0.06914899444580078, 0.06895206451416015, 0.0686173095703125, 0.06896527862548828, 0.06928524780273437, 0.06901209259033203, 0.06863168334960937, 0.06926630401611328, 0.06896640014648438, 0.0689620132446289, 0.06916706848144531, 0.0688046417236328, 0.06884156799316406, 0.06915705871582031, 0.06924889373779297, 0.06914012908935546, 0.06907299041748047, 0.06909580993652344, 0.07061885070800782, 0.06927593231201172, 0.0691855010986328, 0.06927158355712891, 0.0691190414428711, 0.06894812774658203, 0.06924368286132812, 0.06930335998535156, 0.06918649291992188, 0.0694497299194336, 0.069359619140625, 0.06945094299316407, 0.06860160064697265, 0.06859750366210937, 0.06799001312255859, 0.06782943725585938, 0.06823299407958984, 0.06799747467041016, 0.06803456115722656, 0.06822988891601563, 0.0683499526977539, 0.06830694580078125, 0.06800982666015624, 0.06833776092529296, 0.06842784118652344, 0.06821456146240235, 0.06817609405517579, 0.06860594940185546, 0.06848684692382813, 0.06859808349609375, 0.06989209747314454, 0.06874908447265625, 0.06839727783203126, 0.06813491058349609, 0.06843516540527343, 0.06856716918945313, 0.06877865600585938, 0.06869827270507813, 0.06865699005126953, 0.0685322265625, 0.0684585952758789, 0.06895811462402343, 0.06855862426757812, 0.06853449249267578, 0.06883942413330078, 0.0699697265625, 0.06882733154296874, 0.06859503936767578, 0.06887286376953125, 0.06875955200195312, 0.06864659118652344, 0.0691630096435547, 0.06899727630615235, 0.06885801696777344, 0.0689510726928711, 0.06867222595214843, 0.069046142578125, 0.06896473693847656, 0.06927565002441406, 0.06869427490234375, 0.06905625915527344, 
0.06901350402832031, 0.06898892974853515, 0.06904422760009765, 0.06907698822021484, 0.06934454345703125, 0.06920060729980469, 0.06912818908691407, 0.06936985778808594, 0.06990399932861328, 0.06925350189208984, 0.06923011016845704, 0.06945184326171874, 0.06941903686523437, 0.06927932739257812, 0.06830899047851563, 0.06804275512695312, 0.06808595275878906, 0.06823273468017578, 0.06821670532226562, 0.06804851531982421, 0.06803282928466797, 0.06858758544921875, 0.0684361572265625, 0.06817404937744141, 0.06845849609375, 0.06829055786132812, 0.06822463989257813, 0.06839263916015625, 0.06833433532714844, 0.06847443389892578, 0.06843020629882812, 0.0685136947631836, 0.06870435333251954, 0.06846995544433594, 0.06825043487548828, 0.06838198089599609, 0.06847357177734376, 0.06865440368652344, 0.06847353363037109, 0.06890086364746094, 0.06864281463623047, 0.06895549011230469, 0.068917724609375, 0.06855289459228515, 0.06884146881103516, 0.06869401550292968, 0.06902925109863281, 0.06898252868652344, 0.06858432006835938, 0.06891926574707032, 0.06901353454589844, 0.06923878479003906, 0.06895206451416015, 0.06880032348632813, 0.06952365112304687, 0.06926335906982421, 0.06989004516601563, 0.06880461120605469, 0.06909337615966797, 0.06905036926269531, 0.06918057250976563, 0.06896025848388672, 0.0693666229248047, 0.06893772888183594, 0.0690579833984375, 0.0695301742553711, 0.0704716796875, 0.0689315185546875, 0.06957266998291016, 0.06917485046386719, 0.06935308837890625, 0.06915328216552734, 0.069521728515625, 0.06928179168701172, 0.06957807922363281, 0.06946272277832032, 0.06939958190917969, 0.0685110092163086, 0.0683458251953125, 0.06796492767333985, 0.06784486389160156, 0.06816973114013672, 0.0678845443725586, 0.06827247619628907, 0.06828457641601562, 0.06871654510498047, 0.06872252655029297, 0.06846672058105469, 0.06830681610107422, 0.06832972717285156, 0.06838066864013671, 0.06834381103515624, 0.0685804443359375, 0.06817475128173828, 0.06828851318359375, 0.0683826904296875, 0.06843299102783203, 0.06858428955078125, 0.06866134643554687, 0.06863667297363281, 0.06860594940185546, 0.06866124725341796, 0.06906655883789062, 0.06859359741210938, 0.0686451187133789, 0.06855059051513672, 0.06874323272705078, 0.06860185241699218, 0.06915891265869141, 0.06883888244628907, 0.06844889831542969, 0.06868163299560547, 0.06908108520507812, 0.0689203872680664, 0.0690390396118164, 0.06895616149902344, 0.06855270385742188, 0.06900681304931641, 0.06895980834960938, 0.06880150604248046, 0.06887833404541016, 0.06887628936767579, 0.06902169799804687, 0.06898687744140625, 0.06906400299072266, 0.06897939300537109, 0.06913801574707032, 0.06897090911865235, 0.06922444915771485, 0.06913024139404297, 0.06898258972167969, 0.06922169494628906, 0.06901219177246094, 0.0692245101928711, 0.06961366271972656, 0.0692142105102539, 0.0692490234375, 0.0694824981689453, 0.06948863983154296, 0.06925107574462891]",tokens/s,14.545103672949887,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,6772.20352,7769.817088,0.0,7367.294976,7351.94368,s,1,12.590875,12.590875,0.0,12.590875,12.590875,12.590875,12.590875,[12.590875],,kWh,0.0001635466874708527,1.8026189208199503e-05,5.0951151871994615e-05,0.0002325240285510468,,MB,1478.303744,8380.08832,0.0,7962.886144,7872.44544,s,10,3.210407012939453,0.3210407012939453,0.000807979931147453,0.32093855285644535,0.32192711791992185,0.3223107879638672,0.32261772399902344,"[0.3194629821777344, 0.32087701416015624, 0.32062774658203125, 0.3210090942382812, 0.32062860107421876, 0.3210000915527344, 0.3207613220214844, 0.32184185791015624, 0.32150384521484376, 0.3226944580078125]",tokens/s,797.4066807361165,kWh,9.383221678906749e-06,1.0344282560913563e-06,6.212400803249762e-06,1.6630050738247867e-05,tokens/kWh,15393819.53966137,MB,1504.11264,8631.74656,0.0,8214.544384,8118.577152,s,10,27.203293212890628,2.7203293212890625,0.01649439509137784,2.7150029296875,2.72555498046875,2.747215844726562,2.7645445361328123,"[2.768876708984375, 2.71070458984375, 2.713300048828125, 2.7123681640625, 2.720741455078125, 2.719334228515625, 2.716846923828125, 2.716705810546875, 2.712340576171875, 2.71207470703125]",tokens/s,23.158960757790403,kWh,8.360070792901119e-05,9.220501548192092e-06,5.555381527635088e-05,0.00014837502475355415,tokens/kWh,424599.7606715878,,s,630,27.20015459060668,0.043174848556518555,0.0005240365914026493,0.04310723304748535,0.043551283645629886,0.043972430419921875,0.04518917556762696,"[0.04547404861450195, 0.04490156936645508, 0.04463494491577148, 0.044729984283447266, 0.0447696647644043, 0.04494950485229492, 0.046565376281738284, 0.04503081512451172, 0.04482844924926758, 0.044800159454345706, 0.04483865737915039, 0.04484716796875, 0.044896575927734376, 0.046123550415039065, 0.044849056243896485, 0.044882080078125, 0.04503910446166992, 0.04514003372192383, 0.04507388687133789, 0.04527590560913086, 0.045304065704345704, 0.045074337005615236, 0.04553055953979492, 0.04510163116455078, 0.04520924758911133, 0.04500825500488281, 0.04282262420654297, 0.04330905532836914, 0.04291945648193359, 0.04271558380126953, 0.042721473693847656, 0.04292291259765625, 0.04280937576293945, 0.04296771240234375, 0.042864768981933594, 0.042952831268310544, 0.04291584014892578, 0.04310835266113281, 0.04299980926513672, 0.042971134185791016, 0.043197921752929684, 0.043092254638671876, 0.04339510345458984, 0.043269535064697266, 0.04311737442016601, 0.04305065536499023, 0.04302678298950195, 0.04321852874755859, 0.04339241409301758, 0.04371148681640625, 0.043413791656494144, 0.04313430404663086, 0.04310054397583008, 0.04335615921020508, 0.043396480560302736, 0.043305408477783205, 0.04303683090209961, 0.043091999053955075, 0.04301123046875, 0.043170654296875, 0.04348518371582031, 0.04343145751953125, 0.043310848236083985, 0.04321465682983398, 0.043002239227294924, 0.042633342742919925, 0.042496414184570314, 0.04256668853759766, 0.04279526519775391, 0.042756481170654295, 0.04271923065185547, 0.042729503631591795, 0.04270646286010742, 0.04288095855712891, 0.04279177474975586, 0.042790912628173826, 0.04281145477294922, 0.04266521453857422, 0.04273174285888672, 0.042899585723876955, 0.0429567985534668, 0.042823936462402346, 0.04281068801879883, 0.04300377655029297, 0.04364550399780273, 0.04308412933349609, 0.04308992004394531, 0.04301004791259765, 0.042917728424072266, 0.04279721450805664, 
0.0430489616394043, 0.043007713317871094, 0.04280275344848633, 0.04295276641845703, 0.0429532470703125, 0.042882686614990236, 0.0428175048828125, 0.04285699081420898, 0.04279500961303711, 0.042855712890625, 0.043074241638183595, 0.043216705322265625, 0.043065792083740236, 0.04304873657226563, 0.04306041717529297, 0.043031360626220705, 0.043128833770751954, 0.04419350433349609, 0.04348956680297852, 0.04332966232299805, 0.04316697692871094, 0.04301919937133789, 0.0431715202331543, 0.042898944854736325, 0.043132606506347655, 0.04330780792236328, 0.0432619857788086, 0.04324687957763672, 0.04305145645141602, 0.043297054290771485, 0.04340031814575195, 0.04327459335327148, 0.04326863861083984, 0.04328268814086914, 0.04332108688354492, 0.04336435317993164, 0.0431957778930664, 0.04283660888671875, 0.04231913757324219, 0.04239206314086914, 0.04243206405639648, 0.042482177734375, 0.0425533447265625, 0.04277664184570312, 0.042834014892578126, 0.04274774551391602, 0.04260265731811524, 0.04266937637329102, 0.04282575988769531, 0.04310287857055664, 0.042694625854492185, 0.04273696136474609, 0.043218944549560545, 0.04279779052734375, 0.04319660949707031, 0.04362630462646484, 0.043251041412353516, 0.043167999267578125, 0.04304838562011719, 0.043095008850097656, 0.04307331085205078, 0.04298160171508789, 0.0429969596862793, 0.04287321472167969, 0.04275024032592773, 0.04281753540039063, 0.04312876892089844, 0.04297900772094727, 0.04315014266967773, 0.042888671875, 0.043253982543945316, 0.04305481719970703, 0.04305871963500976, 0.043036800384521484, 0.042880672454833985, 0.042964256286621094, 0.04297849655151367, 0.04314940643310547, 0.04318044662475586, 0.04319551849365234, 0.04349862289428711, 0.043412830352783205, 0.04341088104248047, 0.04325270462036133, 0.043256961822509765, 0.04315865707397461, 0.04337638473510742, 0.04313702392578125, 0.04334796905517578, 0.04368384170532227, 0.04324758529663086, 0.043313087463378905, 0.04326972961425781, 0.04323158264160156, 0.043440288543701175, 0.04345436859130859, 0.04344841766357422, 0.04338687896728516, 0.04367715072631836, 0.04334787368774414, 0.04311075210571289, 0.042593505859375, 0.04251305770874023, 0.042661823272705075, 0.04260636901855469, 0.04242179107666016, 0.04263417434692383, 0.042683712005615236, 0.04258652877807617, 0.042522911071777345, 0.0426618881225586, 0.042756000518798826, 0.04266153717041016, 0.04259468841552734, 0.042675777435302736, 0.04291411209106445, 0.042942657470703124, 0.04291977691650391, 0.043167903900146486, 0.04277657699584961, 0.04285001754760742, 0.04290793609619141, 0.04303462219238281, 0.043001697540283206, 0.04287913513183594, 0.04297727966308594, 0.04442844772338867, 0.042859390258789064, 0.04270441436767578, 0.04278726577758789, 0.0430931510925293, 0.042859649658203124, 0.04306118392944336, 0.043171104431152345, 0.043037185668945314, 0.04282755279541016, 0.043004127502441404, 0.04309987258911133, 0.04320489501953125, 0.04312406539916992, 0.04349721527099609, 0.04345529556274414, 0.04336044692993164, 0.04332278442382813, 0.043269630432128905, 0.04328339385986328, 0.043327327728271484, 0.04340943908691406, 0.0431943359375, 0.043412704467773434, 0.04329779052734375, 0.043278335571289066, 0.04310220718383789, 0.04325564956665039, 0.04329276657104492, 0.04321481704711914, 0.043308353424072264, 0.04355088043212891, 0.0433834228515625, 0.043474945068359375, 0.0433889274597168, 0.043325439453125, 0.04362444686889649, 0.04311654281616211, 0.042725025177001955, 0.042590015411376955, 0.04262351989746094, 0.04250419235229492, 
0.04257763290405273, 0.042839710235595706, 0.04266684722900391, 0.042575649261474606, 0.042739742279052736, 0.04283107376098633, 0.04271590423583985, 0.04298947143554688, 0.04294819259643555, 0.04285852813720703, 0.04283644866943359, 0.04282969665527344, 0.04281375885009766, 0.043206462860107424, 0.0431756477355957, 0.04322323226928711, 0.04339251327514648, 0.043753726959228516, 0.043211105346679685, 0.04310630416870117, 0.04313497543334961, 0.043286529541015625, 0.04311859130859375, 0.04303366470336914, 0.04313183975219727, 0.04319801712036133, 0.04310265731811523, 0.043259166717529295, 0.04303702545166015, 0.04301862335205078, 0.0431957778930664, 0.043294368743896486, 0.04334793472290039, 0.043321441650390625, 0.04332588958740234, 0.04345849609375, 0.043743873596191404, 0.04394780731201172, 0.043706497192382815, 0.0435868148803711, 0.04345727920532227, 0.04352380752563476, 0.043716896057128904, 0.043431934356689454, 0.043431198120117184, 0.04325449752807617, 0.0434442253112793, 0.043453857421875, 0.043378913879394534, 0.04325619125366211, 0.04315545654296875, 0.043235294342041014, 0.0432918701171875, 0.04321260833740234, 0.043221343994140626, 0.043482975006103514, 0.04373503875732422, 0.0433438720703125, 0.042869857788085934, 0.04244303894042969, 0.04281126403808594, 0.04281532669067383, 0.042861473083496096, 0.04284604644775391, 0.042551647186279296, 0.042618942260742185, 0.042727169036865235, 0.04256288146972656, 0.04246953582763672, 0.04285699081420898, 0.04296908950805664, 0.04283801651000976, 0.042663040161132815, 0.04285494232177734, 0.04300425720214844, 0.04329987335205078, 0.04294755172729492, 0.0431077766418457, 0.04337107086181641, 0.04399257659912109, 0.043065185546875, 0.04302499389648438, 0.04297324752807617, 0.04286259078979492, 0.04294451141357422, 0.042971134185791016, 0.04294179153442383, 0.04327596664428711, 0.04322617721557617, 0.043224990844726564, 0.04304620742797852, 0.04308652877807617, 0.04316908645629883, 0.04309676742553711, 0.04315321731567383, 0.043198143005371094, 0.04366592025756836, 0.04333158493041992, 0.043286529541015625, 0.0433807373046875, 0.04355072021484375, 0.043837440490722655, 0.0436317138671875, 0.043598751068115234, 0.043493377685546876, 0.043464702606201173, 0.04350339126586914, 0.04335164642333984, 0.043372318267822264, 0.04350553512573242, 0.04329964828491211, 0.043323585510253906, 0.04344828796386719, 0.04356095886230469, 0.04336640167236328, 0.04333932876586914, 0.04326649475097656, 0.0434031982421875, 0.04347475051879883, 0.043493824005126955, 0.0434466552734375, 0.042961406707763675, 0.04258816146850586, 0.04237491226196289, 0.0425904312133789, 0.04287472152709961, 0.04278620910644531, 0.042750751495361325, 0.042902751922607424, 0.04293097686767578, 0.042601631164550784, 0.042455230712890625, 0.0426420783996582, 0.04262092971801758, 0.042947681427001956, 0.04277945709228516, 0.04466201782226562, 0.043037151336669924, 0.04289984130859375, 0.043076961517333985, 0.04303699111938476, 0.04336470413208008, 0.043140350341796876, 0.04316032028198242, 0.04303804779052734, 0.04292470550537109, 0.04273775863647461, 0.04274319839477539, 0.04287744140625, 0.04291743850708008, 0.04319891357421875, 0.04297430419921875, 0.04307241439819336, 0.04291961669921875, 0.042983745574951174, 0.04280723190307617, 0.042842174530029295, 0.043081310272216795, 0.04309852981567383, 0.04304451370239258, 0.04316195297241211, 0.0431366081237793, 0.043380767822265624, 0.043302879333496094, 0.043356449127197265, 0.04349932861328125, 0.04349363327026367, 0.04365727996826172, 
0.04440883255004883, 0.043609569549560544, 0.04342335891723633, 0.04326902389526367, 0.04323433685302734, 0.04366640090942383, 0.04363241577148438, 0.043221214294433596, 0.04323657608032227, 0.04325046539306641, 0.043358207702636715, 0.04333158493041992, 0.0432632942199707, 0.04330361557006836, 0.04342761611938477, 0.04371388626098633, 0.043098785400390624, 0.042563201904296875, 0.04244057464599609, 0.042503841400146486, 0.04265804672241211, 0.04280790328979492, 0.04281939315795898, 0.042627391815185545, 0.04263718414306641, 0.04277248001098633, 0.042700542449951175, 0.04281756973266602, 0.04277881622314453, 0.04265577697753906, 0.042567680358886716, 0.042668094635009764, 0.04280313491821289, 0.0429936637878418, 0.04319641494750977, 0.04321033477783203, 0.04310467147827148, 0.04322880172729492, 0.04341798400878906, 0.04323638534545898, 0.043170753479003905, 0.04297846221923828, 0.042953601837158205, 0.04297040176391602, 0.042947296142578126, 0.04293017578125, 0.042831871032714845, 0.04287692642211914, 0.042901344299316406, 0.04303478240966797, 0.04291788864135742, 0.042891265869140625, 0.042937408447265624, 0.0429249267578125, 0.04334188842773438, 0.04316928100585937, 0.043332096099853515, 0.04335142517089844, 0.043520641326904294, 0.0436894416809082, 0.04355491256713867, 0.043565441131591796, 0.04345043182373047, 0.04344416046142578, 0.04338070297241211, 0.043286624908447265, 0.04347417449951172, 0.04339993667602539, 0.043493377685546876, 0.04340256118774414, 0.043401920318603515, 0.04323942565917969, 0.04311977767944336, 0.043397441864013675, 0.043363998413085934, 0.04349244689941406, 0.04340038299560547, 0.04480051040649414, 0.04347292709350586, 0.042930912017822266, 0.04253804779052734, 0.042633567810058594, 0.04266649627685547, 0.0425780143737793, 0.042784160614013675, 0.04270755386352539, 0.042634750366210936, 0.04253676986694336, 0.042662593841552736, 0.04255539321899414, 0.042705982208251954, 0.04276319885253906, 0.04299494552612305, 0.04333849716186523, 0.04275350570678711, 0.04280732727050781, 0.042791263580322265, 0.04305276870727539, 0.04317193603515625, 0.04294879913330078, 0.04318838500976562, 0.043177982330322266, 0.042995712280273435, 0.04293340682983399, 0.042797920227050784, 0.04273347091674805, 0.04314064025878906, 0.04302905654907226, 0.04290873718261719, 0.04298438262939453, 0.04276838302612305, 0.0427762565612793, 0.042974910736083984, 0.04303756713867188, 0.04307235336303711, 0.04315180969238281, 0.04300377655029297, 0.04308643341064453, 0.042954177856445314, 0.04308230209350586, 0.04315955352783203, 0.04314012908935547, 0.04346364974975586, 0.043495712280273435, 0.0433573112487793, 0.043315486907958986, 0.04331552124023438, 0.043292671203613284, 0.04334592056274414, 0.04326950454711914, 0.04332112121582031, 0.043192161560058596, 0.04305926513671875, 0.04319865417480469, 0.043346145629882815, 0.043248126983642575, 0.04320604705810547, 0.043460704803466796, 0.04378883361816406, 0.0435939826965332, 0.04356995010375977, 0.04345804977416992, 0.042869056701660156, 0.0426148796081543, 0.0424403190612793, 0.04246380615234375, 0.042390785217285155, 0.042557247161865236, 0.04263151931762695, 0.04278953552246094, 0.04264748764038086, 0.04280934524536133, 0.04256668853759766, 0.042468318939208986, 0.04252396774291992, 0.04256217575073242, 0.04256774520874024, 0.04284323120117187, 0.04304169464111328, 0.04309158325195313, 0.043106689453125, 0.043081729888916016, 0.04314112091064453, 0.042905025482177735, 0.04289388656616211, 0.04310220718383789, 0.043804672241210936, 
0.043069438934326174, 0.043038593292236325, 0.04280854415893555, 0.042844959259033207, 0.0428600959777832, 0.04294303894042969, 0.04299161529541016, 0.042909664154052736, 0.042926048278808596, 0.043347808837890626, 0.04315286254882812, 0.04312345504760742, 0.04296860885620117, 0.04306172943115234, 0.04319843292236328, 0.043292705535888674, 0.04332876968383789, 0.043289470672607425, 0.043299999237060544, 0.04336304092407227, 0.04334511947631836, 0.04338358306884765, 0.04333772659301758, 0.04340073776245117, 0.04325542449951172, 0.043141983032226563, 0.0432375373840332, 0.04329635238647461, 0.0432474250793457, 0.04322758483886719, 0.04330086517333984, 0.043630592346191405, 0.043351455688476564, 0.04330723190307617, 0.04334630584716797, 0.043235328674316405, 0.043541568756103516]",tokens/s,23.161633067246036,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,874.82368,601.751552,0.0,199.22944,181.241344,s,1,7.609712890625,7.609712890625,0.0,7.609712890625,7.609712890625,7.609712890625,7.609712890625,[7.609712890625],,kWh,1.523218246665389e-05,1.673019634453695e-06,4.512503609999108e-06,2.1417705711106694e-05,,MB,1331.22048,658.374656,0.0,241.17248,225.803264,s,16,0.20963401603698734,0.013102126002311705,6.758385017720097e-05,0.013090928077697754,0.013177184104919432,0.013189184188842773,0.01320124168395996,"[0.01291977596282959, 0.013167200088500976, 0.013060511589050293, 0.013204256057739257, 0.013160927772521973, 0.01305583953857422, 0.013170207977294922, 0.013070752143859863, 0.013088831901550293, 0.013119104385375977, 0.013043071746826172, 0.01308505630493164, 0.013129440307617188, 0.013184160232543944, 0.013081855773925781, 0.013093024253845216]",tokens/s,19538.81377379763,kWh,3.869660549458439e-07,4.26755320812715e-08,2.1465755457292492e-07,6.442991416000403e-07,tokens/kWh,397330965.49570817,MB,1370.812416,673.05472,0.0,255.852544,225.805824,s,16,9.79578155517578,0.6122363471984864,0.001720885806545329,0.612547576904297,0.6142755737304688,0.6150134277343751,0.615147314453125,"[0.6149576416015625, 0.6123853149414062, 0.6119273071289062, 0.6151807861328125, 0.6099966430664062, 0.6135643920898437, 0.6131492309570312, 0.613593505859375, 0.6134678955078126, 0.6127098388671876, 0.6115953369140625, 0.6128223876953125, 0.6104877319335937, 0.6095772705078125, 0.6095999755859375, 0.6107662963867188]",tokens/s,102.90143714642193,kWh,1.8656132445834464e-05,2.057373956881256e-06,6.97149402767747e-06,2.7685000430393195e-05,tokens/kWh,2275600.470312337,,s,1008,9.788004720687862,0.009710322143539549,0.00015099873226139787,0.009684304237365722,0.009800239849090576,0.009897614479064941,0.010486351537704459,"[0.009378527641296387, 0.00971078395843506, 0.00965283203125, 0.009756352424621582, 0.009669216156005859, 0.009766912460327149, 0.00964566421508789, 0.009698719978332519, 0.009649151802062989, 0.009621503829956055, 0.009695232391357422, 
0.0096910400390625, 0.00975267219543457, 0.00973414421081543, 0.009704959869384766, 0.009752608299255372, 0.009705727577209472, 0.009859104156494141, 0.00967420768737793, 0.009709504127502442, 0.009668831825256347, 0.00970143985748291, 0.009792223930358886, 0.009676639556884765, 0.009742591857910156, 0.009641183853149413, 0.009676639556884765, 0.011713408470153808, 0.01059609603881836, 0.009736191749572755, 0.009665727615356445, 0.009684991836547852, 0.009711808204650878, 0.009789088249206542, 0.009725184440612792, 0.009696703910827636, 0.009679488182067871, 0.009717408180236817, 0.009641792297363281, 0.009660927772521973, 0.009657952308654785, 0.010162079811096191, 0.009693056106567384, 0.009777471542358399, 0.009722975730895997, 0.009743264198303223, 0.00970137596130371, 0.009694879531860351, 0.009725855827331544, 0.009676480293273925, 0.009689855575561523, 0.009645824432373046, 0.009748224258422852, 0.009773568153381347, 0.009667872428894043, 0.009752863883972169, 0.009667167663574219, 0.009692352294921875, 0.009642848014831543, 0.009687199592590331, 0.009688799858093261, 0.00973423957824707, 0.009786496162414551, 0.009465855598449707, 0.009729887962341309, 0.0097009916305542, 0.009723808288574219, 0.009672927856445312, 0.009810015678405762, 0.009774944305419923, 0.009798015594482422, 0.009827872276306153, 0.009826144218444825, 0.009812704086303711, 0.009777183532714845, 0.00976863956451416, 0.009724191665649414, 0.009695455551147461, 0.009778271675109864, 0.009743103981018067, 0.009729984283447266, 0.009713760375976562, 0.009707712173461914, 0.009708352088928222, 0.009720895767211914, 0.009722784042358398, 0.00971872043609619, 0.009666560173034668, 0.009641983985900878, 0.00963817596435547, 0.00963145637512207, 0.009703424453735352, 0.009708864212036133, 0.009672639846801757, 0.009675616264343262, 0.009661503791809082, 0.009732288360595703, 0.009887840270996094, 0.009783103942871093, 0.009675519943237305, 0.009672575950622559, 0.009689120292663575, 0.00963798427581787, 0.009711615562438965, 0.009682559967041015, 0.009716095924377442, 0.009713664054870605, 0.00969324779510498, 0.009670080184936524, 0.009652735710144043, 0.009758720397949219, 0.009691136360168457, 0.009711615562438965, 0.009705408096313477, 0.009719391822814942, 0.009708000183105468, 0.009672703742980958, 0.009711808204650878, 0.009697216033935547, 0.009725055694580077, 0.00967347240447998, 0.009685088157653808, 0.009685088157653808, 0.009785152435302735, 0.009702848434448243, 0.00970400047302246, 0.009390111923217773, 0.009686495780944824, 0.009698016166687012, 0.009662431716918945, 0.00973516845703125, 0.009737024307250977, 0.009739551544189453, 0.009670623779296875, 0.009603167533874512, 0.009681568145751954, 0.009631039619445801, 0.009681599617004395, 0.009684991836547852, 0.009686816215515136, 0.00983420753479004, 0.009763551712036132, 0.009785120010375976, 0.009759072303771973, 0.009799327850341797, 0.009836544036865234, 0.009844799995422364, 0.009795167922973632, 0.009740863800048828, 0.009652000427246094, 0.009713248252868652, 0.009722271919250488, 0.009625920295715332, 0.00967795181274414, 0.009716287612915039, 0.009727487564086914, 0.009758720397949219, 0.009705599784851074, 0.009746975898742676, 0.009657855987548827, 0.00969257640838623, 0.009759807586669922, 0.009687071800231933, 0.00971555233001709, 0.009772895812988281, 0.009674336433410645, 0.009687295913696289, 0.009727968215942382, 0.00971401596069336, 0.009678144454956055, 0.009713567733764649, 0.009666720390319824, 0.009687583923339843, 
0.009672800064086913, 0.009692352294921875, 0.009687135696411133, 0.00969929599761963, 0.009710335731506347, 0.009733535766601563, 0.009683039665222168, 0.009654784202575683, 0.009742048263549805, 0.00972390365600586, 0.009642271995544434, 0.009713664054870605, 0.0096495361328125, 0.009709983825683595, 0.009686431884765624, 0.009675583839416503, 0.009451807975769043, 0.009762944221496582, 0.009787103652954102, 0.009836895942687988, 0.009799615859985351, 0.009801695823669434, 0.009777183532714845, 0.009736191749572755, 0.010002623558044434, 0.00970956802368164, 0.00978105640411377, 0.00979535961151123, 0.00991215991973877, 0.009752991676330567, 0.009734111785888672, 0.009791040420532226, 0.009771455764770507, 0.009762975692749023, 0.009639360427856446, 0.009750944137573242, 0.009750304222106933, 0.00972208023071289, 0.009676192283630371, 0.009680543899536133, 0.009804608345031739, 0.009667840003967285, 0.009718560218811035, 0.009699328422546387, 0.00965334415435791, 0.00970684814453125, 0.009673600196838378, 0.009771039962768555, 0.009626367568969726, 0.009756735801696777, 0.009678784370422364, 0.009735296249389649, 0.009764063835144042, 0.009780991554260253, 0.009725855827331544, 0.009684703826904296, 0.009764927864074707, 0.009641792297363281, 0.009669183731079101, 0.009623295783996582, 0.00961900806427002, 0.009695775985717773, 0.009668607711791993, 0.009635968208312988, 0.009651200294494629, 0.010290047645568848, 0.010686464309692383, 0.010522527694702149, 0.00970259189605713, 0.009735072135925293, 0.009750528335571289, 0.009696288108825684, 0.00983465576171875, 0.009679840087890626, 0.009624928474426269, 0.009593343734741211, 0.009682559967041015, 0.009613247871398925, 0.00968883228302002, 0.009385984420776367, 0.009863167762756348, 0.009660415649414063, 0.00972332763671875, 0.009640159606933594, 0.009556127548217773, 0.009639776229858399, 0.009627840042114259, 0.009641663551330566, 0.009690624237060547, 0.009669440269470215, 0.009661760330200195, 0.00967148780822754, 0.0097542724609375, 0.009692895889282226, 0.009729951858520507, 0.009786111831665039, 0.00962713623046875, 0.009658368110656738, 0.009660927772521973, 0.009902079582214356, 0.009654272079467773, 0.00964844799041748, 0.009598943710327148, 0.009576512336730956, 0.009609919548034668, 0.009632736206054688, 0.00960256004333496, 0.009620991706848145, 0.009593855857849122, 0.009633664131164552, 0.009600255966186523, 0.009620351791381836, 0.009866975784301757, 0.0096527042388916, 0.009826208114624023, 0.00975436782836914, 0.009658047676086426, 0.009640416145324708, 0.00978121566772461, 0.0097357759475708, 0.0097489595413208, 0.009622912406921386, 0.009645759582519531, 0.009718879699707032, 0.009641632080078125, 0.009644191741943359, 0.009938943862915038, 0.00965452766418457, 0.009625632286071778, 0.009635231971740722, 0.00964236831665039, 0.009599072456359863, 0.009652064323425294, 0.0096046724319458, 0.009628095626831056, 0.009736191749572755, 0.0097958402633667, 0.009848896026611329, 0.00964303970336914, 0.009634559631347656, 0.009631520271301269, 0.009652352333068848, 0.009379167556762696, 0.009737088203430175, 0.009769280433654786, 0.009727359771728516, 0.009683520317077636, 0.009702976226806641, 0.00990227222442627, 0.00966204833984375, 0.009707903861999512, 0.009657759666442872, 0.009663104057312012, 0.009713664054870605, 0.00972390365600586, 0.009684896469116211, 0.00970588779449463, 0.009662240028381348, 0.009731552124023438, 0.0097325439453125, 0.009672703742980958, 0.00963696002960205, 0.009741215705871583, 
0.009663871765136719, 0.009679488182067871, 0.009973183631896972, 0.009757247924804688, 0.010553695678710938, 0.010306719779968262, 0.010275551795959473, 0.009700960159301757, 0.009666303634643554, 0.009701343536376953, 0.00963811206817627, 0.009687295913696289, 0.009787391662597657, 0.009652223587036133, 0.009776448249816895, 0.009668864250183106, 0.00966256046295166, 0.009685343742370605, 0.00962723159790039, 0.009797183990478516, 0.00967750358581543, 0.009666720390319824, 0.009666848182678222, 0.009632543563842773, 0.00970963191986084, 0.009659520149230957, 0.009644191741943359, 0.009944671630859376, 0.009821184158325195, 0.009910271644592286, 0.009620479583740234, 0.009744383811950684, 0.009641983985900878, 0.009645792007446289, 0.009623776435852051, 0.009631808280944825, 0.009627936363220214, 0.009629407882690429, 0.00967024040222168, 0.009679519653320312, 0.00973299217224121, 0.00964851188659668, 0.009414655685424805, 0.009716927528381348, 0.009662816047668457, 0.009712096214294433, 0.009662464141845703, 0.00971731185913086, 0.009671104431152345, 0.010227711677551269, 0.009782719612121581, 0.009702015876770019, 0.009939904212951661, 0.00982528018951416, 0.009684991836547852, 0.009828255653381348, 0.009750816345214844, 0.009703231811523437, 0.00974847984313965, 0.009670528411865234, 0.009754752159118652, 0.009682944297790527, 0.00969315242767334, 0.009875712394714355, 0.009665727615356445, 0.00963747215270996, 0.009651200294494629, 0.009652383804321289, 0.00978927993774414, 0.009744383811950684, 0.009655712127685546, 0.009654879570007324, 0.009764863967895507, 0.009631584167480469, 0.009715871810913086, 0.00961945629119873, 0.009701312065124512, 0.009664575576782226, 0.010124768257141113, 0.009679360389709473, 0.009760800361633301, 0.009659711837768554, 0.009778047561645508, 0.009678848266601562, 0.009682751655578614, 0.009637791633605957, 0.009699584007263184, 0.009671680450439453, 0.00971072006225586, 0.009917951583862305, 0.009664640426635743, 0.009704575538635254, 0.00963868808746338, 0.00976905632019043, 0.009667840003967285, 0.00971452808380127, 0.009698687553405761, 0.009721664428710938, 0.00992131233215332, 0.009641087532043457, 0.009739328384399414, 0.009639552116394043, 0.009750752449035645, 0.00973209571838379, 0.009675840377807617, 0.00939680004119873, 0.009863167762756348, 0.009658271789550782, 0.009668704032897948, 0.009629856109619141, 0.009675968170166015, 0.009648799896240234, 0.009700960159301757, 0.009646495819091798, 0.009872511863708496, 0.009806464195251465, 0.00967903995513916, 0.00969257640838623, 0.009705696105957031, 0.009691712379455566, 0.009729920387268066, 0.009688384056091308, 0.00972275161743164, 0.009706368446350097, 0.009626560211181641, 0.009664511680603028, 0.009627967834472657, 0.009666272163391113, 0.010286144256591797, 0.011024736404418945, 0.009785792350769044, 0.009709823608398437, 0.009754495620727539, 0.009771007537841797, 0.00960643196105957, 0.009684703826904296, 0.009636863708496094, 0.009652223587036133, 0.009676799774169922, 0.009687040328979492, 0.009724032402038575, 0.009635552406311036, 0.00967404842376709, 0.009758591651916504, 0.009683648109436034, 0.00971395206451416, 0.009691136360168457, 0.009666560173034668, 0.00971571159362793, 0.009660415649414063, 0.009746432304382324, 0.009646080017089843, 0.00965993595123291, 0.009661087989807129, 0.0099585599899292, 0.009898655891418457, 0.009711615562438965, 0.009668095588684082, 0.009603584289550781, 0.00971776008605957, 0.009675040245056152, 0.009707167625427246, 0.009744447708129883, 
0.009701472282409668, 0.009771167755126953, 0.009907967567443847, 0.0096943359375, 0.009666751861572266, 0.009380767822265625, 0.009678239822387696, 0.009658975601196289, 0.009659456253051759, 0.00967910385131836, 0.009748191833496093, 0.009747679710388184, 0.009639936447143555, 0.009663519859313964, 0.009664287567138672, 0.009608127593994141, 0.009713664054870605, 0.009693087577819825, 0.009693280220031738, 0.009646080017089843, 0.009758175849914551, 0.00969372844696045, 0.009705599784851074, 0.00966748809814453, 0.009630144119262695, 0.009634559631347656, 0.009849760055541992, 0.009661151885986327, 0.009652576446533202, 0.009643839836120606, 0.009705632209777833, 0.009822303771972657, 0.009626751899719238, 0.009701631546020507, 0.00962598419189453, 0.010499872207641602, 0.01065187168121338, 0.010602496147155761, 0.00977468776702881, 0.009875071525573731, 0.009734272003173828, 0.00969382381439209, 0.00968502426147461, 0.009836095809936524, 0.009880000114440918, 0.009707072257995605, 0.009608927726745606, 0.009659104347229004, 0.009618656158447265, 0.009608063697814941, 0.009668831825256347, 0.009676128387451173, 0.009668959617614745, 0.009646080017089843, 0.00960102367401123, 0.009808128356933593, 0.009631487846374511, 0.009633088111877441, 0.009661120414733887, 0.009710847854614258, 0.009648480415344239, 0.009658464431762695, 0.009682432174682617, 0.009733216285705566, 0.009643808364868164, 0.00983836841583252, 0.009707327842712402, 0.009671008110046387, 0.009339808464050293, 0.009689087867736817, 0.009683039665222168, 0.009686079978942871, 0.009620320320129395, 0.00972390365600586, 0.009641311645507812, 0.009650400161743164, 0.009652576446533202, 0.009584735870361329, 0.009647808074951171, 0.009621824264526367, 0.00964025592803955, 0.009715456008911133, 0.009781184196472168, 0.009738240242004394, 0.009748543739318848, 0.00974841594696045, 0.009907808303833008, 0.009718527793884277, 0.009658271789550782, 0.009716608047485352, 0.009689984321594238, 0.009665535926818849, 0.009728863716125488, 0.009932415962219238, 0.009851424217224122, 0.009796607971191406, 0.009698304176330566, 0.009648127555847168, 0.009687040328979492, 0.009653471946716308, 0.009677599906921387, 0.009705471992492675, 0.00989568042755127, 0.009758048057556153, 0.009705471992492675, 0.009732224464416505, 0.009644736289978027, 0.009754719734191895, 0.009684991836547852, 0.010029024124145507, 0.009733407974243165, 0.009636608123779297, 0.00971132755279541, 0.009652128219604492, 0.009660256385803223, 0.009685279846191407, 0.009656000137329101, 0.009691712379455566, 0.009770976066589356, 0.009717791557312012, 0.009668607711791993, 0.009653951644897461, 0.009744704246520996, 0.009634143829345703, 0.009682368278503418, 0.00965766429901123, 0.0098721923828125, 0.009916095733642579, 0.010150303840637207, 0.009770400047302246, 0.009682815551757812, 0.009576128005981446, 0.00970751953125, 0.009728320121765137, 0.009761759757995606, 0.009677536010742187, 0.009668160438537597, 0.009760736465454101, 0.00967523193359375, 0.009692288398742676, 0.00964083194732666, 0.00982630443572998, 0.009670816421508788, 0.009631263732910155, 0.009699040412902833, 0.009736672401428222, 0.009750656127929688, 0.009727392196655273, 0.009639967918395996, 0.009672767639160156, 0.009708383560180663, 0.009737215995788574, 0.009630208015441894, 0.009680288314819336, 0.009648896217346192, 0.009613247871398925, 0.009686880111694336, 0.009713184356689454, 0.009698304176330566, 0.009755552291870117, 0.009654656410217285, 0.009666303634643554, 
0.009738592147827149, 0.00968735980987549, 0.009684096336364746, 0.00969974422454834, 0.009654751777648926, 0.00967632007598877, 0.009752287864685058, 0.009650943756103515, 0.009680895805358887, 0.009619392395019532, 0.009685248374938964, 0.009628767967224122, 0.009648223876953126, 0.009708095550537109, 0.009648192405700683, 0.00962326431274414, 0.00964022445678711, 0.00963913631439209, 0.009677599906921387, 0.01009663963317871, 0.009727999687194825, 0.009832448005676269, 0.009668607711791993, 0.009656160354614259, 0.009715295791625977, 0.00971014404296875, 0.00967302417755127, 0.009873087882995606, 0.009781248092651367, 0.009731648445129394, 0.009678751945495605, 0.009791071891784669, 0.009849472045898438, 0.00992972755432129, 0.009734368324279786, 0.010288064002990722, 0.009733792304992675, 0.00979372787475586, 0.009733375549316406, 0.009751296043395996, 0.010035200119018555, 0.00973020839691162, 0.009705599784851074, 0.009696224212646484, 0.009757535934448242, 0.009741600036621095, 0.00977779197692871, 0.009746784210205079, 0.00973910427093506, 0.009861951828002929, 0.009823488235473634, 0.00965503978729248, 0.009666560173034668, 0.009727999687194825, 0.00965129566192627, 0.009675616264343262, 0.009674240112304687, 0.009691712379455566, 0.009698752403259278, 0.009654911994934082, 0.009662752151489257, 0.009643679618835449, 0.00970787239074707, 0.009786016464233398, 0.009683967590332031, 0.009686271667480468, 0.009766752243041993, 0.009775487899780274, 0.009664159774780273, 0.009680959701538086, 0.009771488189697265, 0.009828672409057618, 0.009675968170166015, 0.009677663803100586, 0.00965993595123291, 0.009656800270080566, 0.009665632247924804, 0.009659168243408203, 0.0096014404296875, 0.00960854434967041, 0.009605504035949708, 0.009711808204650878, 0.009723711967468261, 0.010065312385559083, 0.00968992042541504, 0.009596672058105469, 0.00966374397277832, 0.00961411190032959, 0.009606656074523925, 0.009675104141235352, 0.009574048042297363, 0.009624064445495606, 0.00960524845123291, 0.009597824096679688, 0.00961023998260498, 0.009421216011047364, 0.009784704208374023, 0.009763456344604493, 0.01004748821258545, 0.009707232475280762, 0.009642271995544434, 0.009756159782409669, 0.009632224082946778, 0.009642016410827637, 0.010061823844909668, 0.00959062385559082, 0.009685152053833008, 0.009703136444091797, 0.009652511596679687, 0.009859199523925782, 0.009690912246704101, 0.009656415939331055, 0.00961945629119873, 0.009637887954711915, 0.009623871803283692, 0.009640928268432616, 0.009612000465393067, 0.009613311767578125, 0.009596927642822266, 0.00961235237121582, 0.009595840454101563, 0.009624671936035157, 0.009568511962890625, 0.009722528457641601, 0.009586784362792969, 0.009596832275390625, 0.009676799774169922, 0.009627584457397462, 0.009625663757324219, 0.00960905647277832, 0.009692671775817872, 0.009665184020996094, 0.00970963191986084, 0.009676959991455078, 0.009614751815795899, 0.009982208251953124, 0.009915840148925782, 0.009728704452514648, 0.009660160064697266, 0.009627903938293457, 0.00970137596130371, 0.009646080017089843, 0.009930720329284668, 0.009683135986328126, 0.00968892765045166, 0.009637887954711915, 0.00960102367401123, 0.00963327980041504, 0.009683327674865723, 0.009635968208312988, 0.009647456169128418, 0.009673376083374024, 0.00964025592803955, 0.009752256393432617, 0.009668607711791993, 0.009672703742980958, 0.009646080017089843, 0.00970531177520752, 0.009380448341369628, 0.009729184150695801, 0.009707615852355958, 0.009707551956176758, 0.009677536010742187, 
0.009702560424804687, 0.009599648475646972, 0.009625951766967773, 0.009840703964233399, 0.00963587188720703, 0.00974841594696045, 0.00965782356262207, 0.00967305564880371, 0.0096845121383667, 0.009745951652526855, 0.009655424118041992, 0.009714752197265626, 0.009652064323425294, 0.009587519645690918, 0.009599072456359863, 0.009655712127685546, 0.009624159812927247, 0.00960041618347168, 0.009545344352722168, 0.009612256050109864, 0.009605119705200196, 0.009629695892333985, 0.00959228801727295, 0.009553664207458496, 0.009602848052978516, 0.009675135612487793, 0.00959552001953125, 0.009574399948120118, 0.009578304290771484, 0.009641152381896972, 0.009607583999633788, 0.009623135566711426, 0.009624768257141113, 0.00957420825958252, 0.009682944297790527, 0.009794591903686523, 0.009710559844970703, 0.009597023963928223, 0.009582719802856444, 0.009625215530395508, 0.009607616424560548, 0.009588128089904785, 0.009584799766540527, 0.009582719802856444, 0.009702783584594726, 0.00966329574584961, 0.009594623565673827, 0.009864352226257323, 0.010728384017944336, 0.009596351623535157, 0.009574975967407227, 0.009573984146118163, 0.009621919631958008, 0.009598976135253906, 0.009584383964538574, 0.009599231719970703, 0.009612959861755371, 0.010572159767150879, 0.009334912300109863, 0.00972531223297119, 0.009654784202575683, 0.009666144371032715, 0.009703359603881835, 0.009662943840026856, 0.009705471992492675, 0.009758912086486816, 0.009710559844970703, 0.009648032188415527, 0.009720704078674317, 0.009655455589294433, 0.009657247543334961, 0.009694560050964356, 0.009644031524658203, 0.009939519882202149, 0.009871359825134277, 0.009735520362854005, 0.009808992385864258, 0.00973910427093506, 0.009677696228027343, 0.009619199752807617, 0.010004192352294923, 0.009644512176513673, 0.009670047760009766, 0.009637920379638672, 0.009677375793457031, 0.00965552043914795, 0.009620320320129395, 0.009657376289367676, 0.009622431755065919, 0.009655360221862794, 0.009654560089111329, 0.009576543807983399, 0.00970793628692627, 0.009612575531005859, 0.009775360107421876, 0.009703424453735352, 0.009699584007263184, 0.009634400367736816, 0.009629471778869629, 0.009654175758361817, 0.009575776100158691, 0.009642623901367187, 0.009602815628051757, 0.009595264434814453, 0.009809920310974121, 0.00961740779876709, 0.009614656448364258, 0.009646783828735352, 0.00961945629119873, 0.009629471778869629, 0.009692640304565429, 0.009658623695373535, 0.009624064445495606, 0.009590784072875976, 0.009613311767578125, 0.00962559986114502, 0.009729824066162109, 0.009619680404663086, 0.009607040405273437, 0.009592960357666016, 0.009588319778442383, 0.009335776329040528, 0.009822208404541016, 0.009659744262695313, 0.009716352462768555, 0.009672672271728515, 0.009618528366088867, 0.00965452766418457, 0.0096428804397583, 0.009650176048278808, 0.009770175933837891, 0.009646464347839355, 0.00968876838684082, 0.009615967750549317, 0.00962764835357666, 0.009736191749572755, 0.009815423965454102, 0.009763232231140137, 0.00963532829284668, 0.009665216445922852, 0.00967683219909668, 0.00969651222229004, 0.009640128135681152, 0.009808064460754395, 0.009743935585021972, 0.00967353630065918, 0.009694239616394044, 0.009630687713623047, 0.00958291244506836, 0.00967033576965332, 0.009763968467712403, 0.00966540813446045, 0.009633024215698243, 0.009656224250793457, 0.009644895553588868, 0.009691136360168457, 0.009750528335571289, 0.009667936325073243, 0.009695679664611816, 0.009652447700500489, 0.009635456085205078, 0.00962716770172119, 0.009593855857849122, 
0.009752415657043456, 0.009681920051574706, 0.009651424407958985, 0.00968172836303711, 0.009662655830383301, 0.009691935539245605, 0.009567808151245118, 0.009683615684509277, 0.00964185619354248, 0.009648032188415527, 0.009653759956359862, 0.009639904022216797, 0.009726719856262207, 0.009750528335571289, 0.010079168319702148, 0.00971452808380127, 0.009762816429138184, 0.009699328422546387, 0.009695232391357422, 0.009984224319458009, 0.009682720184326171]",tokens/s,102.9831951214222,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7346.364416,8048.738304,0.0,7646.216192,7627.584,s,1,13.3150380859375,13.3150380859375,0.0,13.3150380859375,13.3150380859375,13.3150380859375,13.3150380859375,[13.3150380859375],,kWh,0.00017400211561249154,1.918628030314691e-05,5.574587793000241e-05,0.00024893427384564085,,MB,1640.206336,8732.409856,0.0,8315.20768,8191.863296,s,10,3.347322021484375,0.3347322021484375,0.001556680523422081,0.33537840270996094,0.3357929138183594,0.3359058258056641,0.3359961553955078,"[0.3356960754394531, 0.3349137878417969, 0.33501318359375, 0.3321338500976562, 0.3312918701171875, 0.3352266845703125, 0.335767822265625, 0.33553012084960937, 0.33572988891601563, 0.33601873779296876]",tokens/s,764.7904753617831,kWh,9.794899431527078e-06,1.0802078404072887e-06,6.506495945933499e-06,1.7381603217867866e-05,tokens/kWh,14728215.619192032,MB,1645.154304,9046.982656,0.0,8629.78048,8480.067584,s,10,26.841802001953123,2.6841802001953123,0.004277086541023966,2.6841704101562502,2.6892189208984374,2.689655847167969,2.690005388183594,"[2.676899169921875, 2.67719921875, 2.683130126953125, 2.683345458984375, 2.683413818359375, 2.684927001953125, 2.68529443359375, 2.689121826171875, 2.688378173828125, 2.6900927734375]",tokens/s,23.47085340820853,kWh,7.84914445972188e-05,8.657679412901368e-06,5.208691203986565e-05,0.00013923603604998585,tokens/kWh,452469.0718527993,,s,630,26.833035770416267,0.042592120270502,0.00040777624677249474,0.04256488037109375,0.04314727554321289,0.043258581733703616,0.043506920623779304,"[0.041979839324951175, 0.04193862533569336, 0.04200688171386719, 0.04189801788330078, 0.04225843048095703, 0.04190003204345703, 0.04186246490478516, 0.04202089691162109, 0.041920352935791015, 0.04185990524291992, 0.041909408569335935, 0.04188655853271484, 0.041972927093505856, 0.04208108901977539, 0.04220284652709961, 0.042176799774169924, 0.04210019302368164, 0.042039329528808594, 0.04239411163330078, 0.04243046569824219, 0.042385406494140625, 0.04230144119262695, 0.04219062423706055, 0.04204886245727539, 0.0422716178894043, 0.04234649658203125, 0.042264575958251956, 0.04257510375976563, 0.042300159454345704, 0.04242950439453125, 0.04258092880249023, 0.042458560943603514, 0.04245145416259766, 0.04254422378540039, 0.042619873046875, 0.042403839111328126, 0.04258611297607422, 0.042641407012939454, 0.04274288177490235, 0.04248463821411133, 0.04252467346191406, 0.04277657699584961, 0.04271513748168945, 0.0429356803894043, 0.04311286544799805, 0.042890750885009765, 0.042938144683837894, 0.04277139282226562, 0.04274720001220703, 0.042584766387939454, 0.04261004638671875, 0.04280793762207031, 0.04308582305908203, 0.04299980926513672, 0.043052223205566405, 0.04279993438720703, 0.04271084976196289, 0.04289932632446289, 
0.04298681640625, 0.04295167922973633, 0.04302438354492188, 0.043225086212158204, 0.043218944549560545, 0.04233830261230469, 0.04193833541870117, 0.04205014419555664, 0.04183635330200195, 0.04186438369750976, 0.04182710266113281, 0.04189795303344727, 0.041742591857910155, 0.04171980667114258, 0.04184473419189453, 0.042090496063232424, 0.04208822250366211, 0.04202313613891601, 0.04237107086181641, 0.042352638244628905, 0.04231167984008789, 0.042526241302490234, 0.04223433685302734, 0.04215193557739258, 0.04211004638671875, 0.04225276947021484, 0.04225606536865235, 0.04223862457275391, 0.04254710388183594, 0.04244908905029297, 0.042239742279052736, 0.042194942474365234, 0.0422413444519043, 0.04228396987915039, 0.04226662445068359, 0.04243046569824219, 0.042420223236083986, 0.042434558868408204, 0.04272742462158203, 0.04263228988647461, 0.042554271697998046, 0.04244883346557617, 0.0427108154296875, 0.04269903945922852, 0.04295475387573242, 0.0429854736328125, 0.04264249420166016, 0.042576831817626955, 0.04261215972900391, 0.042692161560058596, 0.04270332717895508, 0.04267200088500977, 0.04267865753173828, 0.04326607894897461, 0.043159103393554686, 0.04304966354370117, 0.04287065505981445, 0.042778560638427734, 0.04292323303222656, 0.042724319458007816, 0.042616256713867186, 0.042821887969970704, 0.042977535247802734, 0.04300191879272461, 0.043261951446533206, 0.043515903472900394, 0.04313494491577148, 0.04292816162109375, 0.04245100784301758, 0.04212108612060547, 0.04195974349975586, 0.04179190444946289, 0.04225619125366211, 0.04223980712890625, 0.04196799850463867, 0.04196460723876953, 0.04225657653808594, 0.04205644989013672, 0.04207001495361328, 0.04232787322998047, 0.04222355270385742, 0.04224844741821289, 0.042387454986572266, 0.04229324722290039, 0.04220883178710937, 0.042495582580566404, 0.04279177474975586, 0.04236492919921875, 0.04222908782958985, 0.04226934432983399, 0.042305374145507814, 0.04241628646850586, 0.04221667098999023, 0.04228774261474609, 0.04260192108154297, 0.04251849746704101, 0.04242099380493164, 0.04243619155883789, 0.042368671417236325, 0.04255001449584961, 0.04243251037597656, 0.04243865585327149, 0.04252671813964844, 0.04266582489013672, 0.04319427108764649, 0.042991455078125, 0.042819999694824216, 0.04261833572387695, 0.042618976593017575, 0.04265788650512695, 0.04267852783203125, 0.04278486251831055, 0.04295475387573242, 0.043242782592773435, 0.04305379104614258, 0.04289497756958008, 0.04307187271118164, 0.04287075042724609, 0.04269875335693359, 0.042667552947998046, 0.042813953399658204, 0.04334796905517578, 0.04319232177734375, 0.04295987319946289, 0.04267926406860351, 0.04270012664794922, 0.04279571151733398, 0.042997760772705076, 0.04308281707763672, 0.04309670257568359, 0.04345888137817383, 0.043087871551513675, 0.042487808227539066, 0.04213676834106445, 0.04196435165405273, 0.042000385284423826, 0.04186675262451172, 0.042231647491455075, 0.04203955078125, 0.04193072128295899, 0.04184108734130859, 0.041893280029296875, 0.041934497833251955, 0.042116031646728516, 0.04265071868896484, 0.042552223205566404, 0.04224409484863281, 0.042069950103759766, 0.04216019058227539, 0.042172416687011716, 0.04225843048095703, 0.04215398406982422, 0.04220905685424805, 0.04233443069458008, 0.04256358337402344, 0.042585121154785154, 0.04253494262695313, 0.04270995330810547, 0.042708992004394535, 0.042436607360839845, 0.042471424102783206, 0.042657791137695314, 0.04254227066040039, 0.0425109748840332, 0.04260678482055664, 0.04246473693847656, 0.04305964660644531, 
0.04288111877441406, 0.042614463806152345, 0.042365249633789064, 0.04230144119262695, 0.04271104049682617, 0.04271308898925781, 0.04297100830078125, 0.04278607940673828, 0.0427856330871582, 0.043165630340576175, 0.04310185623168945, 0.042705150604248045, 0.04282329559326172, 0.042807422637939456, 0.04283184051513672, 0.04310800170898438, 0.043162399291992185, 0.04298886489868164, 0.042979808807373045, 0.04289945602416992, 0.043171199798583984, 0.04317862319946289, 0.04312067031860352, 0.04340118408203125, 0.04302175903320313, 0.0427874870300293, 0.04273936080932617, 0.04195884704589844, 0.042334110260009765, 0.04220755386352539, 0.042052959442138674, 0.04222159957885742, 0.04223654556274414, 0.042273120880126955, 0.04237516784667969, 0.04217641448974609, 0.0421561279296875, 0.042209121704101564, 0.0419780158996582, 0.04213350296020508, 0.042024959564208986, 0.042075553894042966, 0.04225289535522461, 0.042297344207763675, 0.04237516784667969, 0.042237953186035154, 0.04222265625, 0.04234463882446289, 0.0424189453125, 0.04221887969970703, 0.04237910461425781, 0.04237548828125, 0.0422149772644043, 0.04236355209350586, 0.0423897590637207, 0.04262675094604492, 0.04278681564331055, 0.04255881500244141, 0.04257804870605469, 0.042583934783935545, 0.04259664154052734, 0.04249001693725586, 0.042564128875732424, 0.042608638763427735, 0.04257382583618164, 0.042821632385253904, 0.042700416564941404, 0.04254348754882813, 0.04252057647705078, 0.04265158462524414, 0.04253200149536133, 0.04273004913330078, 0.043353439331054684, 0.04299235153198242, 0.04280271911621094, 0.04269929504394531, 0.04287510299682617, 0.04291584014892578, 0.04275814437866211, 0.04299161529541016, 0.04337596893310547, 0.043412128448486326, 0.04326710510253906, 0.042902305603027345, 0.043071136474609376, 0.042885440826416016, 0.0433337287902832, 0.04370854568481446, 0.04301004791259765, 0.04303792190551758, 0.042523296356201175, 0.042229759216308595, 0.04205926513671875, 0.04224822235107422, 0.04213113784790039, 0.042253055572509766, 0.04205686569213867, 0.041963550567626955, 0.042135841369628904, 0.04232777786254883, 0.04219987106323242, 0.04233577728271484, 0.04222003173828125, 0.04237503814697265, 0.042197120666503905, 0.041990142822265625, 0.042004287719726564, 0.04197548675537109, 0.042146305084228515, 0.04221263885498047, 0.04266169738769531, 0.04258671951293945, 0.04247145462036133, 0.042508449554443356, 0.04236710357666015, 0.042420223236083986, 0.042439903259277344, 0.04245552062988281, 0.04239555358886719, 0.04246726226806641, 0.0428098258972168, 0.0426445426940918, 0.042447296142578125, 0.04242278289794922, 0.04266150283813477, 0.0430428466796875, 0.04280969619750977, 0.042590110778808594, 0.04284214401245117, 0.042767742156982425, 0.04265811157226562, 0.04253852844238281, 0.04254966354370117, 0.04290399932861328, 0.04298070526123047, 0.04292873764038086, 0.04302758407592774, 0.04285945510864258, 0.042813438415527344, 0.04272332763671875, 0.04269833755493164, 0.04296745681762695, 0.04310563278198242, 0.04299388885498047, 0.04317542266845703, 0.0432256965637207, 0.043068992614746095, 0.04382748794555664, 0.04330752182006836, 0.04301004791259765, 0.042980384826660153, 0.04292681503295898, 0.043284736633300784, 0.042352672576904296, 0.04193628692626953, 0.041982559204101565, 0.04195094299316406, 0.0419284782409668, 0.042236415863037106, 0.04207206344604492, 0.042112926483154296, 0.04220435333251953, 0.042390430450439456, 0.04217619323730469, 0.04230995178222656, 0.04231958389282227, 0.04250614547729492, 0.04226015853881836, 
0.04219887924194336, 0.04217958450317383, 0.04214531326293945, 0.04245331192016601, 0.04223577499389648, 0.04222784042358398, 0.04267136001586914, 0.04242480087280273, 0.04230377578735352, 0.04228710556030273, 0.04219625473022461, 0.04259667205810547, 0.04240963363647461, 0.0423878402709961, 0.042492286682128906, 0.04267212677001953, 0.04293836975097656, 0.042891265869140625, 0.0426695671081543, 0.042609153747558595, 0.04279439926147461, 0.042813343048095705, 0.04251308822631836, 0.04231987380981445, 0.04268646240234375, 0.0429117431640625, 0.04275379180908203, 0.0431514892578125, 0.04296857452392578, 0.04284444808959961, 0.04275439834594726, 0.042702079772949215, 0.0428223991394043, 0.043302913665771485, 0.04330495834350586, 0.04307763290405273, 0.04307331085205078, 0.0427624626159668, 0.042692607879638675, 0.043127872467041015, 0.04317279815673828, 0.04309196853637695, 0.043484928131103516, 0.04329289627075195, 0.04304694366455078, 0.04301004791259765, 0.043225086212158204, 0.043284481048583984, 0.04225843048095703, 0.04221132659912109, 0.042213375091552735, 0.04214156723022461, 0.042081729888916015, 0.04265644836425781, 0.042334144592285156, 0.0420145263671875, 0.042084320068359375, 0.04232825469970703, 0.04211516952514648, 0.04214988708496094, 0.04238278579711914, 0.04236326217651367, 0.0421638069152832, 0.04288572692871094, 0.04273971176147461, 0.04253286361694336, 0.04231167984008789, 0.04207567977905274, 0.04205001449584961, 0.042223617553710936, 0.04252048110961914, 0.0425492172241211, 0.04243059158325195, 0.04226591873168945, 0.04240864181518555, 0.04261273574829102, 0.04245270538330078, 0.04226019287109375, 0.04236140823364258, 0.042678207397460935, 0.043026496887207034, 0.042912960052490234, 0.04254515075683594, 0.04244972610473633, 0.042460704803466795, 0.04280124664306641, 0.04303203201293945, 0.04322921752929688, 0.04317273712158203, 0.042881023406982424, 0.04282572937011719, 0.042665985107421874, 0.04260454559326172, 0.04298956680297852, 0.04306739044189453, 0.04298649597167969, 0.04292095947265625, 0.042807296752929686, 0.04338687896728516, 0.04335411071777344, 0.04308992004394531, 0.04284415817260742, 0.04321414566040039, 0.04325446319580078, 0.042992862701416015, 0.043709217071533205, 0.043448318481445314, 0.04304659271240235, 0.04298748779296875, 0.043254112243652346, 0.043276287078857424, 0.04219027328491211, 0.0418903694152832, 0.042003456115722655, 0.04208127975463867, 0.04218198394775391, 0.04218537521362305, 0.04199590301513672, 0.042574207305908204, 0.04256911849975586, 0.042176799774169924, 0.04202323150634766, 0.04207206344604492, 0.042231807708740236, 0.04261068725585938, 0.04244070434570312, 0.04238240051269531, 0.04291376113891601, 0.042535743713378905, 0.04231568145751953, 0.04224844741821289, 0.04223590469360351, 0.04208844757080078, 0.042487808227539066, 0.04250624084472656, 0.04242393493652344, 0.042399681091308594, 0.042375232696533205, 0.04224822235107422, 0.0426187858581543, 0.04262137603759766, 0.042716320037841794, 0.04308259201049805, 0.04300595092773438, 0.04266393661499023, 0.04256563186645508, 0.04248371124267578, 0.04247552108764648, 0.042932193756103514, 0.04295478439331055, 0.042932193756103514, 0.04289539337158203, 0.04279856109619141, 0.04341404724121094, 0.04317788696289063, 0.04299305725097656, 0.04313772964477539, 0.04292601776123047, 0.04267833709716797, 0.042883071899414066, 0.04301619338989258, 0.04290560150146484, 0.04288838577270508, 0.042833984375, 0.04311724853515625, 0.043036415100097654, 0.0431962890625, 0.04324806213378906, 
0.04314726257324219, 0.04339494323730469, 0.04314739227294922, 0.04313087844848633, 0.04312623977661133, 0.04318601608276367, 0.04237516784667969, 0.041885311126708985, 0.04206016159057617, 0.04207001495361328, 0.041990142822265625, 0.042248191833496096, 0.04231078338623047, 0.042188926696777346, 0.0423199348449707, 0.04221747207641602, 0.042330814361572267, 0.04236492919921875, 0.04260992050170898, 0.0426558723449707, 0.04251916885375977, 0.042436607360839845, 0.0423298225402832, 0.042244384765625, 0.04230758285522461, 0.04210441589355469, 0.042014110565185545, 0.04224665451049805, 0.04279142379760742, 0.042534912109375, 0.04225228881835937, 0.04244038391113281, 0.04240374374389649, 0.04244838333129883, 0.04250483322143555, 0.04244012832641601, 0.04246988677978516, 0.042817726135253906, 0.043003681182861325, 0.04306060791015625, 0.042791519165039066, 0.04295721435546875, 0.04371974563598633, 0.04314822387695313, 0.04270809555053711, 0.04278976058959961, 0.04316774368286133, 0.04322099304199219, 0.042923614501953124, 0.04281180953979492, 0.042657665252685543, 0.04264720153808594, 0.04255097579956055, 0.04254390335083008, 0.0427125129699707, 0.04316831970214844, 0.04314521789550781, 0.04302428817749023, 0.04282540893554688, 0.04284419250488281, 0.042969470977783204, 0.043136096954345705, 0.043172767639160156, 0.04311155319213867, 0.043537281036376954, 0.0433070068359375, 0.04322880172729492, 0.04331763076782227, 0.043893856048583986]",tokens/s,23.478521229960215,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,6767.353856,7769.817088,0.0,7367.294976,7351.94368,s,1,12.8867509765625,12.8867509765625,0.0,12.8867509765625,12.8867509765625,12.8867509765625,12.8867509765625,[12.8867509765625],,kWh,0.00016327587415413896,1.8003184592868308e-05,5.1368652206007503e-05,0.00023264771095301477,,MB,1645.051904,8380.08832,0.0,7962.886144,7872.44544,s,10,3.2418509521484373,0.3241850952148438,0.0015105411650276274,0.32444235229492185,0.3255603240966797,0.3261459915161133,0.3266145254516602,"[0.3210010070800781, 0.322678955078125, 0.3233010559082031, 0.32389852905273436, 0.3247224426269531, 0.3267316589355469, 0.32543017578125, 0.3248792419433594, 0.3241622619628906, 0.32504562377929686]",tokens/s,789.6723315744784,kWh,9.446926927554129e-06,1.0417319137976308e-06,6.295587473741944e-06,1.6784246315093702e-05,tokens/kWh,15252397.706400724,MB,1656.262656,8631.74656,0.0,8214.544384,8118.577152,s,10,27.217691650390627,2.7217691650390625,0.005172989513998363,2.721640869140625,2.725898095703125,2.7291671875,2.7317824609374997,"[2.71280712890625, 2.717132080078125, 2.72190673828125, 2.724744873046875, 2.725171630859375, 2.732436279296875, 2.724072021484375, 2.716677978515625, 2.721367919921875, 
2.721375]",tokens/s,23.14670943048024,kWh,7.960932990744507e-05,8.779261182958402e-06,5.269984861145714e-05,0.00014108843970186063,tokens/kWh,446528.4337478514,,s,630,27.214150100708,0.04319706365191747,0.0003528519712173712,0.04319479942321777,0.043600189208984376,0.043674482154846185,0.044100844688415526,"[0.04315951919555664, 0.04282080078125, 0.042427009582519534, 0.04227635192871094, 0.042466014862060544, 0.042657791137695314, 0.04270195388793945, 0.042689407348632816, 0.04267327880859375, 0.04271590423583985, 0.04253504180908203, 0.04262297439575195, 0.04268851089477539, 0.04254617691040039, 0.04247180938720703, 0.04258060836791992, 0.04273971176147461, 0.04302204895019531, 0.04285673522949219, 0.04311040115356445, 0.04292124938964844, 0.042996448516845705, 0.04297318267822266, 0.04314028930664063, 0.04326073455810547, 0.043227264404296875, 0.04265167999267578, 0.04281641769409179, 0.04299235153198242, 0.0431126708984375, 0.04299280166625977, 0.04316223907470703, 0.043120384216308594, 0.04311702346801758, 0.043173152923583986, 0.04322172927856445, 0.042962944030761716, 0.043053054809570314, 0.043105953216552736, 0.04313942337036133, 0.04317744064331055, 0.04306179046630859, 0.04338051223754883, 0.04334198379516602, 0.043173694610595705, 0.0432243537902832, 0.04329571151733398, 0.043144512176513675, 0.04309695816040039, 0.043132736206054685, 0.0433172492980957, 0.04331520080566406, 0.04329471969604492, 0.04322086334228516, 0.04317401504516601, 0.04332921600341797, 0.04356143951416016, 0.04369996643066406, 0.04410192108154297, 0.04413420867919922, 0.04343603134155274, 0.04361644744873047, 0.04337558364868164, 0.04331110382080078, 0.04270489501953125, 0.042505599975585937, 0.04272505569458008, 0.04254105758666992, 0.04243734359741211, 0.04252809524536133, 0.042507137298583984, 0.042683902740478515, 0.04318611145019531, 0.04290790557861328, 0.04281107330322265, 0.04273136138916016, 0.04280956649780274, 0.04265584182739258, 0.04275836944580078, 0.04286860656738281, 0.042888702392578124, 0.04299212646484375, 0.04306572723388672, 0.04304076766967774, 0.043286529541015625, 0.04332953643798828, 0.04332134246826172, 0.04306697463989258, 0.04287120056152344, 0.043030529022216796, 0.04366131210327148, 0.04306739044189453, 0.04297727966308594, 0.04288675308227539, 0.042702816009521485, 0.0428807373046875, 0.04300054550170898, 0.04297727966308594, 0.04323942565917969, 0.04313087844848633, 0.04317580795288086, 0.043065311431884766, 0.043222305297851565, 0.043119678497314455, 0.043554622650146486, 0.04320665740966797, 0.043655166625976564, 0.04355276870727539, 0.0434031982421875, 0.043313343048095705, 0.04328396987915039, 0.043159934997558595, 0.04313497543334961, 0.043493377685546876, 0.04352000045776367, 0.0433889274597168, 0.04346038436889649, 0.043490943908691404, 0.04351446533203125, 0.0436157112121582, 0.04374787139892578, 0.04367887878417969, 0.04346966552734375, 0.043473983764648436, 0.04356332778930664, 0.04348582458496094, 0.043355358123779296, 0.04303155136108398, 0.0425571517944336, 0.04234348678588867, 0.04252774429321289, 0.04289945602416992, 0.042616832733154295, 0.04265145492553711, 0.042614849090576175, 0.0426740493774414, 0.042784160614013675, 0.0427402572631836, 0.04272364807128906, 0.04316569519042969, 0.04305017471313476, 0.04274873733520508, 0.04272246551513672, 0.0447672004699707, 0.04298403167724609, 0.04305302429199219, 0.043125057220458986, 0.043055103302001956, 0.04320035171508789, 0.04322694396972656, 0.04313123321533203, 0.04291993713378906, 0.04292099380493164, 
0.04296393585205078, 0.04319427108764649, 0.04322313690185547, 0.043030529022216796, 0.04313478469848633, 0.04294412612915039, 0.042892063140869144, 0.04305487823486328, 0.043274177551269534, 0.04329068756103516, 0.04309731292724609, 0.04327651214599609, 0.043162174224853515, 0.043118495941162106, 0.04334406280517578, 0.04334956741333008, 0.04327664184570312, 0.04448988723754883, 0.04360425567626953, 0.043469375610351565, 0.043603649139404295, 0.04361161422729492, 0.043479103088378906, 0.04348912048339844, 0.04337145614624023, 0.0434442253112793, 0.04355686569213867, 0.04350966262817383, 0.04328457641601562, 0.043218944549560545, 0.04345849609375, 0.04339737701416015, 0.04359167861938477, 0.043310016632080076, 0.04336422348022461, 0.0447760009765625, 0.043302913665771485, 0.042823680877685545, 0.042625022888183595, 0.04250419235229492, 0.04252876663208008, 0.04286873626708984, 0.04275804901123047, 0.04278041458129883, 0.0430022087097168, 0.04290556716918945, 0.04286876678466797, 0.04269055938720703, 0.042708545684814456, 0.04276819229125976, 0.0428361930847168, 0.0426907844543457, 0.042973537445068356, 0.04306108856201172, 0.04292937469482422, 0.04312073516845703, 0.04299030303955078, 0.04311036682128906, 0.04330640029907227, 0.043434558868408205, 0.04332137680053711, 0.043165409088134765, 0.04309635162353516, 0.043012096405029294, 0.043065662384033206, 0.043226303100585936, 0.04322150421142578, 0.04335599899291992, 0.043228927612304686, 0.04313087844848633, 0.043035041809082034, 0.043061023712158204, 0.04310374450683594, 0.04329264068603516, 0.043261951446533206, 0.04527180862426758, 0.043256961822509765, 0.04334681701660156, 0.04347475051879883, 0.04348947143554688, 0.0436610221862793, 0.043648990631103515, 0.04371209716796875, 0.043622718811035154, 0.043501983642578124, 0.04349756622314453, 0.043585025787353515, 0.043641246795654294, 0.04389068984985352, 0.04363792037963867, 0.043584159851074215, 0.043518142700195314, 0.04362387084960938, 0.04338336181640625, 0.04348928070068359, 0.04333964920043945, 0.043657344818115236, 0.04377190399169922, 0.04365212631225586, 0.04347856140136719, 0.043055328369140625, 0.042793632507324215, 0.04282102584838867, 0.0429574089050293, 0.043011871337890625, 0.04339529418945313, 0.042917343139648435, 0.04286291122436523, 0.043278560638427735, 0.04265574264526367, 0.04285200119018555, 0.042873184204101564, 0.04296499252319336, 0.04301619338989258, 0.04284723281860352, 0.04288166427612305, 0.04292441558837891, 0.042950496673583985, 0.043058784484863284, 0.043337886810302734, 0.04318864059448242, 0.04310630416870117, 0.04311366271972656, 0.043192127227783206, 0.04297625732421875, 0.04316774368286133, 0.04326822280883789, 0.04304828643798828, 0.04305155181884766, 0.04339244842529297, 0.04289388656616211, 0.042913791656494144, 0.043081729888916016, 0.043118366241455076, 0.04329904174804688, 0.043169792175292966, 0.04337062454223633, 0.04347014236450195, 0.0435918083190918, 0.04365142440795899, 0.04358364868164062, 0.04368729782104492, 0.04358160018920899, 0.04350812911987305, 0.04356915283203125, 0.04394803237915039, 0.04360806274414063, 0.04358758544921875, 0.04346700668334961, 0.043552513122558596, 0.04349708938598633, 0.04341798400878906, 0.04337059020996094, 0.04344412612915039, 0.04331315231323242, 0.0433623046875, 0.04352614212036133, 0.04349542236328125, 0.043579391479492184, 0.04362163162231445, 0.04349580764770508, 0.043581825256347656, 0.043923423767089846, 0.04315929412841797, 0.042705184936523435, 0.04272048187255859, 0.04272739028930664, 
0.042780960083007816, 0.04273001480102539, 0.042646526336669925, 0.04270083236694336, 0.042721473693847656, 0.04276508712768555, 0.04293427276611328, 0.042880481719970706, 0.04429059219360352, 0.043111873626708985, 0.04296352005004883, 0.043186176300048826, 0.043547935485839843, 0.04355759811401367, 0.04361811065673828, 0.043337120056152346, 0.043453216552734375, 0.043714561462402345, 0.04357324981689453, 0.04353238296508789, 0.043202465057373046, 0.043186176300048826, 0.043177982330322266, 0.04296908950805664, 0.04354662322998047, 0.04315900802612305, 0.043224769592285155, 0.04325155258178711, 0.04329366302490235, 0.04325759887695312, 0.043358497619628907, 0.043585601806640624, 0.043503551483154296, 0.04351715087890625, 0.04334652709960937, 0.04340505599975586, 0.04349996948242187, 0.0436756477355957, 0.043671550750732424, 0.04366745758056641, 0.0437022705078125, 0.04360806274414063, 0.043837440490722655, 0.043684993743896484, 0.0437072639465332, 0.04355596923828125, 0.043471393585205076, 0.043508064270019534, 0.04371865463256836, 0.04355398559570312, 0.04356998443603516, 0.0435483512878418, 0.043487552642822266, 0.04359372711181641, 0.04347903823852539, 0.04367305755615234, 0.043783935546875, 0.0437911376953125, 0.04349734497070312, 0.04297289657592773, 0.04275651168823242, 0.04283331298828125, 0.04287075042724609, 0.04289804840087891, 0.04285232162475586, 0.04281455993652344, 0.04290041732788086, 0.04279404830932617, 0.043094974517822265, 0.04331087875366211, 0.04300780868530273, 0.04311081695556641, 0.042991329193115234, 0.04292607879638672, 0.04285468673706055, 0.04305059051513672, 0.04297769546508789, 0.04311449432373047, 0.04334905624389648, 0.04324591827392578, 0.04340316772460937, 0.04345708847045898, 0.043292800903320314, 0.04319641494750977, 0.043151039123535156, 0.04318649673461914, 0.04315135955810547, 0.043097118377685546, 0.04298441696166992, 0.04310630416870117, 0.04313670349121094, 0.04315372848510742, 0.04302643203735351, 0.04316159820556641, 0.04336140823364258, 0.04317273712158203, 0.043210750579833986, 0.04328857421875, 0.043243423461914066, 0.04331903839111328, 0.04329084777832031, 0.04324764633178711, 0.043460704803466796, 0.04362406539916992, 0.043646751403808595, 0.04365167999267578, 0.04372480010986328, 0.04336374282836914, 0.04327052688598633, 0.04330889511108398, 0.04360966491699219, 0.04357923126220703, 0.04355171203613281, 0.04389641571044922, 0.043501983642578124, 0.04339507293701172, 0.04337776184082031, 0.04354550552368164, 0.04344153594970703, 0.043375232696533206, 0.04359987258911133, 0.043608158111572266, 0.04308867263793945, 0.042684417724609375, 0.04251238250732422, 0.042608638763427735, 0.04264755249023437, 0.042712894439697266, 0.04288940811157226, 0.042504032135009764, 0.04255350494384766, 0.042790912628173826, 0.04271923065185547, 0.042674175262451174, 0.04258518218994141, 0.04277920150756836, 0.04289308929443359, 0.04285708618164062, 0.04280928039550781, 0.0429793930053711, 0.04310409545898437, 0.043063392639160154, 0.04312268829345703, 0.04314847946166992, 0.043121086120605466, 0.04294620895385742, 0.04332822418212891, 0.0431321907043457, 0.043045600891113284, 0.04303385543823242, 0.043635456085205075, 0.0429031982421875, 0.042909408569335936, 0.04296700668334961, 0.04293904113769531, 0.043184127807617184, 0.04313497543334961, 0.043096065521240234, 0.042921024322509764, 0.04303763198852539, 0.04335753631591797, 0.044098209381103516, 0.04357120132446289, 0.043216896057128903, 0.04338227081298828, 0.043430305480957034, 0.043304798126220706, 
0.04333587265014648, 0.04337875366210937, 0.04332134246826172, 0.04333772659301758, 0.043382816314697266, 0.04332249450683594, 0.04311964797973633, 0.043337535858154294, 0.04341974258422852, 0.043394432067871094, 0.04324515151977539, 0.043195327758789065, 0.04347251129150391, 0.04327462387084961, 0.04346886444091797, 0.04352342224121094, 0.04377686309814453, 0.043312896728515626, 0.042902881622314454, 0.04273459243774414, 0.04253839874267578, 0.04268889617919922, 0.042674304962158204, 0.0425513916015625, 0.04270064163208008, 0.04260796737670899, 0.04251526260375976, 0.04299942398071289, 0.042971519470214846, 0.04298940658569336, 0.04308627319335938, 0.042992862701416015, 0.04299622344970703, 0.04286873626708984, 0.04285603332519531, 0.04302275085449219, 0.04307894515991211, 0.04309673690795898, 0.04345862579345703, 0.043409408569335936, 0.04337206268310547, 0.043253280639648437, 0.04305574417114258, 0.04305740737915039, 0.04317190551757812, 0.042974239349365236, 0.04294976043701172, 0.0432342414855957, 0.0429925422668457, 0.042891265869140625, 0.043698238372802733, 0.04316972732543945, 0.043224063873291016, 0.04308889770507812, 0.043503200531005856, 0.0432492790222168, 0.043243358612060544, 0.043582401275634765, 0.04345814514160156, 0.043387294769287106, 0.04349747085571289, 0.043448318481445314, 0.04329849624633789, 0.04346255874633789, 0.04336387252807617, 0.043348575592041014, 0.043329822540283204, 0.04339257431030273, 0.04319916915893555, 0.043346721649169924, 0.043400161743164065, 0.04335411071777344, 0.04348137664794922, 0.04388220977783203, 0.04407523345947266, 0.043519775390625, 0.043490623474121096, 0.043456321716308595, 0.04349017715454102, 0.043603038787841795, 0.04338687896728516, 0.043022335052490236, 0.043003841400146486, 0.04275948715209961, 0.04267910385131836, 0.04281727981567383, 0.042830337524414064, 0.042720958709716796, 0.04269465637207031, 0.042696510314941406, 0.043122753143310544, 0.043106239318847654, 0.042942657470703124, 0.04314051055908203, 0.04302912139892578, 0.042742816925048825, 0.0428515510559082, 0.043028030395507816, 0.043093215942382815, 0.04304991912841797, 0.043202560424804685, 0.043243518829345705, 0.043232383728027346, 0.04313516616821289, 0.04331590270996094, 0.04318320083618164, 0.04310108947753906, 0.04302643203735351, 0.043126686096191406, 0.04294851303100586, 0.04294675064086914, 0.04345167922973633, 0.04333615875244141, 0.04322076797485352, 0.043006431579589846, 0.043300193786621095, 0.043452510833740236, 0.04345455932617188, 0.04330713653564453, 0.04343228912353515, 0.043241504669189454, 0.04320428848266602, 0.04336054229736328, 0.04348108673095703, 0.043568737030029295, 0.04342630386352539, 0.04342998504638672, 0.043473888397216796, 0.0434813117980957, 0.0433397445678711, 0.04318479919433594, 0.04328857421875, 0.043433982849121096, 0.04341862487792969, 0.04338108825683594, 0.04335887908935547, 0.04325785446166992, 0.04317388916015625, 0.043327617645263675, 0.04342771148681641, 0.04352204895019531, 0.04364287948608398, 0.04352000045776367]",tokens/s,23.14972165835191,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2230.239232,2558.394368,0.0,2155.872256,2032.413184,s,1,8.849880859375,8.849880859375,0.0,8.849880859375,8.849880859375,8.849880859375,8.849880859375,[8.849880859375],,kWh,5.069948461662837e-05,5.585193968730092e-06,1.5416401221990395e-05,7.170107980734885e-05,,MB,2298.92096,2835.218432,0.0,2418.016256,2280.154112,s,10,0.9324708175659179,0.0932470817565918,0.00012180643037577638,0.09319584274291992,0.09344761810302735,0.0934663360595703,0.09348131042480468,"[0.09348505401611328, 0.0934434585571289, 0.09312166595458984, 0.09317011260986328, 0.09320662689208985, 0.09316172790527344, 0.09312764739990234, 0.09318505859375, 0.09331346893310546, 0.09325599670410156]",tokens/s,2745.39422765263,kWh,2.769611799213855e-06,3.054406640319133e-07,1.8421555617734332e-06,4.917208025019201e-06,tokens/kWh,52062064.223731995,MB,2313.904128,2919.104512,0.0,2501.902336,2389.055488,s,10,18.272229736328125,1.8272229736328125,0.004028818296486248,1.8277330932617186,1.8326639526367188,1.833317694091797,1.8338406872558595,"[1.8243748779296876, 1.8225260009765625, 1.833971435546875, 1.8325186767578125, 1.82675390625, 1.828748779296875, 1.8228560791015624, 1.8219384765625, 1.8287122802734375, 1.8298292236328124]",tokens/s,34.478550734695446,kWh,5.357079897370606e-05,5.90859924818996e-06,2.7599229102426552e-05,8.707862732432257e-05,tokens/kWh,723484.0733692067,,s,630,18.269282154083232,0.02899886056203691,0.0004212822361993602,0.028892560005187987,0.02935540561676025,0.02950972194671631,0.031019194602966316,"[0.02936832046508789, 0.029171295166015625, 0.029054399490356445, 0.028795007705688477, 0.028957536697387695, 0.028795135498046874, 0.02892982482910156, 0.029145055770874024, 0.0287926082611084, 0.02875004768371582, 0.02897702407836914, 0.029155296325683595, 0.028884416580200196, 0.028773088455200196, 0.029148639678955077, 0.028981632232666015, 0.028879007339477538, 0.0291778564453125, 0.02919219207763672, 0.029105600357055665, 0.029057056427001952, 0.029309152603149414, 0.028952768325805664, 0.02902134323120117, 0.02902524757385254, 0.028884992599487305, 0.029165151596069337, 0.02938307189941406, 0.02897305679321289, 0.029116031646728515, 0.028708959579467775, 0.028876895904541015, 0.02880735969543457, 0.029233024597167968, 0.028894464492797853, 0.028930944442749025, 0.028748832702636718, 0.02879283142089844, 0.028867551803588867, 0.028829504013061523, 0.028913440704345702, 0.029016128540039064, 0.029143392562866213, 0.02887843132019043, 0.02880963134765625, 0.028923904418945313, 0.02866899108886719, 0.028937055587768556, 0.028763776779174806, 0.028978912353515626, 0.028872800827026368, 0.029212736129760743, 0.02878108787536621, 0.028835264205932618, 0.02865011215209961, 0.02874937629699707, 0.028792768478393554, 0.028748287200927734, 0.02863088035583496, 0.028991647720336914, 0.02891484832763672, 
0.02900387191772461, 0.029264095306396485, 0.028944128036499022, 0.028813823699951172, 0.030018560409545897, 0.028920095443725587, 0.028942815780639647, 0.028719104766845704, 0.028743648529052736, 0.028823328018188477, 0.02880073547363281, 0.028676191329956056, 0.028713504791259767, 0.02894838333129883, 0.028880895614624022, 0.028794879913330077, 0.030019584655761718, 0.028872703552246092, 0.028712959289550782, 0.02891366386413574, 0.02882537651062012, 0.028716480255126953, 0.02889193534851074, 0.029089599609375, 0.028976768493652345, 0.02889776039123535, 0.029464672088623047, 0.028835840225219726, 0.028692031860351564, 0.028715456008911133, 0.02881452751159668, 0.028833759307861327, 0.028703584671020507, 0.029048831939697265, 0.02959984016418457, 0.0289770565032959, 0.02880246353149414, 0.02860895919799805, 0.02879657554626465, 0.029187936782836914, 0.028945056915283204, 0.02899740791320801, 0.028788896560668947, 0.028921920776367186, 0.028667903900146483, 0.02880512046813965, 0.02883987236022949, 0.02896214485168457, 0.02868502426147461, 0.028803071975708007, 0.028856576919555663, 0.028700416564941406, 0.02870412826538086, 0.028817695617675783, 0.02909388732910156, 0.02907161521911621, 0.028721151351928712, 0.028891231536865233, 0.028891136169433593, 0.029020095825195314, 0.02887481689453125, 0.029402591705322265, 0.02906883239746094, 0.02915020751953125, 0.028827487945556642, 0.02893177604675293, 0.029077856063842774, 0.028998912811279295, 0.02903055953979492, 0.02890390396118164, 0.029096031188964845, 0.029558687210083007, 0.029183263778686522, 0.029227455139160155, 0.029118112564086914, 0.028771039962768554, 0.028871999740600587, 0.029098688125610353, 0.029100032806396486, 0.02880102348327637, 0.028915552139282225, 0.028912864685058593, 0.02882361602783203, 0.028745824813842774, 0.028787744522094726, 0.028992416381835938, 0.02906153678894043, 0.028770240783691406, 0.028815872192382814, 0.028884832382202148, 0.028760223388671874, 0.028853824615478516, 0.029429887771606444, 0.02920854377746582, 0.030882080078125, 0.029161535263061523, 0.028970144271850587, 0.028875616073608397, 0.028844032287597656, 0.0290729923248291, 0.02960220718383789, 0.028845407485961913, 0.028854944229125976, 0.02890547180175781, 0.02896895980834961, 0.029220863342285155, 0.02922256088256836, 0.028811616897583006, 0.02879897689819336, 0.029114368438720704, 0.029411327362060546, 0.029372415542602538, 0.029427711486816405, 0.029243392944335936, 0.028899328231811523, 0.028692415237426758, 0.0289068489074707, 0.029000415802001953, 0.029314111709594727, 0.02911747169494629, 0.02939689636230469, 0.029319007873535155, 0.029501535415649413, 0.029101568222045897, 0.02893881607055664, 0.028923904418945313, 0.030875648498535156, 0.029313024520874024, 0.029527103424072266, 0.029543359756469725, 0.02937228775024414, 0.029507328033447265, 0.029388992309570313, 0.029335775375366212, 0.029251552581787108, 0.029098207473754883, 0.028980192184448243, 0.029369152069091797, 0.028714303970336915, 0.028808895111083983, 0.028828224182128905, 0.028703168869018556, 0.028743423461914063, 0.028907840728759765, 0.02892972755432129, 0.028708351135253905, 0.02879974365234375, 0.028925952911376954, 0.028913919448852538, 0.028732704162597655, 0.028776575088500976, 0.029221216201782228, 0.029005823135375978, 0.028663808822631837, 0.02865679931640625, 0.028695392608642577, 0.028683391571044922, 0.031075199127197264, 0.030453855514526368, 0.028964799880981447, 0.029134016036987304, 0.028990207672119142, 0.028724512100219726, 0.02881920051574707, 
0.028786943435668944, 0.029383424758911134, 0.028792352676391603, 0.028785120010375975, 0.028878847122192384, 0.028782495498657225, 0.028760160446166992, 0.029175199508666993, 0.029477760314941405, 0.029121408462524413, 0.028957536697387695, 0.029007871627807616, 0.028872703552246092, 0.02873321533203125, 0.02872326469421387, 0.029081760406494142, 0.02931475257873535, 0.02922224044799805, 0.028925952911376954, 0.029342687606811524, 0.029560831069946288, 0.0295402889251709, 0.029339519500732422, 0.0290633602142334, 0.02930892753601074, 0.02923423957824707, 0.029084287643432617, 0.02959971237182617, 0.02934105682373047, 0.029487743377685546, 0.02909388732910156, 0.029050815582275392, 0.028989503860473633, 0.028903423309326173, 0.028672000885009766, 0.028708864212036132, 0.028638303756713866, 0.02869487953186035, 0.028772928237915038, 0.029394399642944335, 0.029089439392089845, 0.02879199981689453, 0.02875289535522461, 0.029231807708740235, 0.028935359954833983, 0.028822336196899414, 0.028847103118896485, 0.028879264831542968, 0.028750431060791014, 0.028581056594848633, 0.02888377571105957, 0.028657663345336915, 0.028720928192138673, 0.02904412841796875, 0.02895136070251465, 0.028688383102416993, 0.029798015594482422, 0.03125900840759278, 0.02906060791015625, 0.029125120162963865, 0.028820608139038088, 0.028906208038330078, 0.029247648239135744, 0.02913267135620117, 0.028914880752563477, 0.028879648208618165, 0.029058847427368164, 0.0291494083404541, 0.02869900894165039, 0.028815135955810547, 0.028839967727661134, 0.02873936080932617, 0.028690624237060546, 0.028640512466430665, 0.029313568115234376, 0.029092063903808595, 0.029024255752563476, 0.028897056579589842, 0.028804800033569337, 0.028793056488037108, 0.028676671981811522, 0.028706560134887694, 0.02877440071105957, 0.028843360900878905, 0.028758047103881836, 0.0288590087890625, 0.029023391723632812, 0.029540895462036133, 0.029477216720581054, 0.029112287521362305, 0.02958038330078125, 0.029139871597290038, 0.028964704513549804, 0.02889129638671875, 0.028893184661865235, 0.02911552047729492, 0.028953695297241212, 0.02885744094848633, 0.028961471557617188, 0.028790208816528322, 0.02866223907470703, 0.028739200592041016, 0.028789215087890625, 0.028635135650634767, 0.029007007598876953, 0.028646240234375, 0.029425952911376955, 0.03298275375366211, 0.028907743453979493, 0.02888598442077637, 0.029207136154174803, 0.02895689582824707, 0.02889900779724121, 0.028923871994018555, 0.02896931266784668, 0.028807167053222657, 0.028800512313842775, 0.028725376129150392, 0.02899523162841797, 0.029016799926757812, 0.029187360763549806, 0.02962076759338379, 0.029375904083251952, 0.029110847473144533, 0.028864736557006835, 0.029130752563476563, 0.02887424087524414, 0.02891187286376953, 0.02870297622680664, 0.028839103698730467, 0.02890015983581543, 0.02894771194458008, 0.029260799407958983, 0.02920979118347168, 0.028891712188720702, 0.02877644729614258, 0.02879046440124512, 0.02879929542541504, 0.028712480545043946, 0.02872777557373047, 0.02879283142089844, 0.029413375854492187, 0.02890547180175781, 0.02935398483276367, 0.0287455997467041, 0.02882918357849121, 0.02882115173339844, 0.028834783554077148, 0.02897942352294922, 0.029331232070922853, 0.028921344757080077, 0.02888140869140625, 0.02889507293701172, 0.02907935905456543, 0.02908220863342285, 0.02870604705810547, 0.02872159957885742, 0.02874073600769043, 0.02876710319519043, 0.02875596809387207, 0.02884160041809082, 0.028844095230102538, 0.02875596809387207, 0.028672319412231445, 
0.02881331253051758, 0.028888416290283204, 0.02879350471496582, 0.02874163246154785, 0.028803199768066407, 0.028685983657836915, 0.02871104049682617, 0.02862908744812012, 0.02914303970336914, 0.028866559982299804, 0.028862016677856445, 0.028856767654418945, 0.028880895614624022, 0.02879283142089844, 0.028827264785766603, 0.028662143707275392, 0.0288721923828125, 0.029932031631469725, 0.028856319427490236, 0.0287825927734375, 0.02895795249938965, 0.0288222713470459, 0.02870854377746582, 0.028717376708984374, 0.02879897689819336, 0.02874163246154785, 0.028681568145751953, 0.029125280380249023, 0.028860416412353516, 0.028839935302734376, 0.028728736877441406, 0.028770912170410157, 0.03156172752380371, 0.02931711959838867, 0.029450016021728517, 0.029200672149658203, 0.028905376434326172, 0.029052959442138673, 0.028981216430664064, 0.02887887954711914, 0.028962591171264648, 0.028834016799926757, 0.02892185592651367, 0.02878998374938965, 0.0291167049407959, 0.029075904846191405, 0.028933280944824218, 0.029049503326416017, 0.0291343994140625, 0.028805824279785158, 0.02875801658630371, 0.028704544067382813, 0.029051136016845704, 0.02885593605041504, 0.02865567970275879, 0.028795103073120117, 0.02870684814453125, 0.028660736083984374, 0.028664640426635742, 0.02996006393432617, 0.028979328155517577, 0.02894419288635254, 0.028960960388183594, 0.028854207992553713, 0.028876863479614257, 0.028807167053222657, 0.02874355125427246, 0.029040767669677735, 0.028669952392578125, 0.028872703552246092, 0.029083648681640626, 0.029083648681640626, 0.028805215835571288, 0.02897091293334961, 0.028800991058349608, 0.02872659111022949, 0.028842048645019533, 0.028871328353881835, 0.02882899284362793, 0.028998239517211914, 0.029065311431884764, 0.029337600708007814, 0.028740991592407228, 0.02877884864807129, 0.02883612823486328, 0.028700672149658202, 0.028882911682128906, 0.02879622459411621, 0.028642015457153322, 0.028979167938232422, 0.028651744842529296, 0.028786495208740236, 0.028676095962524413, 0.02879283142089844, 0.028723039627075196, 0.028694911956787108, 0.031139616012573243, 0.02878054428100586, 0.028696575164794923, 0.02875187110900879, 0.02895871925354004, 0.0287457275390625, 0.028700672149658202, 0.028687744140625, 0.028696575164794923, 0.02882828712463379, 0.028786687850952147, 0.02889321517944336, 0.02883516883850098, 0.02863539123535156, 0.02893008041381836, 0.029460351943969728, 0.029483488082885742, 0.02936819267272949, 0.029077632904052735, 0.02930803108215332, 0.02940438461303711, 0.029029727935791016, 0.029493919372558595, 0.029464031219482423, 0.028997472763061524, 0.028836320877075196, 0.028672000885009766, 0.03077939224243164, 0.02993971252441406, 0.02892736053466797, 0.028781248092651368, 0.02878803253173828, 0.02861510467529297, 0.028879039764404296, 0.02933046340942383, 0.029128768920898437, 0.029190143585205077, 0.02924959945678711, 0.029133663177490235, 0.029009920120239258, 0.028749439239501955, 0.028665983200073242, 0.028760351181030274, 0.028763872146606445, 0.028860671997070313, 0.028614656448364258, 0.028692480087280273, 0.028777568817138673, 0.028629663467407227, 0.028874847412109376, 0.028814943313598632, 0.028875328063964843, 0.028943424224853517, 0.028758975982666017, 0.028710912704467774, 0.028637184143066406, 0.029108224868774416, 0.028827648162841796, 0.02887295913696289, 0.028739328384399413, 0.029240671157836913, 0.028893856048583983, 0.03135052871704101, 0.02956723213195801, 0.02879897689819336, 0.028717056274414062, 0.02887059211730957, 0.028758079528808593, 
0.028612607955932616, 0.028622848510742187, 0.02914518356323242, 0.028727199554443358, 0.02870083236694336, 0.028723039627075196, 0.028874528884887694, 0.02899171257019043, 0.02965228843688965, 0.0291212158203125, 0.029263872146606446, 0.029163520812988283, 0.028991552352905275, 0.028999616622924804, 0.029511680603027345, 0.02950262451171875, 0.029457536697387696, 0.029553983688354494, 0.02931974411010742, 0.02925881576538086, 0.029245439529418944, 0.02888547134399414, 0.029003807067871094, 0.02900521659851074, 0.028906463623046875, 0.028921760559082032, 0.029144607543945312, 0.028809696197509765, 0.02894256019592285, 0.029010719299316406, 0.028818368911743164, 0.02887651252746582, 0.028821855545043944, 0.02891881561279297, 0.028893247604370117, 0.028766271591186523, 0.028793312072753905, 0.028800928115844726, 0.028923519134521486, 0.029090656280517577, 0.02902022361755371, 0.028972991943359373, 0.028870655059814454, 0.028717056274414062, 0.028712959289550782, 0.02878873634338379, 0.028819456100463867, 0.02873139190673828, 0.028721151351928712, 0.028771551132202148, 0.028859167098999022, 0.028907520294189453, 0.02901158332824707, 0.02908582305908203, 0.029327072143554688, 0.02881718444824219, 0.028717824935913086, 0.029345792770385744, 0.033331329345703126, 0.029343616485595702, 0.02920857620239258, 0.028917919158935546, 0.028888832092285155, 0.028890304565429688, 0.0287425594329834, 0.02884787178039551, 0.028811519622802734, 0.029251583099365236, 0.02894380760192871, 0.0288690242767334, 0.028825439453125, 0.028780736923217774, 0.028652992248535156, 0.02893062400817871, 0.028833919525146485, 0.02907686424255371, 0.02900752067565918]",tokens/s,34.48411353476155,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1585.860608,1604.190208,0.0,1201.668096,1189.151232,s,1,8.352587890625,8.352587890625,0.0,8.352587890625,8.352587890625,8.352587890625,8.352587890625,[8.352587890625],,kWh,3.722197351251755e-05,4.098652479497224e-06,1.1060564404016815e-05,5.238119039603159e-05,,MB,1704.194048,1801.322496,0.0,1384.12032,1351.367168,s,10,0.4133818244934082,0.04133818244934082,0.00021710415600139905,0.041259599685668946,0.0416843391418457,0.041755705642700196,0.04181279884338379,"[0.04182707214355469, 0.04121747207641602, 0.04109958267211914, 0.04123040008544922, 0.04134012985229492, 0.04129801559448242, 0.04118268966674805, 0.04166847991943359, 0.041229183197021485, 0.041288799285888675]",tokens/s,6192.821861815605,kWh,1.2516321108796287e-06,1.3802899662050067e-07,8.327475797778712e-07,2.222408687278001e-06,tokens/kWh,115190334.46253668,MB,1709.89568,1843.265536,0.0,1426.06336,1407.548416,s,10,10.648607055664064,1.064860705566406,0.003970466598262344,1.0655339965820312,1.0696396484375001,1.069945068359375,1.070189404296875,"[1.0657847900390625, 1.067237060546875, 1.06957177734375, 1.07025048828125, 1.064951904296875, 1.05922802734375, 1.065283203125, 
1.0594881591796874, 1.06758251953125, 1.0592291259765625]",tokens/s,59.162667634063844,kWh,3.110098283995666e-05,3.429957937523701e-06,1.577587681822061e-05,5.030681759570097e-05,tokens/kWh,1252315.352291013,,s,630,10.645323112487779,0.016897338273790148,0.00039187813689604384,0.01681591987609863,0.017147257232666014,0.017354007720947265,0.018099379463195803,"[0.01694918441772461, 0.01691103935241699, 0.01697587203979492, 0.01679155158996582, 0.0168222713470459, 0.017001663208007813, 0.016874303817749025, 0.016639392852783205, 0.016701087951660157, 0.016944128036499022, 0.016809247970581056, 0.01697212791442871, 0.017000064849853516, 0.016884416580200196, 0.016748544692993163, 0.017285024642944336, 0.01686947250366211, 0.01700556755065918, 0.017064960479736328, 0.01722368049621582, 0.01706188774108887, 0.017133567810058595, 0.017213024139404298, 0.017219871520996095, 0.01696329689025879, 0.01706435203552246, 0.01698406410217285, 0.017088512420654296, 0.016946207046508788, 0.017021919250488283, 0.016856224060058593, 0.016872255325317383, 0.016778400421142578, 0.016862079620361327, 0.016869375228881836, 0.01683456039428711, 0.01678303909301758, 0.016875423431396485, 0.01681654357910156, 0.016925952911376954, 0.016952064514160155, 0.016850944519042968, 0.01679974365234375, 0.016863231658935548, 0.01683046340942383, 0.01680384063720703, 0.017476768493652345, 0.016858015060424805, 0.016805824279785157, 0.017102848052978514, 0.01699635124206543, 0.016733728408813476, 0.01672444725036621, 0.016776960372924806, 0.016816032409667968, 0.016828767776489256, 0.016819232940673827, 0.01692755126953125, 0.01676713562011719, 0.016762176513671876, 0.01671443176269531, 0.01681817626953125, 0.016857088088989256, 0.016953855514526366, 0.016889440536499024, 0.01685558319091797, 0.017010175704956054, 0.016814592361450196, 0.01681407928466797, 0.01683865547180176, 0.016805824279785157, 0.016730079650878905, 0.017070175170898438, 0.017035263061523438, 0.017014400482177734, 0.016847232818603515, 0.01678281593322754, 0.016781600952148437, 0.01672831916809082, 0.01681203269958496, 0.017106943130493164, 0.016932863235473633, 0.016918495178222658, 0.016846879959106446, 0.016945152282714843, 0.01678745651245117, 0.016932863235473633, 0.01704550361633301, 0.017147775650024413, 0.01692470359802246, 0.016776479721069337, 0.017033376693725587, 0.01715017509460449, 0.01696607971191406, 0.017013792037963868, 0.017066976547241212, 0.01671347236633301, 0.016961280822753905, 0.016870048522949217, 0.016936735153198244, 0.017059904098510742, 0.017285120010375975, 0.017284927368164064, 0.017747648239135744, 0.017371648788452147, 0.017241279602050782, 0.017033792495727538, 0.017410303115844728, 0.016954368591308593, 0.016909055709838867, 0.016946847915649415, 0.016881568908691406, 0.016887872695922852, 0.016732799530029298, 0.01666201591491699, 0.01672857666015625, 0.016651487350463866, 0.016870176315307617, 0.016699199676513673, 0.016806079864501954, 0.016774879455566407, 0.01705603218078613, 0.01677142333984375, 0.016720640182495118, 0.01675152015686035, 0.016914432525634765, 0.017139711380004884, 0.018069280624389648, 0.01692624092102051, 0.01717318344116211, 0.01707375907897949, 0.017002912521362306, 0.016988000869750976, 0.016864479064941405, 0.016738399505615235, 0.016812416076660158, 0.016796031951904298, 0.016906335830688478, 0.016855039596557618, 0.017491968154907226, 0.0216944637298584, 0.017434623718261717, 0.01680303955078125, 0.01683951950073242, 0.016789375305175783, 0.016693311691284178, 0.016626752853393555, 
0.016900447845458983, 0.016738912582397462, 0.01683660888671875, 0.016949247360229493, 0.016873472213745116, 0.016861183166503906, 0.016823423385620116, 0.016806720733642578, 0.016822336196899414, 0.01683456039428711, 0.016655967712402343, 0.01727734375, 0.01715772819519043, 0.01675667190551758, 0.016822399139404295, 0.01687318420410156, 0.01692326354980469, 0.016910335540771485, 0.016879039764404295, 0.016736127853393554, 0.016849599838256835, 0.016682336807250977, 0.016732831954956055, 0.016844032287597656, 0.016769792556762694, 0.0167524471282959, 0.016810176849365234, 0.016766016006469726, 0.016658527374267578, 0.01670662307739258, 0.0167542724609375, 0.016715936660766602, 0.01702300834655762, 0.016789728164672852, 0.016664352416992188, 0.016728063583374024, 0.0168407039642334, 0.016797727584838867, 0.01807686424255371, 0.016742176055908203, 0.016670944213867188, 0.01681488037109375, 0.01683456039428711, 0.016726015090942382, 0.01687875175476074, 0.01684566307067871, 0.01679136085510254, 0.016798240661621094, 0.017088159561157227, 0.016703487396240235, 0.0167380485534668, 0.016983392715454102, 0.01670012855529785, 0.01680601692199707, 0.016880992889404298, 0.016926944732666014, 0.01708902359008789, 0.017051647186279297, 0.016924671173095703, 0.01677926445007324, 0.017010528564453124, 0.018108575820922852, 0.016992095947265626, 0.01689958381652832, 0.016953535079956054, 0.017080575942993163, 0.017049472808837892, 0.017248031616210937, 0.01726310348510742, 0.021069343566894532, 0.017454879760742188, 0.017160959243774414, 0.016912384033203123, 0.01736016082763672, 0.0170317440032959, 0.016830623626708983, 0.016999807357788086, 0.016990848541259766, 0.016951295852661134, 0.016876800537109375, 0.016720640182495118, 0.016752384185791017, 0.016662784576416016, 0.016693248748779296, 0.016668352127075195, 0.016611679077148438, 0.016798912048339845, 0.016801664352416992, 0.016677791595458985, 0.01669478416442871, 0.01683286476135254, 0.016883487701416015, 0.016844512939453125, 0.01699087905883789, 0.01676288032531738, 0.016719871520996094, 0.016703487396240235, 0.01669478416442871, 0.016937408447265625, 0.017122751235961915, 0.01720694351196289, 0.01706188774108887, 0.01685807991027832, 0.01677471923828125, 0.0167542724609375, 0.016790111541748046, 0.016898431777954102, 0.01665836715698242, 0.01668239974975586, 0.016634527206420897, 0.016742399215698242, 0.01757699203491211, 0.017550304412841798, 0.017147199630737305, 0.016961824417114257, 0.01683292770385742, 0.01683456039428711, 0.016764928817749023, 0.017092159271240234, 0.017086912155151367, 0.01709244728088379, 0.01693440055847168, 0.016891775131225587, 0.016943296432495116, 0.01752943992614746, 0.017354368209838867, 0.01710323143005371, 0.016895999908447267, 0.01696329689025879, 0.016840927124023436, 0.016961599349975588, 0.017006591796875, 0.016859136581420898, 0.01675811195373535, 0.01680860710144043, 0.016752639770507814, 0.01681817626953125, 0.016813600540161133, 0.016887519836425783, 0.01692710494995117, 0.01672640037536621, 0.016738304138183592, 0.016747840881347655, 0.016665279388427736, 0.0167293758392334, 0.016707679748535157, 0.016792192459106445, 0.016866912841796877, 0.017152416229248048, 0.016997888565063478, 0.01708624076843262, 0.016920703887939453, 0.016814687728881835, 0.01685196876525879, 0.01683113670349121, 0.016716127395629884, 0.016957439422607423, 0.016989280700683593, 0.016733087539672852, 0.016775007247924804, 0.01696169662475586, 0.016768800735473634, 0.016810207366943358, 0.016882944107055663, 
0.016767744064331055, 0.016955392837524414, 0.016732255935668947, 0.01664975929260254, 0.016699296951293945, 0.016758144378662108, 0.01666115188598633, 0.016653663635253908, 0.016648767471313476, 0.01664204788208008, 0.017989183425903322, 0.01680793571472168, 0.016906784057617186, 0.01705308723449707, 0.0170579833984375, 0.017017248153686524, 0.016887807846069337, 0.016934207916259766, 0.016898496627807617, 0.01696908760070801, 0.016832735061645506, 0.016781984329223634, 0.016726015090942382, 0.01711520004272461, 0.01678329658508301, 0.016748544692993163, 0.016639232635498047, 0.0167063045501709, 0.01661337661743164, 0.016680959701538087, 0.01668508720397949, 0.016666591644287108, 0.016602815628051756, 0.01666489601135254, 0.01666815948486328, 0.016639776229858398, 0.0167042236328125, 0.01663795280456543, 0.01668035125732422, 0.016724576950073244, 0.016921728134155274, 0.017050464630126952, 0.016850719451904295, 0.01682022476196289, 0.0168853759765625, 0.017037952423095703, 0.016840736389160157, 0.01688368034362793, 0.01668000030517578, 0.016786367416381835, 0.017047040939331053, 0.017027263641357423, 0.016801664352416992, 0.016791488647460936, 0.016748832702636718, 0.016644447326660157, 0.016737375259399414, 0.016677663803100585, 0.016618879318237304, 0.016726655960083006, 0.016815616607666017, 0.016853343963623046, 0.016650400161743163, 0.016732160568237304, 0.01669059181213379, 0.01703500747680664, 0.016749408721923827, 0.018356544494628906, 0.01692678451538086, 0.016886144638061523, 0.01673116874694824, 0.016688095092773438, 0.016748863220214842, 0.017055456161499023, 0.0168240966796875, 0.016988000869750976, 0.016843103408813478, 0.01696767997741699, 0.016900096893310547, 0.016773120880126953, 0.01702521514892578, 0.016926591873168945, 0.016746431350708007, 0.016699392318725585, 0.01676595115661621, 0.016722944259643553, 0.016672224044799806, 0.016695743560791017, 0.016655967712402343, 0.016777055740356445, 0.01667305564880371, 0.016796031951904298, 0.016655616760253907, 0.016718591690063477, 0.016748544692993163, 0.016699392318725585, 0.016709184646606444, 0.01664620780944824, 0.016715583801269532, 0.01677574348449707, 0.016578559875488282, 0.016740352630615234, 0.016678016662597658, 0.01682521629333496, 0.01717452812194824, 0.017243616104125975, 0.016720415115356446, 0.01676825523376465, 0.017250303268432618, 0.01686342430114746, 0.016876096725463866, 0.01683251190185547, 0.016998079299926756, 0.016902463912963867, 0.01684889602661133, 0.01682022476196289, 0.01697711944580078, 0.016792352676391602, 0.016742399215698242, 0.016742399215698242, 0.016608800888061524, 0.016648128509521486, 0.016732383728027343, 0.016832799911499025, 0.01672812843322754, 0.019907840728759764, 0.01756787109375, 0.017449567794799805, 0.016915712356567383, 0.016769792556762694, 0.017058528900146485, 0.01696281623840332, 0.016972223281860353, 0.016854911804199218, 0.018037151336669922, 0.01704140853881836, 0.01731705665588379, 0.016844640731811522, 0.016790496826171876, 0.01667411231994629, 0.016815807342529295, 0.016705631256103515, 0.01674118423461914, 0.016666688919067384, 0.016727743148803712, 0.016732511520385743, 0.016754304885864258, 0.016662431716918946, 0.016663007736206055, 0.016624799728393556, 0.016664640426635742, 0.016621664047241212, 0.016617696762084962, 0.016595424652099608, 0.01658639907836914, 0.01667228889465332, 0.016642879486083985, 0.01671548843383789, 0.016597280502319334, 0.01659926414489746, 0.01670729637145996, 0.01669740867614746, 0.016701440811157226, 0.016756736755371093, 
0.01677926445007324, 0.017014463424682616, 0.01686355209350586, 0.0167956485748291, 0.016880704879760743, 0.01675155258178711, 0.01683865547180176, 0.01670297622680664, 0.016892383575439453, 0.016654367446899413, 0.016695232391357423, 0.01668412780761719, 0.016700384140014648, 0.016668672561645507, 0.01662566375732422, 0.01669875144958496, 0.016720512390136718, 0.016668672561645507, 0.01722163200378418, 0.0166748161315918, 0.017057472229003907, 0.016687200546264647, 0.01718508720397949, 0.016646047592163087, 0.01744076728820801, 0.016733247756958006, 0.01666758346557617, 0.017328128814697266, 0.017059839248657227, 0.017109983444213866, 0.01712544059753418, 0.017131359100341796, 0.016887615203857422, 0.016693504333496093, 0.016684864044189455, 0.016654560089111328, 0.016672767639160157, 0.01665433692932129, 0.016699392318725585, 0.016629791259765624, 0.016672063827514648, 0.01662748718261719, 0.01662656021118164, 0.01661520004272461, 0.016584800720214843, 0.016603071212768553, 0.01666908836364746, 0.016639776229858398, 0.01747760009765625, 0.016744224548339844, 0.016726272583007813, 0.01675040054321289, 0.016611520767211913, 0.01660691261291504, 0.016679231643676757, 0.01661734390258789, 0.016735776901245118, 0.016874080657958986, 0.016936960220336913, 0.017303136825561522, 0.01682806396484375, 0.016743167877197266, 0.016726015090942382, 0.016670303344726564, 0.016644512176513672, 0.016871423721313478, 0.01677926445007324, 0.016723968505859374, 0.016643295288085936, 0.016672832489013672, 0.017232608795166016, 0.018875968933105468, 0.019986303329467773, 0.01736460876464844, 0.01716524887084961, 0.017491584777832032, 0.017405759811401366, 0.016873279571533204, 0.016734912872314454, 0.016684768676757812, 0.01679599952697754, 0.016821823120117187, 0.016933311462402345, 0.016943103790283204, 0.017057119369506837, 0.016777599334716797, 0.016777503967285157, 0.017922048568725587, 0.017165855407714845, 0.016822751998901368, 0.01688083267211914, 0.016835391998291014, 0.016857088088989256, 0.016725759506225586, 0.016772384643554687, 0.01672198486328125, 0.01676790428161621, 0.016768960952758788, 0.01672198486328125, 0.01677107238769531, 0.016695295333862305, 0.016739967346191407, 0.01669977569580078, 0.016919584274291993, 0.01683350372314453, 0.016740352630615234, 0.016756736755371093, 0.016707584381103514, 0.01680998420715332, 0.016842752456665038, 0.01665843200683594, 0.016746496200561522, 0.016728063583374024, 0.016680959701538087, 0.016803199768066407, 0.016890304565429688, 0.017178943634033203, 0.01734671974182129, 0.017428224563598632, 0.01695110321044922, 0.016898015975952148, 0.01697983932495117, 0.016873023986816407, 0.016894336700439452, 0.016654720306396486, 0.01676288032531738, 0.01669494438171387, 0.01683024024963379, 0.016640575408935546, 0.01678950309753418, 0.016788576126098635, 0.016874399185180664, 0.016774303436279298, 0.017353567123413086, 0.016713216781616212, 0.01666819190979004, 0.01667375946044922, 0.016686912536621093, 0.01665247917175293, 0.01678441619873047, 0.016739295959472655, 0.017137664794921875, 0.016790592193603515, 0.016885856628417968, 0.016794464111328126, 0.017035263061523438, 0.016713663101196288, 0.016680063247680663, 0.01660176086425781, 0.01670172882080078, 0.01659059143066406, 0.01671763229370117, 0.016550176620483397, 0.016684511184692382, 0.01670623970031738]",tokens/s,59.180918544497814,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,11045.216256,12553.4208,0.0,12150.898688,12116.742656,s,1,16.161283203125,16.161283203125,0.0,16.161283203125,16.161283203125,16.161283203125,16.161283203125,[16.161283203125],,kWh,0.00026584888338748746,2.9312749071534398e-05,8.613034668200625e-05,0.0003812919791410281,,MB,1978.150912,13505.527808,0.0,13088.325632,12892.834304,s,10,5.963045043945312,0.5963045043945313,0.001705738572441738,0.5955810546875,0.5984445556640625,0.5989901916503906,0.5994267004394531,"[0.5950630493164063, 0.5944014282226563, 0.5951193237304687, 0.595307373046875, 0.5967609252929688, 0.5945224609375, 0.5981566162109375, 0.595854736328125, 0.5995358276367188, 0.5983233032226563]",tokens/s,429.3108606649455,kWh,1.7397974058088253e-05,1.918670501034066e-06,1.1484257553411934e-05,3.0800902112534254e-05,tokens/kWh,8311444.874720803,MB,1982.164992,13883.015168,0.0,13465.812992,13237.636096,s,10,40.82478149414062,4.0824781494140625,0.006199130724574727,4.084245239257813,4.087447387695312,4.088314758300782,4.0890086547851565,"[4.067908203125, 4.0751396484375, 4.082970458984375, 4.080665283203125, 4.08552001953125, 4.082532470703125, 4.08918212890625, 4.086503662109375, 4.087254638671875, 4.08710498046875]",tokens/s,15.431803354303824,kWh,0.00011924196311983081,1.31530402459768e-05,7.93970095959875e-05,0.00021179201296179508,tokens/kWh,297461.64229226386,,s,630,40.81509881973267,0.06478587114243281,0.0005429811052452447,0.06481027221679687,0.06550202178955078,0.06565998191833496,0.06583570388793945,"[0.06345500946044921, 0.06436201477050782, 0.06378771209716796, 0.06349619293212891, 0.0634015998840332, 0.06446630096435547, 0.06484684753417969, 0.06407373046875, 0.06388460922241211, 0.06351327896118164, 0.06353228759765625, 0.06384716796875, 0.06404220581054687, 0.06424262237548828, 0.06377990341186524, 0.06370691299438476, 0.06379321670532226, 0.0645252456665039, 0.06446284484863281, 0.0639642562866211, 0.06425692749023437, 0.06425804901123047, 0.06451404571533204, 0.0648089599609375, 0.0647452163696289, 0.06442623901367188, 0.06430105590820312, 0.06452633666992187, 0.06456444549560547, 0.06434690856933593, 0.06439730834960937, 0.06435382080078125, 0.06442851257324218, 0.06467369842529297, 0.06487769317626953, 0.06495331573486328, 0.06487245178222656, 0.06469017791748047, 0.06508665466308594, 0.06486297607421875, 0.06461856079101562, 0.06464717102050781, 0.0652548828125, 0.0651719970703125, 0.06501331329345703, 0.06459347534179688, 0.06501055908203125, 0.06483926391601562, 0.06531231689453125, 0.06496524810791016, 0.06502620697021484, 0.06502816009521484, 0.06514486694335937, 0.06560355377197266, 0.06523085021972656, 
0.06514796447753907, 0.06508025360107422, 0.06508748626708985, 0.06505795288085937, 0.06574355316162109, 0.06520374298095703, 0.06483766174316406, 0.06482118225097656, 0.06349974441528321, 0.0644185562133789, 0.06387507247924805, 0.06384806442260742, 0.06429529571533203, 0.06435129547119141, 0.06420780944824218, 0.06401638031005859, 0.06368255996704102, 0.06378700637817383, 0.06412214660644532, 0.06398230361938477, 0.06414540863037109, 0.06424687957763672, 0.06382479858398438, 0.0641638412475586, 0.0642682876586914, 0.06446694183349609, 0.06418425750732422, 0.0643846435546875, 0.06452428436279296, 0.0644919662475586, 0.06454476928710938, 0.06412847900390625, 0.06414755249023438, 0.0644466552734375, 0.06468633270263671, 0.06514688110351563, 0.0647515869140625, 0.06417001342773437, 0.06456729888916016, 0.06470246124267579, 0.06486409759521485, 0.06440771484375, 0.06460825347900391, 0.06501990509033204, 0.0651325454711914, 0.0648622055053711, 0.06458534240722656, 0.06469430541992187, 0.06487206268310547, 0.06499811553955079, 0.06519193267822265, 0.06496614074707031, 0.06498560333251953, 0.06515618896484375, 0.0648221435546875, 0.06485199737548829, 0.06485964965820312, 0.0651146240234375, 0.06533324432373047, 0.0655804443359375, 0.06534921264648437, 0.06510038757324219, 0.06549954986572265, 0.06525949096679687, 0.06528950500488281, 0.06559616088867187, 0.0653803482055664, 0.06531068420410156, 0.06559283447265625, 0.06574671936035156, 0.06551423645019532, 0.06446316528320313, 0.06478233337402343, 0.0642799072265625, 0.06403247833251953, 0.06408201599121094, 0.06466851043701172, 0.06433990478515625, 0.06386870574951171, 0.06353251266479493, 0.06359494400024414, 0.06438291168212891, 0.06428886413574218, 0.06522096252441406, 0.06479462432861328, 0.06410444641113282, 0.06431712341308594, 0.06451436614990234, 0.06500556945800781, 0.06474137878417968, 0.06480076599121094, 0.06452428436279296, 0.06448121643066407, 0.06443218994140625, 0.0639733772277832, 0.06391398239135743, 0.06418022155761718, 0.064499267578125, 0.06469058990478516, 0.06501299285888672, 0.06463158416748047, 0.0647720947265625, 0.06494384002685546, 0.06531228637695312, 0.06518246459960937, 0.0647515869140625, 0.06465129852294922, 0.06533939361572266, 0.06498863983154297, 0.06457596588134766, 0.06455916595458984, 0.06497280120849609, 0.06480646514892578, 0.06494866943359374, 0.06531072235107421, 0.06508953857421874, 0.06498508453369141, 0.06562569427490235, 0.06526608276367188, 0.06507283020019532, 0.06494969940185547, 0.06554502105712891, 0.06540908813476562, 0.06523052978515625, 0.06529440307617188, 0.06545740509033203, 0.06492057800292969, 0.06485606384277344, 0.0652509765625, 0.0653950424194336, 0.06524912261962891, 0.06569999694824219, 0.0662138900756836, 0.06573465728759766, 0.06416694641113281, 0.06463581085205078, 0.06413033294677735, 0.06390044784545898, 0.06406729888916016, 0.06446723175048828, 0.0642023696899414, 0.06393894577026367, 0.06419455718994141, 0.06417203521728515, 0.06402569580078125, 0.06467052459716797, 0.06431922912597657, 0.06405551910400391, 0.06362044906616211, 0.06391817474365234, 0.06445938873291016, 0.06456297302246093, 0.06432588958740235, 0.06489500427246093, 0.0645406723022461, 0.0643683853149414, 0.0649808349609375, 0.06456156921386719, 0.06438706970214844, 0.06436819458007813, 0.06459613037109375, 0.06527529907226562, 0.06449651336669922, 0.06410649871826171, 0.06416793823242188, 0.06477350616455078, 0.06489561462402343, 0.06514006042480469, 0.06501033782958984, 0.06454271697998047, 
0.06497280120849609, 0.06518131256103515, 0.0649742431640625, 0.06494876861572266, 0.06477798461914062, 0.06530265808105469, 0.06505020904541016, 0.06514582061767578, 0.06509929656982422, 0.06488521575927735, 0.0650240020751953, 0.06506495666503906, 0.06513459014892578, 0.06548076629638672, 0.06575430297851563, 0.06542534637451172, 0.0651556167602539, 0.06526799774169922, 0.065617919921875, 0.06573260498046875, 0.06537766265869141, 0.064940673828125, 0.06565631866455078, 0.06566758728027344, 0.06528409576416015, 0.06491545867919922, 0.06548070526123047, 0.06400819396972657, 0.06442803192138671, 0.0641430435180664, 0.06388768005371094, 0.06388300704956054, 0.06410800170898437, 0.06428278350830079, 0.06393283081054688, 0.0639101104736328, 0.06443977355957031, 0.06408246612548828, 0.06379843139648438, 0.06436914825439453, 0.06446934509277344, 0.06426732635498048, 0.06401119995117187, 0.06407743835449219, 0.06412940979003906, 0.06498521423339844, 0.06499520111083984, 0.06471878051757812, 0.06463638305664063, 0.06417369842529297, 0.064629150390625, 0.06481158447265625, 0.06458573150634765, 0.06491693115234375, 0.06469232177734376, 0.06456777954101563, 0.06480806732177734, 0.06470658874511719, 0.06487039947509765, 0.06537302398681641, 0.06492160034179688, 0.06515692901611328, 0.06494841766357422, 0.06446080017089843, 0.06502153778076172, 0.06506861114501954, 0.06487741088867187, 0.06521568298339844, 0.06507807922363282, 0.06499100494384766, 0.06465756988525391, 0.06467913818359375, 0.06466441345214843, 0.06521446228027344, 0.06566297912597656, 0.06559107208251953, 0.06569554901123047, 0.06571356964111329, 0.06517616271972657, 0.0657473602294922, 0.06554828643798828, 0.06535782623291016, 0.06551667022705078, 0.06584937286376953, 0.06537059020996094, 0.06523123168945312, 0.06555648040771485, 0.06509686279296875, 0.06517436981201172, 0.06534963226318359, 0.06410963439941406, 0.06501686096191406, 0.0644525146484375, 0.06417606353759765, 0.06407344055175782, 0.06430665588378906, 0.06421923065185547, 0.06399267196655273, 0.06399516677856446, 0.06421750640869141, 0.06400434875488281, 0.06375177764892578, 0.064372802734375, 0.06449366760253906, 0.06399615859985351, 0.06417565155029296, 0.06400646209716797, 0.06432784271240234, 0.06491136169433594, 0.06509523010253906, 0.0649056625366211, 0.06447030639648438, 0.06432579040527343, 0.06466413116455078, 0.06470655822753907, 0.06454476928710938, 0.06437455749511718, 0.06415923309326171, 0.06444310760498047, 0.06458748626708985, 0.06456348419189453, 0.06456320190429687, 0.06466963195800782, 0.065159423828125, 0.06534124755859375, 0.06547840118408203, 0.06520038604736328, 0.06542047882080078, 0.06511289978027343, 0.06481100463867187, 0.06496256256103515, 0.06510562896728515, 0.06492179107666016, 0.06489449310302735, 0.0649836196899414, 0.06495231628417969, 0.06499305725097657, 0.06564649963378906, 0.06554176330566407, 0.06528396606445312, 0.06505328369140626, 0.06545619201660156, 0.0658597412109375, 0.06533939361572266, 0.06521030426025391, 0.06528620910644531, 0.06545817565917969, 0.06525516510009766, 0.06533548736572266, 0.06552780914306641, 0.06514080047607422, 0.06526726531982421, 0.06550777435302735, 0.06467145538330078, 0.06468991851806641, 0.06447977447509766, 0.06393180847167969, 0.06390403366088868, 0.06408009338378906, 0.06452028656005859, 0.06420486450195312, 0.0639507827758789, 0.0639815673828125, 0.06402252960205078, 0.06466764831542969, 0.06451814270019532, 0.06446604919433593, 0.06395379257202148, 0.06395699310302734, 0.06394012832641602, 
0.06442851257324218, 0.06451609802246094, 0.06486630249023438, 0.06470451354980469, 0.06462393951416015, 0.06465164947509766, 0.0644181137084961, 0.06431446075439454, 0.06449561309814453, 0.06445516967773438, 0.06450176239013672, 0.06478479766845703, 0.06509503936767579, 0.06465145874023437, 0.06444486236572265, 0.0648656005859375, 0.06491539001464844, 0.0650165786743164, 0.06500556945800781, 0.06524723052978515, 0.0655093765258789, 0.06519123077392579, 0.06488134765625, 0.06524028778076171, 0.06500784301757813, 0.06515155029296875, 0.06548684692382813, 0.06512249755859376, 0.06496441650390625, 0.06514035034179687, 0.06523750305175781, 0.06483340454101562, 0.06530662536621094, 0.06602342224121094, 0.06538240051269531, 0.0655417251586914, 0.06576512145996094, 0.06533776092529296, 0.0655400619506836, 0.06553628540039062, 0.06567731475830078, 0.06592454528808593, 0.0655038070678711, 0.06547046661376953, 0.06553782653808594, 0.06570393371582031, 0.06445951843261719, 0.06456886291503906, 0.06415408325195313, 0.0641638412475586, 0.06445875549316406, 0.06465945434570312, 0.06452838134765625, 0.06415769958496094, 0.06401229095458984, 0.06443417358398437, 0.06459391784667969, 0.0641638412475586, 0.06412879943847656, 0.0638400001525879, 0.06409613037109375, 0.06428118133544922, 0.0647515869140625, 0.06491753387451171, 0.06431475067138671, 0.06451264190673828, 0.06491545867919922, 0.06452019500732421, 0.06449561309814453, 0.06431334686279297, 0.06439116668701172, 0.06497068786621094, 0.06455017852783203, 0.06464320373535157, 0.064283203125, 0.0645199966430664, 0.0646761245727539, 0.06487216186523438, 0.06486249542236328, 0.06440755462646484, 0.0652759017944336, 0.0655250244140625, 0.06538880157470703, 0.06476438140869141, 0.06473932647705079, 0.06508745574951172, 0.06543363189697265, 0.0650096664428711, 0.06462435150146484, 0.06490287780761719, 0.06503807830810547, 0.06500434875488281, 0.0653311996459961, 0.06536192321777344, 0.0651878433227539, 0.06569983673095703, 0.06571385955810546, 0.06577798461914063, 0.06542950439453125, 0.06513394927978515, 0.06549155426025391, 0.06589033508300782, 0.06548255920410156, 0.06529043579101562, 0.06519987487792969, 0.06579634857177734, 0.06593087768554687, 0.06566336059570313, 0.06530048370361329, 0.06374399948120117, 0.06455910491943359, 0.06463692474365235, 0.06463868713378906, 0.06437097930908203, 0.06448947143554687, 0.06401638031005859, 0.06443827056884766, 0.06395904159545898, 0.06409830474853516, 0.06428995513916015, 0.06417494201660157, 0.06409011077880859, 0.06523075103759765, 0.06437693023681641, 0.06385843276977539, 0.06405286407470703, 0.06440409851074219, 0.06496665954589843, 0.06540665435791015, 0.06494649505615234, 0.06441964721679687, 0.06458386993408204, 0.06469420623779297, 0.06452025604248048, 0.06449971008300781, 0.06453218841552734, 0.06481743621826172, 0.06529811096191407, 0.06469379425048828, 0.06429718780517578, 0.06453862762451172, 0.06467763519287109, 0.06512429046630859, 0.0653436508178711, 0.06528467559814453, 0.06511427307128906, 0.06529843139648438, 0.06501580810546875, 0.06504646301269532, 0.06485612487792969, 0.0654028778076172, 0.06512025451660156, 0.0651325454711914, 0.0650096664428711, 0.06503369903564453, 0.06553577423095704, 0.06521663665771485, 0.06495219421386719, 0.06545843505859375, 0.06548326110839844, 0.0655645751953125, 0.06531491088867188, 0.06577561950683594, 0.06543309020996094, 0.0652715835571289, 0.06509391784667969, 0.06548652648925782, 0.06556076812744141, 0.0653399658203125, 0.06525542449951172, 
0.06540083312988282, 0.06564774322509766, 0.06395084762573242, 0.06467151641845703, 0.06417021179199218, 0.0638218231201172, 0.06402662658691406, 0.0646983642578125, 0.06420684814453125, 0.06413926696777343, 0.06416998291015626, 0.06398511886596679, 0.06419222259521484, 0.06469446563720703, 0.06447740936279298, 0.06432953643798828, 0.06379292678833008, 0.06401238250732422, 0.06453936004638672, 0.06487245178222656, 0.06458118438720703, 0.06489542388916016, 0.06507315063476563, 0.06492332458496093, 0.06459347534179688, 0.06462089538574219, 0.06458185577392578, 0.06463507080078125, 0.06503756713867187, 0.06466441345214843, 0.06459327697753907, 0.06466000366210937, 0.06453453063964844, 0.06495846557617188, 0.06487449645996093, 0.06479666900634766, 0.06489497375488282, 0.0651325454711914, 0.06535167694091797, 0.06497599792480468, 0.065162109375, 0.06504857635498047, 0.06517145538330078, 0.06510797119140625, 0.06532505798339844, 0.065185791015625, 0.06504608154296875, 0.06472716522216797, 0.06476774597167968, 0.06480953979492188, 0.06577561950683594, 0.06543974304199218, 0.06580223846435547, 0.06532915496826172, 0.06568950653076172, 0.06567657470703125, 0.06545491027832032, 0.06564800262451172, 0.06550182342529297, 0.06569535827636719, 0.06555391693115234, 0.06530137634277344, 0.06534553527832031, 0.06535897827148437, 0.06565094757080078]",tokens/s,15.435464282042043,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1587.03616,1604.190208,0.0,1201.668096,1189.151232,s,1,8.3159912109375,8.3159912109375,0.0,8.3159912109375,8.3159912109375,8.3159912109375,8.3159912109375,[8.3159912109375],,kWh,3.634801674583438e-05,4.001643686964657e-06,1.1978065137988048e-05,5.232772557078708e-05,,MB,1640.620032,1801.322496,0.0,1384.12032,1351.367168,s,10,0.4135705299377441,0.041357052993774415,0.00017739890895268108,0.041310447692871094,0.04144774856567383,0.041655554962158205,0.0418218000793457,"[0.041863361358642576, 0.04124623870849609, 0.0412476806640625, 0.041315967559814454, 0.04140156936645508, 0.041304927825927734, 0.04135299301147461, 0.04129644775390625, 0.041203166961669924, 0.04133817672729492]",tokens/s,6189.996178850953,kWh,1.2652971018940093e-06,1.3953843145637994e-07,8.379287607705967e-07,2.2427642941209858e-06,tokens/kWh,114144852.7030055,MB,1645.121536,1843.265536,0.0,1426.06336,1407.548416,s,10,10.62936572265625,1.0629365722656252,0.004183777323095977,1.062955078125,1.06710595703125,1.0691208496093751,1.070732763671875,"[1.065916748046875, 1.0711357421875, 1.059759765625, 1.066658203125, 1.0625648193359376, 1.0633453369140624, 1.05744677734375, 1.056900146484375, 1.06080908203125, 
1.0648291015625]",tokens/s,59.26976420212631,kWh,4.184492875435695e-05,4.615108470122712e-06,1.935508739362835e-05,6.581512461810802e-05,tokens/kWh,957226.782833843,,s,630,10.626221904754635,0.016867018896435933,0.00024727151265993124,0.016802687644958497,0.017083324241638182,0.01725263395309448,0.0180522176361084,"[0.016684864044189455, 0.016785600662231445, 0.01675468826293945, 0.01678335952758789, 0.016697343826293946, 0.017145856857299805, 0.017501760482788085, 0.01744895935058594, 0.01849795150756836, 0.01746895980834961, 0.017252864837646483, 0.01710041618347168, 0.01700217628479004, 0.0169902400970459, 0.016926399230957033, 0.016825471878051758, 0.016753536224365234, 0.01674710464477539, 0.01670591926574707, 0.01678335952758789, 0.017311744689941407, 0.01702707290649414, 0.017151168823242188, 0.01674505615234375, 0.016761056900024413, 0.0167521915435791, 0.01686582374572754, 0.016693151473999024, 0.0168441276550293, 0.017377952575683593, 0.016834592819213866, 0.016914112091064453, 0.016793888092041017, 0.01681612777709961, 0.01696767997741699, 0.01683772850036621, 0.016892831802368165, 0.0167956485748291, 0.016756736755371093, 0.01681612777709961, 0.016766176223754883, 0.01679420852661133, 0.016726144790649412, 0.016775327682495116, 0.016891807556152345, 0.01680179214477539, 0.016803295135498046, 0.016855712890625, 0.01679759979248047, 0.01695676803588867, 0.016792383193969727, 0.016867136001586912, 0.016770912170410157, 0.016808256149291993, 0.016770624160766603, 0.016830368041992186, 0.016868928909301757, 0.016841312408447266, 0.01680099105834961, 0.01677824020385742, 0.016932863235473633, 0.01679158401489258, 0.016858335494995116, 0.016876287460327148, 0.01693414306640625, 0.01747430419921875, 0.01680998420715332, 0.016717824935913086, 0.016776512145996094, 0.017226560592651367, 0.017315263748168944, 0.016796096801757813, 0.01677248001098633, 0.017029760360717773, 0.01692982482910156, 0.016832992553710936, 0.016848800659179687, 0.016904800415039063, 0.01676697540283203, 0.016748544692993163, 0.016848608016967772, 0.016865568161010744, 0.016934911727905275, 0.016959007263183595, 0.016782848358154297, 0.01680240058898926, 0.01711756706237793, 0.01698329544067383, 0.01705174446105957, 0.017164064407348634, 0.016776063919067382, 0.017035135269165037, 0.016860607147216797, 0.01696224021911621, 0.01689097595214844, 0.017046272277832033, 0.017038623809814454, 0.01685798454284668, 0.01682569694519043, 0.01681167984008789, 0.016884735107421875, 0.016897119522094727, 0.01675152015686035, 0.01676697540283203, 0.01684432029724121, 0.016787519454956056, 0.01683260726928711, 0.01685273551940918, 0.01720582389831543, 0.01730143928527832, 0.017406015396118163, 0.017268735885620116, 0.017071168899536134, 0.017013696670532225, 0.01703910446166992, 0.016962879180908202, 0.01713862419128418, 0.016873472213745116, 0.016776384353637694, 0.01739788818359375, 0.01801491165161133, 0.017072128295898437, 0.01693401527404785, 0.01684979248046875, 0.018550111770629884, 0.017031583786010742, 0.016734207153320312, 0.016859136581420898, 0.016921600341796874, 0.016763904571533202, 0.01667625617980957, 0.016752511978149413, 0.016750656127929687, 0.016699296951293945, 0.016917247772216797, 0.01673539161682129, 0.016716800689697265, 0.016745376586914062, 0.016772031784057617, 0.016709632873535156, 0.016796800613403322, 0.016640895843505858, 0.01665951919555664, 0.016657119750976564, 0.016795072555541992, 0.016712480545043946, 0.016676864624023437, 0.016912351608276366, 0.016842815399169923, 0.016805856704711915, 
0.016752639770507814, 0.016774175643920898, 0.016718624114990234, 0.01669340705871582, 0.01681001663208008, 0.01680384063720703, 0.016822559356689453, 0.01674166488647461, 0.01685139274597168, 0.01674835205078125, 0.016744640350341795, 0.016738016128540038, 0.01686895942687988, 0.016792255401611327, 0.016728063583374024, 0.0167587833404541, 0.01677107238769531, 0.016930816650390625, 0.016804927825927733, 0.016810943603515625, 0.01684480094909668, 0.01682022476196289, 0.016965215682983398, 0.01713961601257324, 0.016961183547973633, 0.016925535202026366, 0.01700399971008301, 0.017185312271118164, 0.017180416107177736, 0.016976127624511717, 0.016868768692016603, 0.01683011245727539, 0.016919200897216796, 0.01686966323852539, 0.01679769515991211, 0.016721920013427736, 0.0168175048828125, 0.016828672409057617, 0.016951711654663085, 0.01681875228881836, 0.016835903167724608, 0.017015487670898437, 0.016784383773803712, 0.016888832092285155, 0.01679974365234375, 0.01678108787536621, 0.016750816345214845, 0.017031167984008787, 0.016875520706176757, 0.016767040252685547, 0.016877504348754884, 0.016811904907226564, 0.01698409652709961, 0.01682032012939453, 0.016959232330322267, 0.016799999237060548, 0.016750303268432618, 0.01686147117614746, 0.01717635154724121, 0.016862880706787108, 0.018469024658203125, 0.017827583312988282, 0.01684147262573242, 0.01676483154296875, 0.016764928817749023, 0.016719871520996094, 0.01703321647644043, 0.016752639770507814, 0.016805055618286133, 0.01674118423461914, 0.01683216094970703, 0.016786943435668944, 0.01687843132019043, 0.01684480094909668, 0.016887807846069337, 0.01679155158996582, 0.016846847534179688, 0.01680588722229004, 0.016952352523803713, 0.017015167236328125, 0.017224288940429686, 0.017360895156860352, 0.01719500732421875, 0.016987968444824218, 0.01696767997741699, 0.016806079864501954, 0.016840320587158203, 0.01698240089416504, 0.016867328643798828, 0.01677471923828125, 0.016744895935058592, 0.016732160568237304, 0.01679769515991211, 0.0167314567565918, 0.01674720001220703, 0.01699635124206543, 0.016856800079345702, 0.016916767120361328, 0.01700044822692871, 0.016961536407470702, 0.01721548843383789, 0.016893951416015626, 0.017100799560546876, 0.017115135192871094, 0.016885759353637696, 0.016819456100463866, 0.016818944931030273, 0.01718272018432617, 0.017164287567138673, 0.01713324737548828, 0.017123071670532227, 0.016831039428710937, 0.016746496200561522, 0.016728063583374024, 0.0167969913482666, 0.01699705505371094, 0.016859136581420898, 0.016777215957641603, 0.016727680206298827, 0.016783744812011718, 0.016736255645751954, 0.016815391540527344, 0.016804576873779297, 0.0167890567779541, 0.016751039505004884, 0.016756736755371093, 0.01670262336730957, 0.016796512603759764, 0.016735776901245118, 0.01676278305053711, 0.016673343658447266, 0.016965503692626952, 0.01681011199951172, 0.01684889602661133, 0.01681407928466797, 0.016943328857421874, 0.01696476745605469, 0.01676310348510742, 0.016700992584228514, 0.016794368743896483, 0.016715871810913087, 0.01680179214477539, 0.016774303436279298, 0.01678998374938965, 0.016771039962768554, 0.01681654357910156, 0.016871423721313478, 0.016846847534179688, 0.01676697540283203, 0.01683865547180176, 0.01679155158996582, 0.016965631484985352, 0.017001663208007813, 0.016869823455810548, 0.016736448287963866, 0.016754079818725585, 0.01692956733703613, 0.016928703308105468, 0.016862720489501954, 0.016943679809570313, 0.017043455123901367, 0.016990207672119142, 0.016977920532226562, 0.016988256454467773, 
0.016814048767089845, 0.01697110366821289, 0.016883039474487306, 0.016769088745117188, 0.01675971221923828, 0.016690431594848634, 0.01671014404296875, 0.01675270462036133, 0.018353439331054686, 0.016872352600097656, 0.01678982353210449, 0.016725696563720704, 0.016731712341308595, 0.016675264358520507, 0.01687331199645996, 0.016780799865722656, 0.01679417610168457, 0.01666396713256836, 0.016704191207885744, 0.016670719146728515, 0.01681350326538086, 0.016831039428710937, 0.01678102493286133, 0.01666486358642578, 0.016713727951049806, 0.016695167541503905, 0.01680601692199707, 0.01742848014831543, 0.0168154239654541, 0.016775232315063476, 0.016822912216186522, 0.016697343826293946, 0.01718675231933594, 0.01699184036254883, 0.01674083137512207, 0.01681135940551758, 0.016685728073120118, 0.018067455291748045, 0.01678950309753418, 0.017086463928222655, 0.016687103271484375, 0.016730304718017577, 0.016742080688476563, 0.016656639099121094, 0.016768863677978516, 0.01702412796020508, 0.016714048385620118, 0.016699199676513673, 0.016751264572143553, 0.01726678466796875, 0.017030176162719728, 0.01701785659790039, 0.016920576095581053, 0.01743449592590332, 0.016765056610107423, 0.016743616104125978, 0.01708297538757324, 0.016756959915161133, 0.01683456039428711, 0.01683251190185547, 0.016850944519042968, 0.016803199768066407, 0.016797344207763673, 0.0168353271484375, 0.017010719299316406, 0.016857248306274414, 0.016769184112548827, 0.016725055694580077, 0.016706207275390624, 0.01670547294616699, 0.016781312942504883, 0.016819776535034178, 0.016724416732788086, 0.016794912338256834, 0.016782047271728516, 0.0167096004486084, 0.016795679092407225, 0.01690380859375, 0.016738496780395507, 0.016789695739746095, 0.016719423294067382, 0.016685440063476564, 0.017223392486572266, 0.01679759979248047, 0.016778976440429687, 0.016808671951293944, 0.01681612777709961, 0.016889120101928713, 0.016926591873168945, 0.016759647369384765, 0.016901760101318358, 0.01676736068725586, 0.016680959701538087, 0.01681808090209961, 0.01682236862182617, 0.01715376091003418, 0.01680182456970215, 0.016689151763916017, 0.016697343826293946, 0.016661056518554686, 0.01671340751647949, 0.01666771125793457, 0.016861343383789064, 0.016671520233154297, 0.016989856719970702, 0.016693599700927736, 0.01683865547180176, 0.016750591278076172, 0.016738304138183592, 0.01680188751220703, 0.016852895736694337, 0.0167871036529541, 0.016689344406127928, 0.01675894355773926, 0.016853120803833006, 0.016789375305175783, 0.01677516746520996, 0.016719711303710937, 0.01670979118347168, 0.016732160568237304, 0.01671603202819824, 0.016710399627685547, 0.0166997127532959, 0.01666486358642578, 0.01665011215209961, 0.01664463996887207, 0.016728063583374024, 0.016807903289794923, 0.01667068862915039, 0.01675267219543457, 0.016660703659057616, 0.016624992370605468, 0.016671167373657227, 0.016717824935913086, 0.016730112075805666, 0.01665433692932129, 0.01662931251525879, 0.016722368240356444, 0.016652288436889647, 0.016728063583374024, 0.016683231353759764, 0.016813407897949217, 0.016751039505004884, 0.01665023994445801, 0.016711679458618164, 0.01671139144897461, 0.01666227149963379, 0.016734752655029297, 0.016701440811157226, 0.0166297607421875, 0.016672704696655275, 0.01663609504699707, 0.016790912628173827, 0.01671014404296875, 0.01679974365234375, 0.016608383178710936, 0.01663065528869629, 0.016719520568847655, 0.016695327758789062, 0.016676191329956055, 0.016722719192504884, 0.016731487274169923, 0.01684115219116211, 0.016797536849975585, 
0.017044031143188476, 0.01677916717529297, 0.016845983505249025, 0.016790687561035158, 0.016711679458618164, 0.016776992797851564, 0.01687936019897461, 0.016817695617675783, 0.01714044761657715, 0.016741472244262694, 0.016733087539672852, 0.016678911209106445, 0.01679769515991211, 0.01682636833190918, 0.016797088623046876, 0.016804447174072267, 0.01685091209411621, 0.017209152221679687, 0.01701411247253418, 0.01701356887817383, 0.016922687530517577, 0.01698374366760254, 0.01692038345336914, 0.016860736846923827, 0.016802751541137695, 0.01681942367553711, 0.016779232025146484, 0.016786527633666993, 0.016710079193115235, 0.01676969528198242, 0.01672515106201172, 0.016793952941894532, 0.016795967102050783, 0.01680998420715332, 0.01677017593383789, 0.016823135375976562, 0.01673423957824707, 0.01677027130126953, 0.016717695236206056, 0.016675392150878907, 0.016722335815429687, 0.016713119506835936, 0.017016735076904297, 0.01914124870300293, 0.01721958351135254, 0.016964832305908204, 0.016827007293701172, 0.016799232482910157, 0.016734880447387697, 0.01670479965209961, 0.01671446418762207, 0.016715103149414063, 0.01682633590698242, 0.016734912872314454, 0.016746496200561522, 0.016752639770507814, 0.01679155158996582, 0.01687868881225586, 0.01693996810913086, 0.016891263961791993, 0.01692323112487793, 0.016840288162231445, 0.016697759628295897, 0.016949247360229493, 0.016854496002197267, 0.01683523178100586, 0.016801536560058592, 0.017168512344360353, 0.016868799209594727, 0.01674435234069824, 0.016917152404785155, 0.016891904830932617, 0.016764928817749023, 0.01674998474121094, 0.016804447174072267, 0.016644096374511717, 0.01665433692932129, 0.016645151138305662, 0.01662870407104492, 0.01662745666503906, 0.016633600234985352, 0.016690752029418946, 0.01669215965270996, 0.01665017509460449, 0.01694451141357422, 0.01686783981323242, 0.016846111297607422, 0.016835487365722657, 0.016909471511840822, 0.016715423583984375, 0.016666976928710938, 0.016639999389648438, 0.016715551376342775, 0.01668070411682129, 0.01664828872680664, 0.01668953514099121, 0.016705184936523437, 0.016699743270874024, 0.016828256607055662, 0.017410207748413085, 0.017988639831542967, 0.018238431930541994, 0.01696767997741699, 0.016902143478393555, 0.01700655937194824, 0.017256479263305664, 0.016711679458618164, 0.016646047592163087, 0.016815744400024413, 0.016804319381713867, 0.016671743392944336, 0.016755680084228515, 0.016745920181274413, 0.01706847953796387, 0.016949407577514647, 0.016887807846069337, 0.016715423583984375, 0.016685216903686524, 0.016736064910888672, 0.016692928314208984, 0.016921279907226562, 0.01677926445007324, 0.016770240783691406, 0.016802623748779298, 0.01685683250427246, 0.016795904159545898, 0.01678745651245117, 0.016766496658325195, 0.01678531265258789, 0.016715551376342775, 0.01675071907043457, 0.016734176635742188, 0.016673471450805662, 0.016777215957641603, 0.016707456588745118, 0.016693376541137697, 0.016662527084350585, 0.01665996742248535, 0.016716287612915038, 0.016842752456665038, 0.01700864028930664, 0.017297407150268555, 0.017299455642700197, 0.01724825668334961, 0.017252351760864256, 0.017380640029907225, 0.01731043243408203, 0.017034847259521483, 0.01689846420288086, 0.016837791442871095, 0.016956256866455077, 0.016815839767456056]",tokens/s,59.28729944159272,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,6770.151424,7769.817088,0.0,7367.294976,7351.94368,s,1,12.8343544921875,12.8343544921875,0.0,12.8343544921875,12.8343544921875,12.8343544921875,12.8343544921875,[12.8343544921875],,kWh,0.00016262083862498002,1.7930771709125052e-05,5.188948595599707e-05,0.00023244109629010215,,MB,1428.455424,8411.5456,0.0,7994.343424,7863.794176,s,10,3.007503631591797,0.3007503631591797,0.0007778213261312478,0.3005518493652344,0.3018658843994141,0.30186633758544923,0.30186670013427735,"[0.2992444152832031, 0.30049298095703125, 0.3002258605957031, 0.3003512878417969, 0.3006107177734375, 0.30155010986328123, 0.30186578369140626, 0.30041238403320314, 0.30088330078125, 0.3018667907714844]",tokens/s,851.2042921940066,kWh,8.817051880515135e-06,9.717589426134103e-07,5.867798811882332e-06,1.5656609635010876e-05,tokens/kWh,16350921.813081415,MB,1449.08288,8684.17536,0.0,8266.973184,8120.408064,s,10,24.71263427734375,2.471263427734375,0.00292096157586033,2.471947998046875,2.4749196044921873,2.475168664550781,2.475367912597656,"[2.46732470703125, 
2.467976318359375, 2.466483642578125, 2.47139501953125, 2.47119091796875, 2.472824951171875, 2.4725009765625, 2.47265576171875, 2.475417724609375, 2.4748642578125]",tokens/s,25.49303295349523,kWh,7.265530000698256e-05,8.01449383128984e-06,4.825141115011931e-05,0.00012892120498839176,tokens/kWh,488670.57987607707,,s,630,24.7095817222595,0.03922155828930083,0.00036437899605466735,0.03920817565917969,0.039703884506225584,0.039806880950927735,0.04005883972167969,"[0.03881321716308594, 0.038777313232421874, 0.038645759582519534, 0.03865804672241211, 0.039124256134033204, 0.038887744903564454, 0.03889129638671875, 0.038855072021484374, 0.038717121124267576, 0.03856643295288086, 0.03865599822998047, 0.03866828918457031, 0.038801406860351564, 0.03881788635253906, 0.03878051376342773, 0.03885830307006836, 0.038855422973632814, 0.038795265197753906, 0.03900620651245117, 0.03888332748413086, 0.038973438262939454, 0.03908403015136719, 0.03906054306030273, 0.0388105583190918, 0.03889152145385742, 0.038948863983154294, 0.038885505676269534, 0.03916905593872071, 0.039105377197265624, 0.03909632110595703, 0.03892633438110352, 0.03954051208496094, 0.03961468887329102, 0.03925382232666016, 0.03911648178100586, 0.03908659362792969, 0.039200736999511716, 0.03924153518676758, 0.03912931060791015, 0.039019519805908204, 0.039169025421142575, 0.03928448104858399, 0.03921913528442383, 0.03920518493652344, 0.03921049499511719, 0.03922751998901367, 0.03908563232421875, 0.03937472152709961, 0.039609279632568356, 0.03954278564453125, 0.03947520065307617, 0.03953049468994141, 0.039710430145263674, 0.03959017562866211, 0.03953782272338867, 0.03971772766113281, 0.03973904037475586, 0.03973500823974609, 0.03999756622314453, 0.03967574310302734, 0.03985062408447266, 0.03976217651367187, 0.03954051208496094, 0.039265758514404295, 0.038625823974609376, 0.03847091293334961, 0.038494976043701175, 0.03858454513549805, 0.038758174896240234, 0.038752254486083985, 0.03869286346435547, 0.03901993560791016, 0.038998622894287106, 0.038828033447265625, 0.03895203018188476, 0.038679454803466795, 0.03858160018920898, 0.038719264984130856, 0.03872608184814453, 0.0388935661315918, 0.03888140869140625, 0.038830398559570316, 0.03901801681518555, 0.038943199157714846, 0.03896086502075195, 0.038786945343017576, 0.03882640075683594, 0.039624702453613284, 0.03950982284545899, 0.039317249298095706, 0.03875680160522461, 0.03870515060424805, 0.039051265716552735, 0.03899596786499023, 0.03909427261352539, 0.03927164840698242, 0.039524864196777344, 0.039465248107910154, 0.039290206909179684, 0.03930755233764648, 0.03924825668334961, 0.039165824890136716, 0.03937497711181641, 0.03933180618286133, 0.03947638320922851, 0.03943017578125, 0.03933884811401367, 0.03935846328735351, 0.03925116729736328, 0.039127361297607424, 0.039126846313476564, 0.03927657699584961, 0.03946355056762695, 0.03950150299072266, 0.0394832649230957, 0.03946662521362305, 0.0393135986328125, 0.03932735824584961, 0.03963187026977539, 0.03979375839233398, 0.039768863677978515, 0.039682174682617186, 0.03962595367431641, 0.04024195098876953, 0.0399749755859375, 0.039693984985351566, 0.03864780807495117, 0.03869283294677734, 0.03872361755371094, 0.038577342987060545, 0.0385043830871582, 0.0384582405090332, 0.03851593780517578, 0.0384818229675293, 0.03871833419799805, 0.03873791885375977, 0.038703102111816406, 0.039034881591796876, 0.0390041618347168, 0.03895024108886719, 0.03882870483398437, 0.03882201766967774, 0.038784896850585934, 0.03892412948608399, 0.03884268951416016, 
0.03892726516723633, 0.03886371231079101, 0.03894460678100586, 0.03917776107788086, 0.039178272247314454, 0.03912774276733398, 0.03901785659790039, 0.039026878356933595, 0.03903087997436523, 0.03923081588745117, 0.039218177795410154, 0.03907321548461914, 0.03897727966308594, 0.039000896453857424, 0.03905449676513672, 0.039157726287841796, 0.039029632568359375, 0.03909222412109375, 0.03926595306396485, 0.03923126220703125, 0.03926278305053711, 0.039402782440185545, 0.03952905654907227, 0.03977638244628906, 0.039660575866699216, 0.039384063720703126, 0.03952841567993164, 0.03973062515258789, 0.039604801177978516, 0.03959584045410156, 0.03942828750610351, 0.039454559326171874, 0.03965292739868164, 0.03958230209350586, 0.03947315216064453, 0.03932995223999024, 0.03941487884521484, 0.039548736572265625, 0.03958073425292969, 0.03958367919921875, 0.0395873908996582, 0.03954492950439453, 0.039463230133056644, 0.03950364685058594, 0.03920585632324219, 0.03890156936645508, 0.03884236907958984, 0.03868467330932617, 0.03909225463867187, 0.03910960006713867, 0.038757118225097656, 0.038752288818359376, 0.0387108154296875, 0.038617023468017576, 0.03863628768920899, 0.038768638610839845, 0.03878092956542969, 0.038714622497558596, 0.0386578254699707, 0.03869769668579102, 0.03905279922485352, 0.0390838394165039, 0.039000320434570315, 0.03894547271728516, 0.039049217224121094, 0.03896646499633789, 0.03896387100219727, 0.03881590270996094, 0.03883200073242187, 0.039059070587158205, 0.0391064338684082, 0.0390437126159668, 0.039095966339111325, 0.03928508758544922, 0.039258113861083986, 0.03921273422241211, 0.039100734710693356, 0.03966566467285156, 0.03966287994384766, 0.039336318969726565, 0.039389537811279296, 0.039290721893310544, 0.039077247619628906, 0.03907987213134766, 0.039183006286621094, 0.03913852691650391, 0.039130081176757814, 0.03948255920410156, 0.0395720329284668, 0.03963040161132812, 0.039497440338134765, 0.04005372619628906, 0.04006092834472656, 0.039806880950927735, 0.03956556701660156, 0.03939311981201172, 0.039325439453125, 0.03952592086791992, 0.0397215690612793, 0.039626880645751955, 0.03955507278442383, 0.039577598571777346, 0.03944572830200195, 0.03958563232421875, 0.04006803131103515, 0.0400423698425293, 0.039806880950927735, 0.03918841552734375, 0.038800609588623046, 0.03859711837768555, 0.038711647033691406, 0.03868467330932617, 0.03864371109008789, 0.03871535873413086, 0.038817089080810545, 0.03899603271484375, 0.039070369720458985, 0.03878883361816406, 0.0386317138671875, 0.038647167205810545, 0.03864025497436523, 0.03913459014892578, 0.039160446166992186, 0.03893654251098633, 0.0389529914855957, 0.03892633438110352, 0.03877068710327149, 0.03902873611450195, 0.03896934509277344, 0.038938175201416014, 0.03897110366821289, 0.03917488098144531, 0.03921491241455078, 0.039190399169921876, 0.03937721633911133, 0.039066913604736325, 0.039195358276367186, 0.03909427261352539, 0.03909417724609375, 0.03924921417236328, 0.039273246765136716, 0.03922739028930664, 0.03947708892822266, 0.039319713592529296, 0.039417407989501954, 0.03966918563842774, 0.03935334396362305, 0.039360511779785154, 0.0396266860961914, 0.039478782653808595, 0.03942867279052734, 0.039327423095703126, 0.039356735229492186, 0.03930521774291992, 0.03948252868652344, 0.03947577667236328, 0.03956911849975586, 0.0399222412109375, 0.03957712173461914, 0.03972963333129883, 0.03958499145507813, 0.03955791854858398, 0.03952640151977539, 0.03944144058227539, 0.03948847961425781, 0.03947235107421875, 0.03951273727416992, 
0.03985625457763672, 0.039841793060302735, 0.039825408935546876, 0.03877507019042969, 0.03871686553955078, 0.03868905639648437, 0.03888127899169922, 0.03893673706054687, 0.03896432113647461, 0.03918876647949219, 0.038928352355957034, 0.03887724685668945, 0.03869331359863281, 0.03865919876098633, 0.03881430435180664, 0.03883647918701172, 0.03876630401611328, 0.039043392181396484, 0.03911676788330078, 0.038995998382568356, 0.038934528350830076, 0.03894038391113281, 0.03922358322143555, 0.0392540168762207, 0.03903065490722656, 0.038884769439697264, 0.038855327606201175, 0.03888457489013672, 0.039008255004882815, 0.03899859237670898, 0.03906383895874024, 0.03907993698120117, 0.03901779174804688, 0.03934016036987305, 0.039330368041992185, 0.03951615905761719, 0.03943587112426758, 0.039444000244140624, 0.03931430435180664, 0.03962879943847656, 0.039550975799560545, 0.03957350540161133, 0.03955276870727539, 0.03970816040039062, 0.039598846435546876, 0.03933388900756836, 0.03929702377319336, 0.0393072624206543, 0.03940512084960938, 0.039455169677734374, 0.03931545639038086, 0.03931340789794922, 0.03927068710327149, 0.039541950225830076, 0.03953718566894531, 0.03942604827880859, 0.03949772644042969, 0.03975167846679688, 0.039725055694580076, 0.03962812805175781, 0.03992822265625, 0.039764225006103514, 0.039600128173828124, 0.03961222457885742, 0.0398460807800293, 0.03990323257446289, 0.03934220886230469, 0.03896918487548828, 0.03878313446044922, 0.038610206604003904, 0.03833932876586914, 0.03845497512817383, 0.03860713577270508, 0.038675937652587894, 0.038724128723144534, 0.03884560012817383, 0.03879817581176758, 0.03898777770996094, 0.03901968002319336, 0.03897740936279297, 0.03928329467773437, 0.03923987197875976, 0.03879955291748047, 0.038731712341308594, 0.03899302291870117, 0.0391894416809082, 0.039051265716552735, 0.03898777770996094, 0.038972991943359375, 0.038961601257324216, 0.039387134552001955, 0.03935641479492188, 0.03927040100097656, 0.03911884689331055, 0.03911065673828125, 0.03930476760864258, 0.03930764770507812, 0.03932985687255859, 0.03929859161376953, 0.03962928009033203, 0.0391632308959961, 0.039060127258300784, 0.03905487823486328, 0.039026817321777346, 0.03947760009765625, 0.0394788818359375, 0.03936502456665039, 0.03929836654663086, 0.0393325424194336, 0.03940556716918946, 0.03950899124145508, 0.03941068649291992, 0.03962265777587891, 0.03961180877685547, 0.03953251266479492, 0.040066879272460935, 0.03984656143188477, 0.03959772872924805, 0.03945475387573242, 0.039553504943847656, 0.03969843292236328, 0.03963264083862305, 0.039548351287841794, 0.03955161666870117, 0.03977580642700195, 0.03973731231689453, 0.03955984115600586, 0.03965542221069336, 0.039723007202148435, 0.039141822814941406, 0.03871337509155273, 0.03872079849243164, 0.03879600143432617, 0.03887104034423828, 0.03873721694946289, 0.03876726531982422, 0.0389714241027832, 0.03896912002563477, 0.03891632080078125, 0.03920230484008789, 0.0390557746887207, 0.038776927947998044, 0.03866828918457031, 0.03875577545166015, 0.039072032928466796, 0.039098495483398436, 0.03893468856811524, 0.038788352966308594, 0.038914817810058594, 0.03914700698852539, 0.03945523071289062, 0.03929292678833008, 0.039167232513427734, 0.03908060836791992, 0.038991966247558595, 0.039019775390625, 0.03887590408325195, 0.03892841720581055, 0.038995681762695314, 0.039106815338134766, 0.03916185760498047, 0.0392309455871582, 0.03906000137329101, 0.03912857437133789, 0.03944252777099609, 0.03955094528198242, 0.03946950531005859, 0.03934822463989258, 
0.039390430450439454, 0.03960502243041992, 0.03949977493286133, 0.039485439300537106, 0.039362560272216796, 0.03933184051513672, 0.03943116760253906, 0.03981190490722656, 0.03983174514770508, 0.03966566467285156, 0.039703807830810546, 0.03948588943481445, 0.03936422348022461, 0.03922195053100586, 0.03939897537231445, 0.039635391235351564, 0.03982745742797852, 0.03986022567749024, 0.03966566467285156, 0.03951161575317383, 0.039635391235351564, 0.03973734283447266, 0.03984384155273438, 0.039725055694580076, 0.039215328216552735, 0.03903862380981445, 0.03898185729980469, 0.03918656158447266, 0.03907731246948242, 0.03875897598266602, 0.038596607208251955, 0.03846899032592774, 0.0385399055480957, 0.038833377838134765, 0.038795486450195316, 0.03870483016967773, 0.039168895721435545, 0.03947686386108398, 0.03894480133056641, 0.03914912033081055, 0.039002910614013675, 0.03887308883666992, 0.0389857292175293, 0.03912879943847656, 0.03923791885375977, 0.03910246276855469, 0.03905913543701172, 0.03882835388183594, 0.039241729736328126, 0.0389936637878418, 0.039141120910644533, 0.03928319931030273, 0.03932160186767578, 0.03921075057983398, 0.03939132690429688, 0.0394521598815918, 0.03927107238769531, 0.03918806457519531, 0.0391376953125, 0.03914691162109375, 0.039282398223876955, 0.03930729675292969, 0.039494495391845706, 0.039436286926269534, 0.03933174514770508, 0.03916809463500977, 0.03913318252563477, 0.03929033660888672, 0.039567745208740235, 0.03975183868408203, 0.03964313507080078, 0.03952371215820313, 0.03958348846435547, 0.03976486587524414, 0.039820865631103514, 0.039739776611328125, 0.03964838409423828, 0.03958227157592773, 0.03977664184570313, 0.03975104141235351, 0.03965331268310547, 0.04022342300415039, 0.03990937423706055, 0.0396759033203125, 0.03963187026977539, 0.03965350341796875, 0.03979558563232422, 0.03878675079345703, 0.038596927642822264, 0.03853311920166016, 0.03841804885864258, 0.03860518264770508, 0.038811649322509766, 0.038776832580566405, 0.03880527877807617, 0.039176033020019534, 0.03894515228271484, 0.039090175628662106, 0.039032833099365234, 0.03893657684326172, 0.03920281600952148, 0.03917824172973633, 0.03909836959838867, 0.0390041618347168, 0.03900620651245117, 0.038950912475585936, 0.03885385513305664, 0.03899062347412109, 0.038975486755371096, 0.03890995025634766, 0.03920281600952148, 0.03922108840942383, 0.03910867309570312, 0.039032352447509765, 0.038986305236816406, 0.039294975280761715, 0.0393436164855957, 0.03915622329711914, 0.03919190216064453, 0.039373470306396485, 0.03940761566162109, 0.039378944396972655, 0.03970457458496094, 0.03951542282104492, 0.03933414459228515, 0.03930364990234375, 0.039273727416992185, 0.03926630401611328, 0.039201534271240235, 0.03957145690917969, 0.039534591674804685, 0.03947516632080078, 0.04006095886230469, 0.03987865447998047, 0.03974758529663086, 0.039624126434326175, 0.039586368560791015, 0.03952025604248047, 0.039511585235595705, 0.03970300674438477, 0.039725055694580076, 0.03955712127685547, 0.03952025604248047, 0.039501823425292966, 0.039634944915771485, 0.04006707382202149, 0.03997859191894531, 0.03981548690795898, 0.039712223052978515, 0.039860862731933594]",tokens/s,25.496182294031595,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,878.026752,662.56896,0.0,260.046848,258.555392,s,1,7.6678076171875,7.6678076171875,0.0,7.6678076171875,7.6678076171875,7.6678076171875,7.6678076171875,[7.6678076171875],,kWh,1.514884169165877e-05,1.6637280518077142e-06,4.54222585599684e-06,2.1354795599463324e-05,,MB,1320.751104,759.037952,0.0,341.835776,317.950464,s,18,0.19700390338897705,0.010944661299387613,0.00017890293056821031,0.010882383823394775,0.011055404758453369,0.011227078580856323,0.011532327489852903,"[0.010776960372924804, 0.01088105583190918, 0.01087551975250244, 0.010871904373168945, 0.010880991935729981, 0.010887968063354491, 0.010862624168395996, 0.010838047981262207, 0.01160863971710205, 0.010870752334594726, 0.01085968017578125, 0.010930591583251954, 0.011010687828063965, 0.010936063766479492, 0.010942815780639648, 0.011159744262695313, 0.01088371181488037, 0.010926143646240234]",tokens/s,23390.399483109082,kWh,4.137287959690757e-07,4.5623656667920546e-08,1.9540619976206332e-07,6.547586523990596e-07,tokens/kWh,390983760.293975,MB,1334.74304,786.300928,0.0,369.098752,317.953024,s,18,10.090893615722656,0.5606052008734809,0.0018227325385220328,0.5606619567871094,0.5621151489257813,0.5640866485595702,0.5654816363525391,"[0.5598701782226563, 0.5607464599609375, 0.5606204833984375, 0.561118896484375, 0.5585742797851563, 0.5607034301757813, 0.5637789306640625, 0.5601032104492187, 0.5585845947265625, 0.561402099609375, 0.5605842895507812, 0.5658303833007813, 0.5583926391601562, 0.5607605590820313, 0.5583001098632813, 0.5611235961914063, 0.5611129150390625, 0.5592865600585938]",tokens/s,112.37855071954289,kWh,2.19564079248649e-05,2.421437808531193e-06,7.939937610095888e-06,3.231778334349198e-05,tokens/kWh,1949391.1240879295,,s,1134,10.08220954036713,0.008890837337184414,0.00014483302994854627,0.008857615947723389,0.008994105529785155,0.009098606204986573,0.009525899362564089,"[0.008640512466430664, 0.008859295845031739, 0.008829279899597168, 0.00890880012512207, 0.008984864234924317, 0.008850303649902344, 0.008854399681091309, 0.00910364818572998, 0.008900256156921386, 0.008843296051025391, 0.008849408149719238, 0.008875167846679688, 0.008831839561462402, 0.008849247932434082, 0.008869695663452148, 0.008918399810791016, 0.009139295578002929, 0.008925151824951173, 0.008859359741210937, 0.009140704154968262, 0.008914143562316894, 0.008895135879516602, 0.008879520416259766, 0.008839615821838379, 0.008916416168212891, 0.00888419246673584, 0.008884511947631835, 0.008845376014709474, 0.008890496253967286, 0.008840448379516601, 0.008983424186706543, 0.008822784423828126, 0.008842944145202637, 0.00883465576171875, 0.008866880416870117, 0.008852288246154786, 0.008829216003417968, 0.008900768280029297, 0.008877568244934082, 0.008827808380126954, 0.008852671623229981, 0.008807231903076172, 0.008814592361450196, 0.008844448089599609, 
0.008843296051025391, 0.008840000152587891, 0.008865535736083984, 0.008863871574401855, 0.00885580825805664, 0.00884928035736084, 0.008857600212097168, 0.008814592361450196, 0.008889535903930663, 0.00887222385406494, 0.008905247688293457, 0.008889984130859375, 0.008945376396179198, 0.008911520004272462, 0.008886048316955567, 0.008835455894470216, 0.00884275245666504, 0.008876543998718261, 0.008945568084716797, 0.008675328254699707, 0.008900896072387695, 0.008879167556762695, 0.008919103622436524, 0.008878879547119141, 0.008889504432678223, 0.008920831680297852, 0.00888105583190918, 0.008897664070129395, 0.008821855545043946, 0.008853280067443847, 0.008804351806640624, 0.008814592361450196, 0.008878080368041993, 0.008816191673278809, 0.008825280189514161, 0.008816127777099609, 0.008836864471435547, 0.008882944107055663, 0.00912384033203125, 0.008929408073425293, 0.008914655685424804, 0.008960415840148925, 0.008937376022338867, 0.008998751640319824, 0.008897791862487793, 0.008907487869262695, 0.008882207870483399, 0.008839008331298828, 0.008917152404785156, 0.008873984336853028, 0.008856608390808105, 0.008870880126953124, 0.008851103782653809, 0.00885366439819336, 0.008787967681884766, 0.00883244800567627, 0.00887174415588379, 0.008893376350402831, 0.008908512115478516, 0.008922687530517578, 0.009007840156555176, 0.008955360412597656, 0.008895008087158204, 0.008919039726257324, 0.008922495841979981, 0.008946304321289062, 0.008912896156311035, 0.008968000411987304, 0.009021632194519043, 0.008951807975769043, 0.008955904006958008, 0.008981632232666016, 0.008946559906005859, 0.008933216094970704, 0.008852895736694335, 0.00885427188873291, 0.008891488075256348, 0.008907648086547851, 0.008802751541137695, 0.008877056121826172, 0.008882783889770507, 0.008855392456054687, 0.008663135528564453, 0.008889920234680175, 0.008943967819213867, 0.009086432456970215, 0.00887065601348877, 0.008895456314086914, 0.008815423965454101, 0.008838399887084961, 0.008850272178649903, 0.008839167594909669, 0.008849344253540038, 0.008892640113830567, 0.008875743865966797, 0.00894979190826416, 0.008924736022949218, 0.008829055786132812, 0.008827199935913086, 0.008908096313476563, 0.00883084774017334, 0.008825152397155762, 0.008807999610900879, 0.008885184288024902, 0.00883670425415039, 0.008857088088989258, 0.008842144012451172, 0.008872991561889648, 0.008842207908630372, 0.008914943695068359, 0.0088242244720459, 0.008831328392028808, 0.00891055965423584, 0.008825535774230958, 0.008808544158935547, 0.008904735565185546, 0.00888419246673584, 0.008930144309997558, 0.008930208206176758, 0.008885855674743653, 0.008861248016357421, 0.009226112365722657, 0.009089695930480956, 0.008953791618347168, 0.008878463745117188, 0.00897433567047119, 0.008927231788635253, 0.008892352104187012, 0.008851519584655762, 0.00885161590576172, 0.008828767776489258, 0.008889887809753418, 0.008904352188110351, 0.0090032958984375, 0.008859199523925781, 0.008870271682739258, 0.008860223770141602, 0.008861727714538574, 0.009320735931396484, 0.008881888389587402, 0.008890368461608887, 0.008831263542175293, 0.008908032417297363, 0.00885097599029541, 0.008850560188293458, 0.008694432258605956, 0.00883743953704834, 0.009008543968200684, 0.008873663902282714, 0.008834752082824707, 0.008847455978393554, 0.008855903625488281, 0.008825311660766602, 0.008845312118530273, 0.008820544242858887, 0.008907232284545898, 0.009044927597045898, 0.008816479682922363, 0.008898719787597657, 0.008847935676574707, 0.008846591949462891, 0.009173888206481934, 
0.008882080078125, 0.009850912094116211, 0.008946975708007813, 0.008887167930603027, 0.008822463989257812, 0.008855232238769531, 0.008891008377075195, 0.008878080368041993, 0.00889241600036621, 0.00889241600036621, 0.008814144134521484, 0.008849856376647949, 0.009095423698425293, 0.008941311836242676, 0.008914912223815918, 0.008925215721130371, 0.00881868839263916, 0.008822784423828126, 0.008820735931396484, 0.008908927917480469, 0.008842687606811523, 0.00881065559387207, 0.008832672119140624, 0.00888486385345459, 0.008842944145202637, 0.008816896438598633, 0.008824128150939942, 0.0088155517578125, 0.008799103736877442, 0.008845952033996582, 0.008818559646606446, 0.008824928283691406, 0.008880288124084472, 0.009271488189697266, 0.009104512214660644, 0.008892319679260253, 0.009046751976013184, 0.008839551925659179, 0.008924127578735352, 0.008889023780822753, 0.008869279861450195, 0.008913920402526856, 0.00885043239593506, 0.008841024398803712, 0.008782624244689941, 0.008890591621398925, 0.00860364818572998, 0.008823616027832031, 0.008828672409057617, 0.008866047859191895, 0.008811903953552245, 0.00889897632598877, 0.008844736099243165, 0.008822719573974609, 0.008803071975708008, 0.008778976440429688, 0.009051008224487305, 0.008869855880737305, 0.008945695877075195, 0.008900799751281739, 0.008888128280639649, 0.00884227180480957, 0.008835136413574219, 0.008810815811157226, 0.008812800407409668, 0.008822463989257812, 0.008817440032958984, 0.00882265567779541, 0.00881049633026123, 0.008845279693603516, 0.008959263801574706, 0.00886627197265625, 0.008951359748840333, 0.008823040008544922, 0.008862175941467286, 0.008839167594909669, 0.008857631683349609, 0.008933664321899414, 0.008885951995849609, 0.008904704093933105, 0.008863743782043456, 0.008857600212097168, 0.008868000030517579, 0.008848416328430175, 0.008973119735717773, 0.00885536003112793, 0.008825023651123047, 0.008830719947814942, 0.00889472007751465, 0.008902655601501466, 0.00892518424987793, 0.008947263717651368, 0.008882111549377441, 0.00888377571105957, 0.008857791900634765, 0.008859968185424805, 0.008847807884216309, 0.008851008415222168, 0.008833472251892089, 0.008848480224609375, 0.008825759887695312, 0.008825023651123047, 0.008863903999328614, 0.008858528137207031, 0.008847935676574707, 0.00883296012878418, 0.008816543579101563, 0.008881792068481446, 0.008855968475341798, 0.008633760452270508, 0.008835871696472168, 0.008824735641479493, 0.008857536315917968, 0.008799967765808105, 0.008853504180908203, 0.008796416282653808, 0.008791328430175781, 0.008839776039123535, 0.008784000396728515, 0.008827103614807128, 0.00882256031036377, 0.00889356803894043, 0.008878975868225098, 0.00885091209411621, 0.008812992095947266, 0.008851103782653809, 0.008973055839538575, 0.008908479690551759, 0.008873567581176758, 0.008878527641296386, 0.008838784217834473, 0.008863615989685058, 0.00898044776916504, 0.008940032005310058, 0.008896512031555176, 0.008933631896972657, 0.00885331153869629, 0.008949664115905762, 0.008926495552062989, 0.00894643211364746, 0.008976479530334473, 0.009099167823791504, 0.008896032333374023, 0.008960479736328126, 0.008956095695495606, 0.008955679893493652, 0.008953887939453125, 0.008928735733032227, 0.008942111968994141, 0.008904831886291504, 0.008871135711669921, 0.008831647872924805, 0.008837120056152344, 0.00881868839263916, 0.008845312118530273, 0.00880851173400879, 0.008766752243041992, 0.010320608139038085, 0.00891203212738037, 0.008901375770568848, 0.00893545627593994, 0.008844544410705566, 0.008839008331298828, 
0.008806976318359375, 0.008794464111328126, 0.008890368461608887, 0.008806112289428712, 0.00878211212158203, 0.008805695533752442, 0.00880025577545166, 0.008852160453796387, 0.008867199897766114, 0.008652799606323243, 0.008830944061279296, 0.008841983795166016, 0.008838303565979004, 0.008858464241027831, 0.008867839813232421, 0.00883894443511963, 0.00884275245666504, 0.008874367713928223, 0.008967743873596192, 0.00902633571624756, 0.008934720039367676, 0.008849247932434082, 0.008921952247619628, 0.00896985626220703, 0.009029215812683105, 0.009053024291992187, 0.00904918384552002, 0.008970144271850587, 0.009209792137145996, 0.008929632186889648, 0.00894223976135254, 0.008910847663879394, 0.008929280281066895, 0.008964287757873534, 0.009112704277038574, 0.00888492774963379, 0.008859328269958496, 0.008827199935913086, 0.008936544418334962, 0.008856224060058595, 0.008842559814453125, 0.008885503768920899, 0.00886956787109375, 0.008885760307312012, 0.008879903793334962, 0.008842207908630372, 0.008916640281677246, 0.008917087554931641, 0.008882176399230958, 0.00911359977722168, 0.008912480354309082, 0.00892092800140381, 0.00933523178100586, 0.009336735725402832, 0.009534751892089844, 0.009305024147033691, 0.009040032386779785, 0.008983519554138184, 0.008931967735290527, 0.008923392295837402, 0.008980480194091797, 0.008862848281860352, 0.008845631599426269, 0.008803104400634766, 0.008859423637390137, 0.008956064224243164, 0.008844799995422363, 0.008934816360473634, 0.008844256401062012, 0.008839136123657226, 0.008816639900207519, 0.008835071563720704, 0.00861961555480957, 0.008827872276306152, 0.008849120140075684, 0.008871904373168945, 0.00886787223815918, 0.008935551643371582, 0.008906559944152832, 0.008841279983520509, 0.008851455688476563, 0.008893983840942383, 0.00894108772277832, 0.008865920066833496, 0.00887388801574707, 0.008841343879699707, 0.008854047775268555, 0.008823040008544922, 0.008873984336853028, 0.0088372802734375, 0.008778656005859375, 0.008992768287658692, 0.009391231536865234, 0.008935232162475586, 0.00886128044128418, 0.0089071683883667, 0.008937503814697266, 0.008814175605773926, 0.008999296188354492, 0.008828703880310058, 0.008798111915588379, 0.008818047523498535, 0.008814528465270996, 0.008813599586486816, 0.00895577621459961, 0.008849791526794434, 0.008846431732177735, 0.008817279815673828, 0.008818528175354005, 0.008851263999938965, 0.008819040298461913, 0.008814528465270996, 0.008986687660217286, 0.008820735931396484, 0.008818528175354005, 0.00881065559387207, 0.008867168426513672, 0.008751328468322755, 0.008819135665893554, 0.008882176399230958, 0.008939071655273438, 0.008860095977783203, 0.008847359657287598, 0.008828927993774414, 0.00881868839263916, 0.00887551975250244, 0.008810912132263184, 0.009105504035949707, 0.00912396812438965, 0.008832544326782226, 0.009050815582275391, 0.009129695892333985, 0.008966079711914062, 0.008929280281066895, 0.008914943695068359, 0.008644351959228516, 0.008866047859191895, 0.008976384162902832, 0.008828927993774414, 0.009041312217712403, 0.00891926383972168, 0.008861663818359376, 0.008871583938598633, 0.008835007667541504, 0.008884544372558595, 0.008897024154663086, 0.009084032058715821, 0.008854399681091309, 0.008822367668151856, 0.008840895652770997, 0.008806559562683105, 0.00881107234954834, 0.008863615989685058, 0.008857343673706055, 0.008799967765808105, 0.008798879623413086, 0.008851455688476563, 0.00881049633026123, 0.008775008201599122, 0.008909472465515137, 0.008850655555725097, 0.008843071937561035, 
0.008788928031921387, 0.008812000274658203, 0.008788703918457031, 0.008813568115234375, 0.008786304473876953, 0.00883296012878418, 0.008802847862243652, 0.008986623764038085, 0.008882176399230958, 0.008847583770751953, 0.00883407974243164, 0.008895232200622558, 0.008919039726257324, 0.008876192092895507, 0.008869248390197754, 0.008874496459960938, 0.008882304191589356, 0.008887328147888184, 0.008860735893249512, 0.00887782382965088, 0.008855392456054687, 0.008882240295410156, 0.00882697582244873, 0.008793919563293458, 0.008784192085266113, 0.008861056327819824, 0.00881510353088379, 0.008863743782043456, 0.00881868839263916, 0.008848896026611328, 0.008886143684387207, 0.008993120193481445, 0.008884832382202149, 0.008945343971252441, 0.008914591789245606, 0.008810976028442382, 0.00867420768737793, 0.008837120056152344, 0.008875167846679688, 0.008847295761108398, 0.008874912261962891, 0.008882176399230958, 0.008857855796813965, 0.008828448295593263, 0.00883670425415039, 0.008864383697509765, 0.008828960418701171, 0.008857888221740723, 0.010130847930908203, 0.010232095718383788, 0.008853407859802246, 0.008926912307739258, 0.00891536045074463, 0.008892127990722656, 0.008880127906799316, 0.008851743698120118, 0.00882307243347168, 0.00882044792175293, 0.008812416076660156, 0.008816736221313477, 0.00882697582244873, 0.008837056159973145, 0.008802623748779296, 0.008844415664672851, 0.008805055618286133, 0.00883894443511963, 0.00884556770324707, 0.008828703880310058, 0.008836928367614747, 0.008835328102111817, 0.008888383865356446, 0.008843199729919433, 0.008833024024963379, 0.008838208198547363, 0.008876992225646972, 0.008839232444763184, 0.008834943771362305, 0.008846943855285644, 0.008800288200378419, 0.009210304260253906, 0.009099167823791504, 0.009027456283569336, 0.00888649559020996, 0.008853280067443847, 0.00884879970550537, 0.008842144012451172, 0.008886176109313965, 0.009179136276245118, 0.008850943565368653, 0.008824864387512206, 0.008827584266662598, 0.008908864021301269, 0.008836671829223633, 0.008820384025573731, 0.008833824157714843, 0.008800127983093261, 0.008810111999511719, 0.00882915210723877, 0.008834879875183105, 0.008590463638305665, 0.008800352096557617, 0.008807104110717773, 0.008830464363098145, 0.008813055992126465, 0.008847104072570802, 0.008822400093078614, 0.008894816398620605, 0.008987263679504394, 0.008824000358581543, 0.009057951927185058, 0.00885647964477539, 0.00892518424987793, 0.008869888305664063, 0.008806400299072266, 0.008865983963012695, 0.008805919647216796, 0.008780063629150391, 0.00880844783782959, 0.00882915210723877, 0.00878767967224121, 0.008835136413574219, 0.008939488410949707, 0.008867744445800782, 0.008849535942077637, 0.008789888381958008, 0.008795968055725098, 0.00888864040374756, 0.00880031967163086, 0.008794367790222168, 0.008803520202636719, 0.008812895774841309, 0.008863424301147461, 0.008840959548950195, 0.008986335754394531, 0.008884960174560547, 0.008962335586547852, 0.008851231575012207, 0.008831199645996093, 0.008835071563720704, 0.008840928077697754, 0.008880000114440918, 0.008954272270202637, 0.00898579216003418, 0.009009984016418457, 0.00902950382232666, 0.00909119987487793, 0.009019392013549805, 0.00899452781677246, 0.009029055595397949, 0.009134943962097169, 0.00907196807861328, 0.009358304023742676, 0.008982208251953125, 0.008996416091918945, 0.00880025577545166, 0.00885820770263672, 0.008837056159973145, 0.008938591957092285, 0.008863903999328614, 0.008864416122436523, 0.008832672119140624, 0.008882816314697266, 0.008711968421936035, 
0.008958208084106445, 0.008836992263793945, 0.008889856338500977, 0.009001567840576171, 0.009032768249511719, 0.008977343559265136, 0.009035872459411622, 0.00913920021057129, 0.009012127876281738, 0.009033727645874023, 0.008914943695068359, 0.008947936058044434, 0.008885184288024902, 0.008936287879943848, 0.008869279861450195, 0.00890294361114502, 0.008995136260986329, 0.008978400230407715, 0.008949952125549316, 0.009106656074523926, 0.009114239692687988, 0.008893792152404784, 0.008915840148925781, 0.00894153594970703, 0.00881999969482422, 0.008818240165710449, 0.00888111972808838, 0.009181216239929199, 0.009116767883300781, 0.008899392127990723, 0.009752832412719726, 0.009035264015197754, 0.00962559986114502, 0.00967024040222168, 0.009016032218933106, 0.008888319969177246, 0.008956159591674805, 0.008850848197937012, 0.008866175651550293, 0.008810015678405761, 0.008803968429565429, 0.008761504173278808, 0.00880502414703369, 0.008830975532531739, 0.008863743782043456, 0.008898207664489746, 0.008791808128356934, 0.0088438720703125, 0.008859647750854491, 0.008914912223815918, 0.008925248146057128, 0.009021568298339843, 0.008972127914428711, 0.009054207801818847, 0.00901961612701416, 0.008965696334838867, 0.008958239555358887, 0.00895411205291748, 0.008945504188537597, 0.008902496337890625, 0.009047679901123047, 0.00902182388305664, 0.008762592315673829, 0.008930591583251954, 0.00889628791809082, 0.008919648170471191, 0.00891603183746338, 0.008862175941467286, 0.008831007957458496, 0.008845760345458984, 0.0088242244720459, 0.008824928283691406, 0.008811008453369141, 0.00880016040802002, 0.008872032165527344, 0.008928511619567871, 0.008818752288818359, 0.00882051181793213, 0.00879094409942627, 0.008783103942871094, 0.008758015632629394, 0.008842304229736328, 0.008831904411315919, 0.008898591995239258, 0.00882688045501709, 0.008835488319396973, 0.008877663612365723, 0.008803968429565429, 0.008812671661376953, 0.008852800369262696, 0.008834015846252442, 0.008782848358154297, 0.008784064292907715, 0.008803135871887207, 0.008900256156921386, 0.008902976036071777, 0.00893545627593994, 0.008867520332336426, 0.008923647880554199, 0.00892630386352539, 0.008878368377685547, 0.008833439826965332, 0.008820735931396484, 0.008916383743286132, 0.00893398380279541, 0.008969951629638671, 0.00889680004119873, 0.008912320137023927, 0.008866368293762207, 0.008850655555725097, 0.008879263877868653, 0.00886240005493164, 0.008896544456481933, 0.008827808380126954, 0.008824831962585449, 0.008813664436340332, 0.008831839561462402, 0.008898176193237305, 0.00882323169708252, 0.008871904373168945, 0.008894495964050293, 0.008830975532531739, 0.008888319969177246, 0.008853504180908203, 0.008810208320617676, 0.008571743965148926, 0.008814304351806641, 0.008839263916015624, 0.008812479972839356, 0.008805919647216796, 0.00883296012878418, 0.008779904365539552, 0.008854175567626953, 0.008914943695068359, 0.008809599876403808, 0.008838144302368164, 0.008992639541625977, 0.008920127868652344, 0.00919980812072754, 0.009195648193359374, 0.009276032447814941, 0.009453472137451171, 0.00897862434387207, 0.009240351676940917, 0.009836671829223632, 0.008896512031555176, 0.008947551727294922, 0.008917247772216797, 0.008812640190124512, 0.00880832004547119, 0.008808575630187988, 0.008828736305236817, 0.008842911720275879, 0.008796511650085449, 0.008951807975769043, 0.008813599586486816, 0.008840319633483886, 0.008810463905334473, 0.008775551795959473, 0.008814592361450196, 0.008806079864501952, 0.008933823585510254, 0.008854944229125977, 
0.008823264122009277, 0.008816991806030273, 0.008861184120178223, 0.008847519874572754, 0.009263104438781738, 0.008910847663879394, 0.009115391731262208, 0.00885756778717041, 0.008771903991699219, 0.008801471710205079, 0.008789888381958008, 0.008811455726623535, 0.008824352264404298, 0.008801759719848633, 0.008763456344604492, 0.008763744354248046, 0.008799936294555664, 0.00880076789855957, 0.008784255981445312, 0.008802304267883301, 0.008802304267883301, 0.008838784217834473, 0.008808064460754394, 0.008794143676757812, 0.008809184074401856, 0.008601568222045898, 0.008816384315490722, 0.008842816352844238, 0.00884601593017578, 0.008804351806640624, 0.008826175689697265, 0.008810272216796875, 0.008878175735473632, 0.008829888343811034, 0.008830975532531739, 0.008880224227905274, 0.008863519668579102, 0.008853504180908203, 0.008855680465698242, 0.008828607559204102, 0.008827168464660645, 0.008888223648071288, 0.00882688045501709, 0.00887622356414795, 0.008852416038513184, 0.008883071899414062, 0.009121055603027345, 0.0089136323928833, 0.008894432067871093, 0.008914976119995118, 0.00897590446472168, 0.00906060791015625, 0.008966367721557617, 0.00893337631225586, 0.008893792152404784, 0.00884224033355713, 0.008803999900817872, 0.008824831962585449, 0.008816191673278809, 0.008855999946594239, 0.008854687690734863, 0.008827136039733887, 0.008807007789611816, 0.008834752082824707, 0.008821056365966798, 0.008951199531555177, 0.008802623748779296, 0.00880668830871582, 0.008750432014465332, 0.008810751914978028, 0.008783295631408691, 0.00879100799560547, 0.008768511772155761, 0.008894975662231446, 0.008843775749206542, 0.008823007583618163, 0.008771136283874511, 0.008832672119140624, 0.008798912048339843, 0.00882636833190918, 0.00882307243347168, 0.008816160202026368, 0.009058367729187012, 0.008946175575256348, 0.008812543869018554, 0.008847359657287598, 0.00880844783782959, 0.008936832427978516, 0.00866646385192871, 0.008882847785949707, 0.00886905574798584, 0.008827712059020996, 0.008832096099853515, 0.008892959594726562, 0.008837311744689942, 0.008849535942077637, 0.008808639526367188, 0.00879964828491211, 0.008828864097595215, 0.008817184448242187, 0.008839391708374023, 0.008823904037475586, 0.008897215843200683, 0.008802016258239747, 0.008923392295837402, 0.008863776206970216, 0.008808608055114747, 0.00888105583190918, 0.008847488403320312, 0.008833632469177247, 0.00881276798248291, 0.00880025577545166, 0.008875743865966797, 0.008878368377685547, 0.00890675163269043, 0.009447711944580078, 0.01053872013092041, 0.009529151916503907, 0.00888646411895752, 0.008841216087341308, 0.008837311744689942, 0.008832832336425782, 0.008906815528869629, 0.008787584304809571, 0.008851136207580566, 0.008790656089782715, 0.00876966381072998, 0.008839232444763184, 0.008793919563293458, 0.008810175895690917, 0.008806719779968261, 0.008765439987182617, 0.008814592361450196, 0.008867615699768067, 0.008795488357543945, 0.008841407775878907, 0.008806912422180176, 0.008873472213745117, 0.008881183624267578, 0.008823552131652833, 0.008829855918884277, 0.008867903709411621, 0.008861791610717774, 0.008875871658325195, 0.008850879669189453, 0.008853376388549805, 0.009073344230651856, 0.008839167594909669, 0.008938655853271485, 0.00920847988128662, 0.009009344100952148, 0.008625727653503418, 0.008834752082824707, 0.008859711647033692, 0.008841919898986816, 0.009086976051330567, 0.008855456352233887, 0.00886086368560791, 0.00885372829437256, 0.008849408149719238, 0.008940511703491211, 0.008912608146667481, 
0.008863519668579102, 0.008855487823486328, 0.008802592277526855, 0.008875712394714356, 0.008821120262145996, 0.008833087921142579, 0.00886070442199707, 0.008794976234436034, 0.008799807548522949, 0.008812447547912598, 0.008804032325744629, 0.008840031623840332, 0.008847359657287598, 0.008939264297485351, 0.008803775787353516, 0.008821567535400391, 0.008769536018371582, 0.00883897590637207, 0.008800576210021973, 0.008927103996276855, 0.008816127777099609, 0.008776191711425782, 0.008828927993774414, 0.008791232109069824, 0.008800607681274414, 0.008780256271362304, 0.008795424461364746, 0.00882096004486084, 0.008788479804992675, 0.008822175979614258, 0.009593440055847167, 0.009172991752624511, 0.009242624282836913, 0.009519295692443848, 0.008933183670043945, 0.00886732769012451, 0.008855551719665527, 0.009095808029174805, 0.009078656196594238, 0.009000767707824707, 0.009035167694091796, 0.008852255821228028, 0.009000960350036622, 0.008912896156311035, 0.008888319969177246, 0.009011199951171875, 0.008922783851623536, 0.008959808349609376, 0.008909536361694335, 0.008854623794555663, 0.008817376136779785, 0.008863743782043456, 0.00862611198425293, 0.008853568077087402, 0.008849184036254882, 0.00886518383026123, 0.008860480308532715, 0.008861696243286133, 0.008822879791259765, 0.008830880165100098, 0.008831040382385254, 0.0088984956741333, 0.009232224464416504, 0.008974495887756348, 0.008841216087341308, 0.00889840030670166, 0.00887168025970459, 0.008886783599853516, 0.008892288208007812, 0.00887782382965088, 0.008861984252929687, 0.008828927993774414, 0.008824831962585449, 0.00883027172088623, 0.009104063987731933, 0.009012319564819337, 0.008903743743896485, 0.008858943939208984, 0.008826399803161621, 0.008876128196716309, 0.008864159584045411, 0.008837056159973145, 0.008840895652770997, 0.00883801555633545, 0.008828895568847656, 0.008850815773010254, 0.008868512153625488, 0.009023296356201171, 0.008894944190979004, 0.008840928077697754, 0.00880835247039795, 0.008924480438232421, 0.008831775665283204, 0.008876031875610351, 0.0089169921875, 0.008819904327392578, 0.008781855583190917, 0.008780415534973145, 0.00880832004547119, 0.008853055953979493, 0.008800224304199218, 0.008819680213928222, 0.008801664352416992, 0.008860063552856446, 0.00909830379486084, 0.008913472175598144, 0.008839551925659179, 0.00882688045501709, 0.008882176399230958, 0.008878080368041993, 0.008851743698120118, 0.008933088302612305, 0.00881049633026123, 0.008859295845031739, 0.008847935676574707]",tokens/s,112.47534535556849,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3856.355328,4533.911552,0.0,4131.38944,4012.475392,s,1,10.228845703125,10.228845703125,0.0,10.228845703125,10.228845703125,10.228845703125,10.228845703125,[10.228845703125],,kWh,9.272959164585851e-05,1.0221453507349271e-05,2.8893356448012053e-05,0.00013184440160121983,,MB,2170.232832,4733.140992,0.0,4315.938816,4233.462784,s,10,1.7868261718750003,0.1786826171875,0.000816469095859311,0.17896324920654297,0.17915940399169922,0.17919817886352538,0.17922919876098634,"[0.17897996520996093, 0.17631440734863282, 0.17884124755859376, 0.17843927001953125, 0.1790595245361328, 0.1791507873535156, 0.17904202270507813, 0.17881546020507813, 0.17923695373535156, 0.178946533203125]",tokens/s,1432.707915462013,kWh,5.290591152603952e-06,5.834567145564683e-07,3.5171853534285424e-06,9.391233220588963e-06,tokens/kWh,27259465.71519019,MB,2180.902912,4837.998592,0.0,4420.796416,4335.693312,s,10,22.48654248046875,2.2486542480468747,0.21890661804567998,2.1445224609375,2.5158321044921874,2.674327795410156,2.8011243481445316,"[2.832823486328125, 2.48061083984375, 2.13504541015625, 2.141770751953125, 2.1452802734375, 2.138077880859375, 2.159431396484375, 2.141730712890625, 2.1437646484375, 2.168007080078125]",tokens/s,28.016757158073652,kWh,6.228444842489712e-05,6.869768834255755e-06,4.0414127569370076e-05,0.00010956834482852295,tokens/kWh,574983.5876283108,,s,630,22.48379755020143,0.03568856754000225,0.0039310404126924586,0.033976270675659184,0.044604291152954105,0.044869251060485844,0.045552499732971194,"[0.045531200408935546, 0.045709022521972655, 0.04493747329711914, 0.04469184112548828, 0.044634273529052734, 0.044802177429199216, 0.04447235107421875, 0.044824161529541016, 0.044765567779541014, 0.04716694259643555, 0.05268304061889648, 0.04490195083618164, 0.045639968872070315, 0.0448004150390625, 0.044518718719482424, 0.044439998626708985, 0.04440060806274414, 0.04453200149536133, 0.04450028610229492, 0.044431488037109376, 0.04480374526977539, 0.04451414489746094, 0.04445091247558594, 0.04470713424682617, 0.04480169677734375, 0.044566238403320316, 0.04448080062866211, 0.04455987167358398, 0.04454636764526367, 0.044509567260742185, 0.04555705642700195, 0.045164192199707034, 0.044956512451171875, 0.04464191818237305, 0.045023006439208986, 0.04669910430908203, 0.04497760009765625, 0.0449477767944336, 0.045170944213867185, 0.0450522575378418, 0.044718910217285156, 0.04464931106567383, 0.044507137298583986, 0.044593151092529294, 0.04443545532226562, 0.044652576446533206, 0.044850368499755856, 0.044986175537109374, 0.04466787338256836, 0.04447603225708008, 0.04460095977783203, 0.04450966262817383, 0.044666336059570315, 0.04500271987915039, 0.04457558441162109, 0.044980224609375, 0.0447567024230957, 0.044577056884765626, 0.044449760437011716, 
0.04446825790405273, 0.0446382064819336, 0.04551686477661133, 0.04476102447509766, 0.044832767486572264, 0.04507555389404297, 0.04491356658935547, 0.04522377777099609, 0.044939422607421876, 0.04487372970581055, 0.04453731155395508, 0.044678817749023436, 0.04508703994750977, 0.04465286254882812, 0.04467766571044922, 0.04471603012084961, 0.04463827133178711, 0.04489161682128906, 0.044531902313232424, 0.044349441528320314, 0.04443734359741211, 0.04443257522583008, 0.04442809677124023, 0.04475081634521484, 0.04532627105712891, 0.04554134368896484, 0.045262462615966795, 0.044956321716308596, 0.0446382064819336, 0.04470723342895508, 0.0447757453918457, 0.04486377716064453, 0.044832767486572264, 0.044548095703125, 0.04605952072143555, 0.03423846435546875, 0.03401728057861328, 0.03378790283203125, 0.03411151885986328, 0.03370390319824219, 0.03505152130126953, 0.033957023620605466, 0.03391296005249023, 0.03361407852172851, 0.0337474250793457, 0.033782142639160156, 0.03383871841430664, 0.033740798950195314, 0.03370751953125, 0.03379584121704102, 0.03371241760253906, 0.03378182220458984, 0.033790721893310546, 0.033781406402587894, 0.03432447814941406, 0.03427532958984375, 0.03407257461547852, 0.03395711898803711, 0.03417779159545899, 0.034129920959472655, 0.03406032180786133, 0.03404115295410156, 0.03388623809814453, 0.03485865783691406, 0.03591881561279297, 0.033896446228027344, 0.03450265502929688, 0.03451087951660156, 0.03374095916748047, 0.0336071662902832, 0.03367731094360352, 0.03361795043945313, 0.03366521453857422, 0.03352604675292969, 0.03358512115478516, 0.033574943542480466, 0.033650688171386715, 0.03366003036499023, 0.0336495361328125, 0.033753089904785157, 0.033646175384521484, 0.03352608108520508, 0.033571006774902344, 0.0337017936706543, 0.03412390518188477, 0.033992576599121097, 0.033933502197265625, 0.033799007415771486, 0.034251232147216794, 0.03378960037231445, 0.0339317741394043, 0.0340863037109375, 0.033592510223388675, 0.03345283126831055, 0.033799198150634764, 0.03343382263183594, 0.033584224700927735, 0.033565185546875, 0.033541343688964845, 0.03361196899414062, 0.033896446228027344, 0.03387875366210937, 0.033659137725830075, 0.033726207733154295, 0.03361996841430664, 0.033872032165527345, 0.033415008544921875, 0.0335810546875, 0.0336445426940918, 0.03359151840209961, 0.03365868759155274, 0.03359331130981445, 0.03372854232788086, 0.03383737564086914, 0.03450003051757813, 0.034399551391601564, 0.03552934265136719, 0.03439465713500976, 0.034598655700683593, 0.03418313598632813, 0.03528297424316406, 0.03415353775024414, 0.034044639587402344, 0.03369132614135742, 0.034256832122802734, 0.033980224609375, 0.03463862228393555, 0.03423231887817383, 0.03403084945678711, 0.03400985717773437, 0.03397030258178711, 0.03388089752197266, 0.03380758285522461, 0.03365353775024414, 0.03402060699462891, 0.03376643371582031, 0.03376726531982422, 0.033816097259521484, 0.03379439926147461, 0.03387631988525391, 0.034028545379638675, 0.03380086517333984, 0.03398649597167969, 0.03437152099609375, 0.033830558776855466, 0.033716350555419924, 0.03383331298828125, 0.03427065658569336, 0.03388582229614258, 0.03418172836303711, 0.03391932678222656, 0.03402073669433594, 0.03408755111694336, 0.03428979110717773, 0.03407040023803711, 0.0340316162109375, 0.03391897583007813, 0.033888256072998044, 0.035096031188964844, 0.033892894744873045, 0.033890209197998046, 0.033871070861816406, 0.03391683197021484, 0.03375510406494141, 0.033827838897705076, 0.03389606475830078, 0.033790336608886716, 0.03421952056884765, 
0.034052513122558595, 0.03405804824829101, 0.035025184631347656, 0.034525184631347655, 0.03422639846801758, 0.0341808967590332, 0.0347955207824707, 0.03408031845092773, 0.03404179382324219, 0.03492505645751953, 0.03401932907104492, 0.033716224670410154, 0.03388332748413086, 0.033708030700683594, 0.033797119140625, 0.03363033676147461, 0.03374607849121094, 0.03379878234863281, 0.03397622299194336, 0.03383209609985351, 0.033837471008300785, 0.033630016326904294, 0.03382495880126953, 0.03369004821777344, 0.03388179016113281, 0.03373241424560547, 0.033882144927978516, 0.034054271697998045, 0.033861663818359374, 0.03420774459838867, 0.03804959869384766, 0.03394358444213867, 0.033992862701416014, 0.035676158905029294, 0.03369558334350586, 0.0335398063659668, 0.03364502334594727, 0.03377046585083008, 0.033739776611328126, 0.03349235153198242, 0.033771583557128906, 0.033555007934570315, 0.03379814529418945, 0.03447753524780273, 0.033837600708007814, 0.03379404830932617, 0.03387507247924805, 0.033881057739257814, 0.034074592590332034, 0.034176063537597653, 0.03423100662231445, 0.03418537521362305, 0.03424051284790039, 0.03423136138916016, 0.03444627380371094, 0.03407462310791016, 0.03443705749511719, 0.03467219161987305, 0.03445126342773437, 0.03456480026245117, 0.03446099090576172, 0.03408560180664062, 0.034437088012695315, 0.034589920043945316, 0.03381884765625, 0.03374860763549805, 0.03380844879150391, 0.033676097869873044, 0.03370604705810547, 0.033846847534179686, 0.03376483154296875, 0.03406073760986328, 0.03377369689941406, 0.033992191314697266, 0.03374975967407227, 0.03368137741088867, 0.03389379119873047, 0.033720863342285155, 0.03361939239501953, 0.03364947128295898, 0.03390428924560547, 0.033705665588378904, 0.03384182357788086, 0.034187007904052734, 0.03369599914550781, 0.03358924865722656, 0.034282752990722656, 0.03365964889526367, 0.034241279602050784, 0.03372032165527344, 0.03369779205322266, 0.03388611221313476, 0.033854782104492186, 0.033753536224365235, 0.03393775939941406, 0.03361996841430664, 0.03371171188354492, 0.033674945831298826, 0.033756065368652347, 0.03356409454345703, 0.03361014556884766, 0.03343996810913086, 0.03354188919067383, 0.033805313110351565, 0.034396446228027344, 0.03397091293334961, 0.03369161605834961, 0.03366652679443359, 0.034103519439697264, 0.03358099365234375, 0.03375759887695313, 0.03380223846435547, 0.03377971267700195, 0.03487539291381836, 0.033982463836669925, 0.03375289535522461, 0.03420083236694336, 0.0339752311706543, 0.03387964630126953, 0.034081153869628907, 0.0337490234375, 0.03373577499389648, 0.0336343994140625, 0.03368838500976563, 0.03364255905151367, 0.034280574798583985, 0.03434310531616211, 0.03435532760620117, 0.03447868728637695, 0.0339453125, 0.033941696166992184, 0.03367721557617188, 0.033785247802734376, 0.033628864288330076, 0.0337606086730957, 0.034283424377441404, 0.03405491256713867, 0.033777118682861325, 0.033842849731445315, 0.03658787155151367, 0.03412863922119141, 0.03378575897216797, 0.033814208984375, 0.03432089614868164, 0.034031135559082035, 0.03415382385253906, 0.03393619155883789, 0.033937568664550784, 0.03380003356933594, 0.03371417617797851, 0.033685504913330076, 0.034443614959716796, 0.03384076690673828, 0.03399654388427734, 0.03449913787841797, 0.034611167907714846, 0.034027713775634766, 0.03379404830932617, 0.03388774490356445, 0.03395414352416992, 0.034008895874023434, 0.03444771194458008, 0.033976318359375, 0.03793875122070312, 0.037798336029052734, 0.033896095275878904, 0.03380585479736328, 
0.034246494293212894, 0.034011295318603516, 0.03401398468017578, 0.03442076873779297, 0.033724414825439454, 0.033742847442626955, 0.03371859359741211, 0.03356169509887695, 0.03377417755126953, 0.033650463104248046, 0.03360176086425781, 0.0336640625, 0.03372537612915039, 0.033702945709228514, 0.03419030380249023, 0.03363174438476563, 0.03379801559448242, 0.033796737670898434, 0.033938751220703126, 0.033858238220214845, 0.033880062103271484, 0.03407257461547852, 0.03408838272094727, 0.03411580657958985, 0.03430377578735352, 0.034279647827148436, 0.03445334243774414, 0.03411228942871094, 0.03389923095703125, 0.03392537689208985, 0.03668003082275391, 0.042201503753662106, 0.03442278289794922, 0.03412694549560547, 0.03384230422973633, 0.03372623825073242, 0.03470064163208008, 0.033667713165283206, 0.03370569610595703, 0.033769790649414065, 0.03389440155029297, 0.03386982345581055, 0.03395782470703125, 0.034391616821289064, 0.033756736755371095, 0.03394224166870117, 0.03369161605834961, 0.03480780792236328, 0.035917823791503906, 0.03425251388549805, 0.03522496032714844, 0.034044448852539065, 0.03404179382324219, 0.03361942291259765, 0.03473648071289062, 0.0338985595703125, 0.03435782241821289, 0.03434624099731445, 0.03401740646362305, 0.03392607879638672, 0.0338326416015625, 0.03377891159057617, 0.0336629753112793, 0.03385628890991211, 0.03392425537109375, 0.0339549446105957, 0.03380809783935547, 0.0340316162109375, 0.03365478515625, 0.033713470458984374, 0.03355849456787109, 0.033605918884277344, 0.03368185424804687, 0.03362326431274414, 0.03363510513305664, 0.033702239990234376, 0.0336473274230957, 0.033753246307373044, 0.03389487838745117, 0.034003360748291016, 0.03410729598999023, 0.03403363037109375, 0.03397407913208008, 0.0337891845703125, 0.034122528076171874, 0.0337512321472168, 0.033713409423828125, 0.033934368133544925, 0.033959648132324216, 0.03378969573974609, 0.03371443176269531, 0.03367935943603516, 0.03368703842163086, 0.03374518585205078, 0.03381475067138672, 0.0340766716003418, 0.033998462677001955, 0.03419558334350586, 0.03409110260009766, 0.0342119026184082, 0.03404211044311523, 0.03398025512695312, 0.03378969573974609, 0.033734912872314456, 0.03399423980712891, 0.03404032135009766, 0.03382675170898437, 0.034017215728759764, 0.03402108764648438, 0.035095264434814456, 0.03467961502075195, 0.034223934173583985, 0.03415216064453125, 0.03389487838745117, 0.03371212768554688, 0.033699840545654294, 0.03399660873413086, 0.03383091354370117, 0.03380204772949219, 0.033720703125, 0.03385654449462891, 0.033807327270507816, 0.03358643341064453, 0.033656993865966794, 0.03366764831542969, 0.033753440856933596, 0.03376438522338867, 0.033756126403808595, 0.03360326385498047, 0.03376902389526367, 0.033563201904296874, 0.03366860961914062, 0.03368179321289062, 0.033557727813720704, 0.033601406097412106, 0.03378470230102539, 0.033484832763671875, 0.03769379043579101, 0.03435830307006836, 0.03426572799682617, 0.033693695068359376, 0.03361382293701172, 0.03359936141967773, 0.0343903694152832, 0.033920799255371094, 0.03367427062988281, 0.03359843063354492, 0.033742847442626955, 0.033705215454101566, 0.03363507080078125, 0.03376134490966797, 0.033886142730712894, 0.03387801742553711, 0.0341313591003418, 0.03459980773925781, 0.03461465454101562, 0.03515836715698242, 0.034514110565185545, 0.03443385696411133, 0.03433881759643555, 0.03465545654296875, 0.03447654342651367, 0.034062625885009766, 0.033984577178955075, 0.034455486297607425, 0.03424160003662109, 0.03388444900512695, 0.03446031951904297, 
0.034557952880859374, 0.03396793746948242, 0.033857921600341796, 0.03370124816894531, 0.03365523147583008, 0.03405369567871094, 0.03399638366699219, 0.033694175720214846, 0.03383456039428711, 0.03405305480957031, 0.03393312072753906, 0.03380550384521484, 0.03389059066772461, 0.03385996627807617, 0.033915103912353514, 0.033891521453857425, 0.03381532669067383, 0.033842849731445315, 0.03380057525634766, 0.03402665710449219, 0.03359625625610352, 0.03381001663208008, 0.033640735626220705, 0.03385712051391602, 0.034043617248535156, 0.03363103866577148, 0.03366864013671875, 0.033673057556152346, 0.033559169769287106, 0.03384848022460937, 0.03376828765869141, 0.034025344848632816, 0.03394572830200195, 0.034072097778320314, 0.034056545257568356, 0.03474214553833008, 0.03438934326171875, 0.034075553894042966, 0.03490611267089844, 0.035100929260253905, 0.035366657257080075, 0.03525151824951172, 0.0353939208984375, 0.035541313171386715, 0.03528908920288086, 0.03555894470214844, 0.0353056640625, 0.035503681182861326, 0.03520995330810547, 0.03505340957641601, 0.03531792068481445, 0.035376991271972656, 0.03469308853149414, 0.03545708847045898, 0.03504140853881836, 0.034945022583007815, 0.03463782501220703, 0.03450470352172851, 0.03440982437133789, 0.034705184936523435, 0.03457247924804688, 0.03459740829467774, 0.03455401611328125, 0.03445555114746094, 0.034500606536865236, 0.034830337524414064, 0.03430950546264649, 0.03448076629638672]",tokens/s,28.02017757869183,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1584.2304,1604.190208,0.0,1201.668096,1189.151232,s,1,8.4257275390625,8.4257275390625,0.0,8.4257275390625,8.4257275390625,8.4257275390625,8.4257275390625,[8.4257275390625],,kWh,3.6876531970824546e-05,4.060446244156687e-06,1.1971120687986891e-05,5.290809890296812e-05,,MB,1705.672704,1801.322496,0.0,1384.12032,1351.367168,s,10,0.41631919860839844,0.04163191986083985,0.00021260578362603222,0.04160505676269531,0.04197338829040528,0.04202442150115967,0.04206524806976318,"[0.04207545471191406, 0.04160111999511719, 0.04152048110961914, 0.04160899353027344, 0.04165644836425781, 0.041610431671142575, 0.04143766403198242, 0.04136604690551758, 0.0419620475769043, 0.04148051071166992]",tokens/s,6149.127901276558,kWh,1.2611619478269826e-06,1.3908403303927074e-07,8.339374487585984e-07,2.234183429624852e-06,tokens/kWh,114583250.68814322,MB,1711.104,1843.265536,0.0,1426.06336,1407.548416,s,10,14.392046630859374,1.4392046630859376,0.008099490559269654,1.4383462524414061,1.4511020385742186,1.4513244567871093,1.4515023913574219,"[1.4335716552734374, 1.429119384765625, 1.4510526123046874, 1.4428548583984375, 1.451546875, 1.446393798828125, 1.4404183349609374, 1.436274169921875, 1.4305079345703124, 
1.4303070068359376]",tokens/s,43.7741772354431,kWh,4.1650047833010315e-05,4.593608544096187e-06,1.9363662042641944e-05,6.560731841974845e-05,tokens/kWh,960258.7259691501,,s,630,14.387594770431514,0.022837452016557968,0.00041325239567124335,0.022740240097045897,0.023255177307128908,0.023431049442291258,0.024598996791839602,"[0.022407136917114257, 0.022738687515258788, 0.022823680877685548, 0.022595552444458007, 0.022511104583740234, 0.022600223541259765, 0.022747135162353514, 0.02258470344543457, 0.022624895095825194, 0.022541664123535157, 0.02248678398132324, 0.022606752395629884, 0.02269935989379883, 0.022724767684936524, 0.02263609504699707, 0.022932416915893556, 0.022819967269897462, 0.022563327789306642, 0.022473087310791017, 0.022530176162719726, 0.02250124740600586, 0.02255462455749512, 0.02263859176635742, 0.0227061767578125, 0.0229171199798584, 0.02314963150024414, 0.02295097541809082, 0.02312588882446289, 0.023349248886108398, 0.02303385543823242, 0.022706016540527344, 0.02263055992126465, 0.022732288360595702, 0.022540128707885743, 0.022592159271240236, 0.022673280715942382, 0.02251750373840332, 0.02258729553222656, 0.02250595283508301, 0.022536191940307617, 0.02269388771057129, 0.022703487396240233, 0.022530559539794923, 0.02243561553955078, 0.022535680770874023, 0.02372489547729492, 0.024386655807495116, 0.022791007995605468, 0.022672544479370116, 0.022757408142089843, 0.022670207977294923, 0.02261974334716797, 0.02258367919921875, 0.022464351654052736, 0.0241276798248291, 0.022640640258789063, 0.022769664764404295, 0.022656383514404296, 0.022807167053222655, 0.02272368049621582, 0.02286089515686035, 0.022795551300048827, 0.02272483253479004, 0.022463455200195312, 0.022514944076538087, 0.022600128173828126, 0.022587711334228516, 0.022475872039794922, 0.02249616050720215, 0.022583295822143554, 0.022593055725097656, 0.02244985580444336, 0.022547231674194337, 0.022452224731445314, 0.0229881591796875, 0.02247929573059082, 0.022503616333007813, 0.022603776931762694, 0.02247270393371582, 0.022441375732421876, 0.022451839447021484, 0.022639583587646485, 0.022459680557250977, 0.022562559127807618, 0.022411359786987304, 0.022594335556030274, 0.022527072906494142, 0.023577184677124025, 0.024560031890869142, 0.02325872039794922, 0.02277827262878418, 0.02287001609802246, 0.022657024383544923, 0.022634368896484375, 0.022679040908813477, 0.022577791213989257, 0.022614015579223632, 0.02411017608642578, 0.022764448165893555, 0.022755199432373047, 0.022726783752441405, 0.022540288925170897, 0.022503103256225586, 0.022458688735961914, 0.022722560882568358, 0.022786048889160155, 0.022536191940307617, 0.022486560821533202, 0.02242198371887207, 0.022611967086791994, 0.02269593620300293, 0.022684768676757814, 0.022575807571411134, 0.022601408004760744, 0.02271286392211914, 0.02268079948425293, 0.02251580810546875, 0.022518463134765625, 0.022513439178466797, 0.022624479293823243, 0.022569984436035157, 0.022772735595703125, 0.02276118469238281, 0.022532384872436525, 0.02282700729370117, 0.02265894317626953, 0.022624799728393555, 0.0227772159576416, 0.02256528091430664, 0.022556032180786133, 0.022993247985839845, 0.02262646484375, 0.02277731132507324, 0.02260799980163574, 0.02254310417175293, 0.0226507511138916, 0.022507648468017578, 0.022562816619873048, 0.02241539192199707, 0.025847776412963867, 0.022953983306884765, 0.022497215270996095, 0.022528064727783202, 0.02244812774658203, 0.022621919631958008, 0.022523967742919922, 0.022403295516967774, 0.022535743713378905, 0.022706623077392577, 
0.022529983520507814, 0.022413375854492188, 0.022488096237182616, 0.02254742431640625, 0.02271820831298828, 0.022708480834960937, 0.022558719635009765, 0.022845439910888672, 0.02299212837219238, 0.022985151290893555, 0.02314681625366211, 0.02274492835998535, 0.022904928207397462, 0.02317932891845703, 0.02335273551940918, 0.02340127944946289, 0.02369308853149414, 0.02330156707763672, 0.023179840087890625, 0.023310176849365233, 0.023406528472900392, 0.02466633605957031, 0.02553241539001465, 0.023427072525024413, 0.02332467269897461, 0.02371583938598633, 0.023381120681762697, 0.02332147216796875, 0.023359487533569336, 0.02327347183227539, 0.02305638313293457, 0.02315673637390137, 0.022980607986450196, 0.023201791763305665, 0.023297632217407226, 0.023073183059692384, 0.022961408615112304, 0.02292108726501465, 0.023008127212524414, 0.023162879943847657, 0.022878911972045897, 0.023035903930664063, 0.023149599075317384, 0.02296112060546875, 0.022750944137573243, 0.022864160537719728, 0.022853631973266602, 0.022785472869873046, 0.022930240631103514, 0.022834943771362304, 0.02286992073059082, 0.02272982406616211, 0.022745567321777342, 0.02273948860168457, 0.022810720443725587, 0.02275017547607422, 0.02269683265686035, 0.022665279388427734, 0.022804128646850587, 0.02288470458984375, 0.022863872528076173, 0.022848831176757813, 0.022831104278564454, 0.022721216201782225, 0.022803936004638672, 0.022800928115844728, 0.022716255187988282, 0.022804224014282226, 0.02270044708251953, 0.022749183654785156, 0.022837247848510742, 0.0229171199798584, 0.02306662368774414, 0.0228351993560791, 0.02291302490234375, 0.022755327224731444, 0.022675455093383787, 0.022771263122558595, 0.022630239486694338, 0.022710880279541015, 0.02588857650756836, 0.023270687103271483, 0.023145248413085937, 0.02287424087524414, 0.023279167175292968, 0.02289504051208496, 0.022962175369262695, 0.022947839736938477, 0.02288025665283203, 0.023117824554443358, 0.022841344833374022, 0.023188703536987303, 0.022886367797851564, 0.02288928031921387, 0.022769760131835938, 0.022800159454345704, 0.022798240661621092, 0.02273695945739746, 0.022755008697509765, 0.022731231689453124, 0.022681472778320312, 0.02276313591003418, 0.022704479217529296, 0.02298307228088379, 0.023236480712890625, 0.02300864028930664, 0.02296486473083496, 0.022880640029907227, 0.023076864242553712, 0.022941503524780273, 0.022769855499267577, 0.022831104278564454, 0.02295804786682129, 0.024036895751953124, 0.02324496078491211, 0.02289084815979004, 0.023017311096191408, 0.02300739288330078, 0.02295964813232422, 0.022729183197021486, 0.022793664932250976, 0.022721023559570314, 0.022695968627929688, 0.022743072509765625, 0.02279209518432617, 0.02290390396118164, 0.022946399688720705, 0.02284979248046875, 0.022835264205932616, 0.02273206329345703, 0.023003360748291016, 0.022998687744140624, 0.02307148742675781, 0.022993087768554688, 0.02323036766052246, 0.02327356719970703, 0.022871679306030273, 0.022890880584716798, 0.022838911056518554, 0.022935935974121094, 0.02304819107055664, 0.022943584442138672, 0.02303932762145996, 0.022964832305908203, 0.023090944290161133, 0.02324937629699707, 0.023379072189331055, 0.023434303283691407, 0.023205696105957033, 0.023136095046997072, 0.023091360092163084, 0.023064031600952148, 0.023112096786499024, 0.023021024703979494, 0.02316464042663574, 0.02315340805053711, 0.023148735046386718, 0.022980607986450196, 0.023238304138183594, 0.022899040222167967, 0.02326643180847168, 0.023016319274902344, 0.023029472351074217, 0.023213472366333008, 
0.023292800903320313, 0.023197696685791015, 0.023254783630371093, 0.023392223358154298, 0.0231461124420166, 0.023152448654174804, 0.023012447357177734, 0.022953727722167968, 0.024614912033081054, 0.02323865509033203, 0.023339008331298827, 0.023226367950439454, 0.02315590476989746, 0.02269660758972168, 0.02261417579650879, 0.022831104278564454, 0.02277168083190918, 0.02261199951171875, 0.02253004837036133, 0.022619359970092772, 0.022501535415649414, 0.022637184143066407, 0.0224451847076416, 0.022616960525512694, 0.02307276725769043, 0.023082271575927734, 0.022919456481933595, 0.02285817527770996, 0.022787200927734376, 0.02287295913696289, 0.022640640258789063, 0.023117824554443358, 0.022733823776245117, 0.022597696304321287, 0.022862783432006838, 0.022623552322387695, 0.022702783584594727, 0.022746368408203124, 0.02274348831176758, 0.023081024169921874, 0.022534400939941406, 0.022618112564086915, 0.022425184249877928, 0.022721120834350586, 0.022442943572998048, 0.022440256118774413, 0.023393888473510743, 0.02252662467956543, 0.022550848007202147, 0.02333695983886719, 0.023077951431274415, 0.022915136337280272, 0.02262841606140137, 0.022571840286254884, 0.022991968154907227, 0.023557024002075197, 0.023440671920776368, 0.02377948760986328, 0.0239968318939209, 0.02384092712402344, 0.023685056686401366, 0.02337183952331543, 0.022970367431640625, 0.022760831832885742, 0.022667903900146485, 0.02261020851135254, 0.022978559494018554, 0.02320115280151367, 0.023087007522583008, 0.022902624130249023, 0.02289955139160156, 0.02304732894897461, 0.02323747253417969, 0.0227587833404541, 0.02275600051879883, 0.022773759841918945, 0.022751232147216797, 0.022937599182128905, 0.022796287536621093, 0.02309868812561035, 0.02379961585998535, 0.023217023849487303, 0.023254655838012697, 0.023240928649902345, 0.023537824630737305, 0.02347001647949219, 0.023275583267211915, 0.023177215576171875, 0.022796287536621093, 0.022724191665649415, 0.02258780860900879, 0.02294937515258789, 0.022656639099121093, 0.022519712448120118, 0.022725183486938475, 0.022487455368041993, 0.022884416580200195, 0.02305836868286133, 0.023044095993041993, 0.0231014404296875, 0.022732799530029296, 0.022773759841918945, 0.02267955207824707, 0.02247475242614746, 0.0224399356842041, 0.022550527572631835, 0.02247065544128418, 0.022495231628417968, 0.022441471099853515, 0.022591007232666015, 0.022679616928100586, 0.023208864212036134, 0.023182464599609376, 0.022620512008666993, 0.022638208389282228, 0.023622560501098632, 0.022513151168823242, 0.022616575241088867, 0.022562816619873048, 0.02278188705444336, 0.022599519729614256, 0.022761695861816405, 0.02271628761291504, 0.0226910400390625, 0.02317366409301758, 0.02264035224914551, 0.02254217529296875, 0.022545215606689453, 0.022417407989501953, 0.022417760848999022, 0.022588607788085937, 0.02262268829345703, 0.022380767822265626, 0.022500736236572266, 0.02242176055908203, 0.02282124710083008, 0.02305001640319824, 0.022845439910888672, 0.02315673637390137, 0.023357440948486328, 0.02319683265686035, 0.022862688064575195, 0.02294988822937012, 0.0226694393157959, 0.02250534439086914, 0.02250547218322754, 0.022498559951782227, 0.022440704345703125, 0.022474079132080077, 0.02252047920227051, 0.022502399444580077, 0.022614879608154295, 0.022539583206176758, 0.02258211135864258, 0.022633920669555663, 0.02272483253479004, 0.02393350410461426, 0.024034143447875977, 0.022805440902709962, 0.02285158348083496, 0.022775808334350587, 0.02288761520385742, 0.022752031326293946, 0.02299295997619629, 
0.022578752517700196, 0.02279052734375, 0.02307075119018555, 0.02318889617919922, 0.02286569595336914, 0.022697792053222657, 0.022932479858398438, 0.022474176406860353, 0.0225118408203125, 0.022571359634399414, 0.022718463897705078, 0.022473791122436523, 0.02267616081237793, 0.02261395263671875, 0.02279846382141113, 0.023103679656982422, 0.022730752944946288, 0.022688959121704103, 0.02278384017944336, 0.023028703689575197, 0.023029760360717775, 0.022718463897705078, 0.022980607986450196, 0.02336467170715332, 0.023022016525268554, 0.022985183715820312, 0.02272662353515625, 0.022693920135498046, 0.02260531234741211, 0.022530208587646483, 0.02261622428894043, 0.02260348892211914, 0.022743999481201174, 0.02265875244140625, 0.023810335159301758, 0.024748064041137694, 0.02292950439453125, 0.022824415206909178, 0.022770111083984374, 0.022875776290893556, 0.022673791885375976, 0.022796287536621093, 0.022609855651855467, 0.022707712173461913, 0.023822463989257813, 0.023067071914672853, 0.02264678382873535, 0.02253379249572754, 0.022455648422241212, 0.022443008422851563, 0.022396160125732423, 0.022386560440063475, 0.02255961608886719, 0.022427648544311524, 0.02257289505004883, 0.022431455612182617, 0.02262633514404297, 0.022487455368041993, 0.0225218563079834, 0.022511615753173828, 0.02255462455749512, 0.02268345642089844, 0.022819007873535156, 0.02263859176635742, 0.022622207641601562, 0.022479103088378905, 0.02255641555786133, 0.022740991592407226, 0.02252783966064453, 0.02250921630859375, 0.022426111221313477, 0.022562240600585936, 0.022599327087402345, 0.02242243194580078, 0.022527999877929687, 0.022598783493041993, 0.02292550468444824, 0.022761503219604493, 0.022528415679931642, 0.022960384368896483, 0.0224849910736084, 0.022642688751220705, 0.022656320571899414, 0.022713024139404295, 0.022680639266967773, 0.022805248260498047, 0.022603296279907228, 0.022523679733276368, 0.02266579246520996, 0.022481216430664062, 0.02253206443786621, 0.022802560806274415, 0.02263654327392578, 0.022552032470703125, 0.022509727478027344, 0.022488639831542968, 0.02245622444152832, 0.02265590476989746, 0.022562816619873048, 0.02260201644897461, 0.02274412727355957, 0.02252217674255371, 0.022538400650024413, 0.02270191955566406, 0.022970464706420897, 0.022735103607177735, 0.022717920303344727, 0.022430240631103517, 0.022487039566040038, 0.022874111175537108, 0.02327142333984375, 0.023277759552001953, 0.02285548782348633, 0.0232992000579834, 0.022801279067993164, 0.023037311553955078, 0.022893184661865233, 0.02309734344482422, 0.024930303573608398, 0.023037952423095705, 0.022843616485595703, 0.022810400009155272, 0.022675455093383787, 0.022595264434814452, 0.02253036880493164, 0.022668928146362306, 0.02262054443359375, 0.022724607467651366, 0.022552576065063477, 0.022366207122802736, 0.023994239807128906, 0.022490304946899416, 0.022397247314453125, 0.022405759811401367, 0.022525056838989258, 0.022395776748657226, 0.02240015983581543, 0.02239164733886719, 0.02243142318725586, 0.022450336456298826, 0.022371999740600584, 0.022433696746826173, 0.022476383209228516, 0.022395904541015626, 0.02242355155944824, 0.02238172721862793, 0.022416223526000978, 0.022614015579223632, 0.022566944122314452, 0.022752704620361327, 0.022630527496337892, 0.022433984756469728, 0.022419679641723634]",tokens/s,43.78772199608627,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2210.312192,2271.084544,0.0,1868.562432,1779.722752,s,1,8.8734150390625,8.8734150390625,0.0,8.8734150390625,8.8734150390625,8.8734150390625,8.8734150390625,[8.8734150390625],,kWh,5.555162824166473e-05,6.120300259281184e-06,1.7204735985987263e-05,7.887666448693318e-05,,MB,2291.687424,2378.039296,0.0,1960.83712,1916.447232,s,10,0.9602471694946287,0.09602471694946288,0.0005314548679986565,0.09622380828857421,0.09644146041870116,0.09657058753967285,0.09667388923645019,"[0.09474278259277344, 0.09572444915771484, 0.09566486358642579, 0.09582342529296875, 0.09621011352539062, 0.09623750305175781, 0.09640054321289063, 0.09641276550292968, 0.09669971466064453, 0.09633100891113282]",tokens/s,2665.98026146467,kWh,2.8034591516668823e-06,3.0917322925711184e-07,1.8717634021714865e-06,4.98439578309548e-06,tokens/kWh,51360287.41301423,MB,2296.0128,2545.811456,0.0,2128.60928,2001.572864,s,10,18.213513549804688,1.8213513549804687,0.20169815502324753,1.9585209350585937,1.992731689453125,1.9977705688476561,2.001801672363281,"[1.5250811767578125, 1.5233592529296875, 1.527644775390625, 1.788803466796875, 1.965163818359375, 1.9588001708984375, 1.971997802734375, 1.9916119384765625, 2.0028094482421874, 1.95824169921875]",tokens/s,34.589701667241236,kWh,4.4147953917500264e-05,4.869201887553396e-06,2.447522989762857e-05,7.349238570268222e-05,tokens/kWh,857231.662812937,,s,630,18.210598930358877,0.028905712587871245,0.003399946626746792,0.03098747253417969,0.03175563201904297,0.031974277210235595,0.03289979751586914,"[0.024432640075683593, 0.02431715202331543, 0.024183807373046876, 0.024159423828125, 0.023928672790527343, 0.02410963249206543, 0.02396793556213379, 0.024219968795776366, 0.02398585510253906, 0.023945215225219727, 0.023994367599487306, 0.024037376403808593, 0.024006656646728516, 0.023961599349975587, 0.024004608154296874, 0.02421753692626953, 0.023971168518066407, 0.024091360092163085, 0.024004608154296874, 0.024145919799804686, 0.024253887176513673, 0.02430828857421875, 0.024240127563476564, 0.024252416610717774, 0.024284160614013672, 0.024158720016479493, 0.024111072540283204, 0.02411369514465332, 0.024025087356567384, 0.024183807373046876, 0.024017568588256835, 0.025530303955078125, 0.024197536468505858, 0.024426143646240236, 0.02456611251831055, 0.024473567962646485, 0.024356576919555666, 0.024426816940307617, 0.024223743438720705, 0.024180736541748047, 0.02407993507385254, 0.02425187110900879, 0.024125888824462892, 0.02419868850708008, 0.02412851142883301, 0.02406809616088867, 0.02413363265991211, 0.024045568466186523, 0.0240897274017334, 0.02407823944091797, 0.02402195167541504, 0.024131519317626953, 0.024074335098266602, 0.024215551376342775, 0.024197120666503907, 0.024203264236450195, 0.024325439453125, 0.024324800491333006, 0.024817663192749022, 0.02427903938293457, 0.024367103576660155, 
0.024270784378051757, 0.0243569278717041, 0.02420569610595703, 0.024240480422973634, 0.024311872482299806, 0.02417024040222168, 0.023914751052856446, 0.023992095947265625, 0.02406012725830078, 0.024186880111694335, 0.024018943786621092, 0.02426371192932129, 0.024218208312988283, 0.024062463760375977, 0.02409359931945801, 0.023980703353881836, 0.02397369575500488, 0.024051904678344727, 0.023990463256835938, 0.024225439071655273, 0.024142303466796876, 0.02411110305786133, 0.024387584686279298, 0.024563711166381837, 0.024560863494873048, 0.024257312774658202, 0.02416815948486328, 0.024152095794677735, 0.02412774467468262, 0.02425651168823242, 0.024061952590942383, 0.023980031967163085, 0.023941120147705077, 0.023944799423217773, 0.023886144638061522, 0.02394291114807129, 0.02390998458862305, 0.024319936752319336, 0.02409760093688965, 0.024002559661865236, 0.02392678451538086, 0.024006656646728516, 0.023942367553710937, 0.023942975997924804, 0.023894752502441406, 0.023965951919555663, 0.023857152938842774, 0.023928800582885743, 0.023896095275878906, 0.024236032485961914, 0.024371135711669923, 0.024469024658203126, 0.024418495178222657, 0.024330015182495116, 0.02525651168823242, 0.024612224578857422, 0.024295200347900392, 0.02419798469543457, 0.023953407287597657, 0.023957504272460937, 0.02395136070251465, 0.023918048858642578, 0.023980575561523436, 0.025159519195556642, 0.025847455978393555, 0.024130815505981444, 0.024210176467895507, 0.024017951965332032, 0.024083423614501952, 0.023903392791748048, 0.024056671142578125, 0.023975839614868166, 0.02417807960510254, 0.023931583404541015, 0.023975616455078126, 0.024086175918579103, 0.02433910369873047, 0.024631103515625, 0.024463552474975586, 0.024184383392333985, 0.024381887435913085, 0.024518655776977538, 0.02455548858642578, 0.0246661434173584, 0.024532127380371093, 0.024277599334716796, 0.024125696182250977, 0.02405887985229492, 0.02394576072692871, 0.02397161674499512, 0.024043296813964842, 0.02409062385559082, 0.02425324821472168, 0.02448307228088379, 0.024447839736938478, 0.024551424026489257, 0.024669727325439452, 0.024561759948730468, 0.024570240020751952, 0.024434688568115235, 0.02405705642700195, 0.024171295166015624, 0.024120864868164064, 0.024132064819335938, 0.024334495544433593, 0.024008544921875, 0.02399558448791504, 0.023927616119384765, 0.02390822410583496, 0.02404159927368164, 0.023942495346069338, 0.024076383590698244, 0.023982912063598632, 0.02391609573364258, 0.024676544189453125, 0.027428640365600585, 0.0244564151763916, 0.02408345603942871, 0.02402899169921875, 0.02403055953979492, 0.023999135971069337, 0.02395359992980957, 0.024023040771484375, 0.024133184432983398, 0.02405830383300781, 0.024403968811035157, 0.024129535675048826, 0.024049663543701173, 0.02424403190612793, 0.02414031982421875, 0.024271007537841796, 0.024223743438720705, 0.024172096252441405, 0.02414022445678711, 0.024027135848999022, 0.02410086441040039, 0.02393017578125, 0.023994560241699218, 0.02400921630859375, 0.024123392105102538, 0.023998464584350586, 0.02395136070251465, 0.023971839904785155, 0.023992319107055664, 0.023941120147705077, 0.024025087356567384, 0.02393087959289551, 0.025308479309082033, 0.0241177921295166, 0.023984287261962892, 0.023941120147705077, 0.023923776626586915, 0.028683296203613283, 0.02908355140686035, 0.03116851234436035, 0.03103721618652344, 0.03120560073852539, 0.031318016052246093, 0.03093903923034668, 0.031342144012451174, 0.031701120376586914, 0.030986656188964845, 0.030851072311401367, 0.030899295806884764, 
0.031191104888916014, 0.03123641586303711, 0.031074848175048828, 0.031045120239257814, 0.0309682559967041, 0.031002687454223632, 0.03119308853149414, 0.03122777557373047, 0.031068288803100585, 0.031020160675048827, 0.031011423110961913, 0.031080608367919923, 0.031549568176269534, 0.03162460708618164, 0.031064640045166014, 0.03121513557434082, 0.031209888458251952, 0.03121561622619629, 0.0314102725982666, 0.03139695930480957, 0.03112335968017578, 0.030895103454589845, 0.030953023910522463, 0.03112588882446289, 0.0309105281829834, 0.031080448150634765, 0.03095756721496582, 0.034810367584228515, 0.03118479919433594, 0.031010368347167968, 0.031063007354736327, 0.03121151924133301, 0.0311910400390625, 0.030944480895996093, 0.031009567260742187, 0.030895103454589845, 0.031153152465820313, 0.031180192947387695, 0.030956127166748046, 0.030891679763793947, 0.03095792007446289, 0.03094691276550293, 0.03140854454040527, 0.031021055221557618, 0.031033344268798828, 0.031064064025878906, 0.031045631408691408, 0.03438310241699219, 0.032406272888183596, 0.03131324768066406, 0.03239593505859375, 0.031070207595825194, 0.030947328567504883, 0.03082582473754883, 0.031107744216918944, 0.030935039520263673, 0.03130121612548828, 0.030996671676635744, 0.030859487533569336, 0.030914560317993164, 0.03082444763183594, 0.030898176193237304, 0.03093891143798828, 0.030933216094970704, 0.03099852752685547, 0.030873600006103515, 0.031123455047607423, 0.030871551513671876, 0.03154438400268555, 0.031189952850341797, 0.031148031234741212, 0.030932159423828126, 0.030946111679077147, 0.031039392471313477, 0.03080953598022461, 0.030900896072387694, 0.03114166450500488, 0.030818527221679687, 0.031301631927490234, 0.031163679122924805, 0.03132489585876465, 0.030936159133911133, 0.031359071731567385, 0.030864191055297852, 0.030911680221557616, 0.030937215805053712, 0.030928800582885742, 0.030946016311645508, 0.03090230369567871, 0.030947328567504883, 0.030955488204956055, 0.030947328567504883, 0.030869504928588868, 0.03100828742980957, 0.03087958335876465, 0.03135087966918945, 0.030967456817626953, 0.03136396789550781, 0.03096575927734375, 0.03105936050415039, 0.031208032608032225, 0.030963712692260743, 0.03103539276123047, 0.03151872062683105, 0.03109622383117676, 0.03116873550415039, 0.031080831527709962, 0.03137104034423828, 0.031381727218627926, 0.03128678321838379, 0.031150592803955077, 0.03146096038818359, 0.03106857681274414, 0.03085011291503906, 0.031036256790161133, 0.031077951431274415, 0.030927295684814452, 0.03140208053588867, 0.03163955116271973, 0.031016960144042968, 0.030818239212036132, 0.030924575805664063, 0.030841119766235353, 0.030912511825561522, 0.03079113578796387, 0.030926368713378907, 0.030952064514160157, 0.031008960723876954, 0.030939327239990235, 0.030830591201782227, 0.03090345573425293, 0.031001440048217775, 0.031054048538208007, 0.031099712371826172, 0.03102614402770996, 0.03176038360595703, 0.030875200271606444, 0.030883615493774413, 0.031002464294433593, 0.03105011177062988, 0.03090598487854004, 0.030898176193237304, 0.030882400512695314, 0.03105990409851074, 0.03125609588623047, 0.0310828800201416, 0.030996864318847656, 0.03132928085327148, 0.03169484710693359, 0.031034048080444337, 0.030922079086303712, 0.030968799591064453, 0.03178838348388672, 0.031049983978271484, 0.03105948829650879, 0.0309039363861084, 0.030976512908935546, 0.030832735061645508, 0.030846080780029296, 0.03091094398498535, 0.031199647903442384, 0.031055871963500976, 0.03119513511657715, 0.03138067245483398, 
0.031390527725219726, 0.031348512649536135, 0.031150304794311523, 0.03101865577697754, 0.0312891845703125, 0.030992895126342773, 0.03125657653808594, 0.031040704727172852, 0.03132921600341797, 0.03121958351135254, 0.03095961570739746, 0.031281152725219724, 0.03147987174987793, 0.03124019241333008, 0.03123142433166504, 0.03161039924621582, 0.03127782440185547, 0.031020992279052733, 0.031163936614990236, 0.031200000762939453, 0.031092575073242187, 0.031008832931518553, 0.0312520637512207, 0.03127142333984375, 0.0313507194519043, 0.03128121566772461, 0.031174047470092774, 0.031222368240356447, 0.031344224929809573, 0.03160105514526367, 0.0317704963684082, 0.031557695388793945, 0.03190947151184082, 0.03179529571533203, 0.03164169692993164, 0.03134275245666504, 0.03138291168212891, 0.03182054328918457, 0.03151667213439941, 0.03152246475219726, 0.03170556831359863, 0.03141353607177735, 0.0313286075592041, 0.031025407791137695, 0.03134873580932617, 0.031119359970092773, 0.03115827178955078, 0.03142860794067383, 0.03127705574035645, 0.031520639419555664, 0.03163475227355957, 0.03184067153930664, 0.03191116714477539, 0.03183200073242187, 0.03155753517150879, 0.03135990333557129, 0.031148031234741212, 0.031035200119018554, 0.0326995849609375, 0.033890655517578125, 0.031377344131469725, 0.031523551940917965, 0.03181324768066406, 0.03174233627319336, 0.0313753604888916, 0.03130748748779297, 0.031136383056640626, 0.03166172790527344, 0.03146956825256347, 0.03316940689086914, 0.032129024505615236, 0.031868160247802736, 0.031548160552978516, 0.03225711822509766, 0.03170393562316894, 0.03183996772766113, 0.031412544250488283, 0.03129689598083496, 0.030982784271240234, 0.03110220718383789, 0.03162393569946289, 0.03121971130371094, 0.031236095428466795, 0.03127827262878418, 0.03137801551818847, 0.03195107269287109, 0.031968416213989256, 0.03178780746459961, 0.03180710411071777, 0.03192630386352539, 0.03141059112548828, 0.03178700828552246, 0.031016960144042968, 0.030893695831298827, 0.031000959396362306, 0.031878656387329105, 0.03124393653869629, 0.030984767913818358, 0.03143708801269531, 0.03170857620239258, 0.03292835235595703, 0.031964479446411134, 0.031554239273071286, 0.03143475151062012, 0.031340543746948245, 0.03142204856872559, 0.031365535736083985, 0.031315359115600586, 0.03171711921691894, 0.03126144027709961, 0.03126896095275879, 0.031311071395874024, 0.031828767776489256, 0.03155353546142578, 0.03134668731689453, 0.0317542724609375, 0.031457279205322264, 0.031178655624389647, 0.03150243186950684, 0.03111523246765137, 0.031217695236206055, 0.03122310447692871, 0.031197887420654297, 0.031110687255859373, 0.03139955139160156, 0.031755104064941406, 0.03195462417602539, 0.03214736175537109, 0.03304841613769531, 0.03401529693603516, 0.03281356811523437, 0.03228828811645508, 0.03238479995727539, 0.032191070556640625, 0.032145503997802735, 0.03214950561523437, 0.03199756813049316, 0.03204719924926758, 0.03185795211791992, 0.031924352645874024, 0.031959039688110355, 0.03158233642578125, 0.03155430412292481, 0.03141529655456543, 0.03132943916320801, 0.03148950386047363, 0.03211008071899414, 0.031979711532592776, 0.03153990364074707, 0.031524864196777344, 0.032, 0.03197952079772949, 0.031845727920532224, 0.03162179183959961, 0.03163862419128418, 0.03159942436218262, 0.031893535614013674, 0.03190377616882324, 0.03172969627380371, 0.031526912689208986, 0.032069278717041017, 0.03227888107299805, 0.032271713256835935, 0.03204390335083008, 0.03232745742797852, 0.031959039688110355, 0.03171043205261231, 
0.03165056037902832, 0.03190294456481933, 0.03170182418823242, 0.03156172752380371, 0.03165305519104004, 0.031841087341308597, 0.031255935668945314, 0.031142528533935548, 0.031066112518310547, 0.03095529556274414, 0.03104115104675293, 0.03114089584350586, 0.031029632568359375, 0.030808095932006837, 0.031232576370239257, 0.031098400115966797, 0.031119840621948242, 0.031033344268798828, 0.031148031234741212, 0.03197907257080078, 0.03282988739013672, 0.031344127655029294, 0.0313568000793457, 0.03123664093017578, 0.031204864501953124, 0.030898656845092774, 0.0311092472076416, 0.031036672592163087, 0.03099110412597656, 0.03129958343505859, 0.030988288879394532, 0.03099139213562012, 0.030943519592285158, 0.030890687942504883, 0.03081769561767578, 0.03092131233215332, 0.03092265510559082, 0.030775392532348633, 0.03096780776977539, 0.03123184013366699, 0.031404191970825196, 0.03158569526672363, 0.030968223571777344, 0.03080825614929199, 0.030858400344848633, 0.030851680755615233, 0.030828800201416016, 0.030914560317993164, 0.031127359390258787, 0.031060159683227537, 0.0310435848236084, 0.03079743957519531, 0.03110335922241211, 0.0311910400390625, 0.031162368774414063, 0.03121561622619629, 0.031010368347167968, 0.031000736236572266, 0.030801183700561525, 0.031054624557495115, 0.03097007942199707, 0.030926015853881834, 0.03087443161010742, 0.030869440078735353, 0.03088595199584961, 0.03084492874145508, 0.0309935359954834, 0.030753664016723633, 0.031047679901123046, 0.031143583297729493, 0.030865760803222658, 0.031006719589233397, 0.03159823989868164, 0.031050079345703124]",tokens/s,34.595237773850876,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1584.709632,1604.190208,0.0,1201.668096,1189.151232,s,1,8.7103603515625,8.7103603515625,0.0,8.7103603515625,8.7103603515625,8.7103603515625,8.7103603515625,[8.7103603515625],,kWh,3.802293766247355e-05,4.1867660681864155e-06,1.1953620673993415e-05,5.416332440465338e-05,,MB,1604.210688,1801.322496,0.0,1384.12032,1351.367168,s,10,0.4132649230957031,0.04132649230957031,0.00020603983807786284,0.041266910552978514,0.04145219039916992,0.04167006301879883,0.04184436111450195,"[0.041887935638427735, 0.041223136901855466, 0.04118329620361328, 0.04140377426147461, 0.041241630554199216, 0.041363391876220706, 0.04117472076416016, 0.04113151931762695, 0.04129219055175781, 
0.041363327026367185]",tokens/s,6194.573642552189,kWh,1.2591889369791742e-06,1.388665947227741e-07,8.346187232500409e-07,2.2326742549519895e-06,tokens/kWh,114660703.15998915,MB,1611.264,1843.265536,0.0,1426.06336,1407.548416,s,10,10.733422241210937,1.0733422241210937,0.0048307207519187245,1.0739862060546874,1.0776815185546875,1.0794766845703125,1.0809128173828126,"[1.0616793212890625, 1.07112255859375, 1.0735338134765624, 1.0744385986328124, 1.0812718505859376, 1.0722564697265624, 1.07509765625, 1.0772825927734375, 1.0754315185546874, 1.071307861328125]",tokens/s,58.695165981742264,kWh,3.126345105843362e-05,3.4478815412109768e-06,1.590875578254956e-05,5.0620088382194155e-05,tokens/kWh,1244565.1916751796,,s,630,10.729833663940438,0.01703148200625465,0.0003127795947548861,0.016963775634765622,0.017254445075988768,0.01743060960769653,0.0182112064743042,"[0.017252735137939453, 0.01695510482788086, 0.01686899185180664, 0.016828895568847656, 0.01681817626953125, 0.016928768157958983, 0.01683046340942383, 0.01672150421142578, 0.01678316879272461, 0.016896608352661133, 0.016786687850952147, 0.016853759765625, 0.016922399520874022, 0.017352863311767577, 0.01684486389160156, 0.01675619125366211, 0.0168371524810791, 0.01679769515991211, 0.016898080825805663, 0.0168570556640625, 0.016867328643798828, 0.01681203269958496, 0.016749664306640624, 0.016808319091796874, 0.016775711059570313, 0.01677926445007324, 0.016782783508300782, 0.016773279190063477, 0.01687196731567383, 0.016904064178466797, 0.016936927795410155, 0.01684230422973633, 0.01676131248474121, 0.01683417510986328, 0.016761215209960937, 0.016766016006469726, 0.01681292724609375, 0.016769088745117188, 0.0167642879486084, 0.016806528091430663, 0.016785408020019533, 0.016786495208740235, 0.01683990478515625, 0.016803552627563476, 0.01674345588684082, 0.016850080490112305, 0.016930303573608398, 0.01683488082885742, 0.016932735443115233, 0.017023103713989258, 0.01681376075744629, 0.017058111190795897, 0.016691200256347655, 0.016781312942504883, 0.0168143367767334, 0.01682419204711914, 0.016805599212646485, 0.01682579231262207, 0.016793472290039062, 0.01687049674987793, 0.016875328063964842, 0.016971616744995116, 0.016830528259277344, 0.017288896560668947, 0.01726131248474121, 0.017272607803344726, 0.016871936798095705, 0.016836448669433592, 0.016969120025634766, 0.01723423957824707, 0.017096351623535157, 0.01707241630554199, 0.016941280364990235, 0.016910207748413085, 0.01680726432800293, 0.01693516731262207, 0.016874015808105467, 0.017059104919433594, 0.017238752365112305, 0.017297407150268555, 0.017168384552001953, 0.017199424743652342, 0.017033119201660157, 0.01703023910522461, 0.016990911483764647, 0.017098751068115235, 0.01704355239868164, 0.017055456161499023, 0.01692486381530762, 0.016973215103149412, 0.016857376098632814, 0.016894271850585937, 0.016898048400878905, 0.016953344345092772, 0.0169881591796875, 0.016957183837890626, 0.016867584228515625, 0.01698406410217285, 0.016914304733276368, 0.01718284797668457, 0.016977216720581053, 0.01695020866394043, 0.01690559959411621, 0.016836383819580077, 0.016996320724487306, 0.017138303756713866, 0.01684889602661133, 0.01721478462219238, 0.016955072402954102, 0.016837631225585938, 0.016910367965698243, 0.01683046340942383, 0.016787424087524414, 0.01680143928527832, 0.016867679595947267, 0.01679155158996582, 0.01702409553527832, 0.0168375358581543, 0.017043327331542967, 0.01717056083679199, 0.017188863754272463, 0.01703891181945801, 0.01698182487487793, 0.016935104370117186, 0.01701865577697754, 
0.017014560699462892, 0.017869024276733397, 0.017099872589111328, 0.017021503448486328, 0.016914783477783205, 0.016906368255615235, 0.01683580780029297, 0.016935583114624023, 0.016940256118774415, 0.016841503143310548, 0.017022975921630858, 0.016893503189086913, 0.017139871597290038, 0.01686147117614746, 0.016838048934936522, 0.016863391876220702, 0.0167890567779541, 0.01688172721862793, 0.017081151962280272, 0.017149696350097655, 0.017020959854125977, 0.016949472427368165, 0.017089855194091796, 0.017064479827880858, 0.01707206344604492, 0.016918752670288088, 0.01690835189819336, 0.016904159545898436, 0.0168734073638916, 0.01684592056274414, 0.016876480102539063, 0.01683216094970703, 0.016925024032592773, 0.016877439498901366, 0.016840255737304688, 0.016841567993164063, 0.016983776092529296, 0.017051904678344727, 0.01703094482421875, 0.017259647369384765, 0.017220447540283203, 0.016930816650390625, 0.016947200775146484, 0.016835903167724608, 0.016831167221069337, 0.016955263137817384, 0.01762112045288086, 0.019949567794799804, 0.017156095504760743, 0.0170614070892334, 0.016925247192382812, 0.016885663986206053, 0.016948415756225587, 0.01694598388671875, 0.01690403175354004, 0.0169268798828125, 0.016900352478027344, 0.016968767166137697, 0.016957855224609374, 0.017131807327270508, 0.01708598327636719, 0.01718320083618164, 0.01698793601989746, 0.01699247932434082, 0.01698486328125, 0.017047552108764647, 0.016879552841186522, 0.01701593589782715, 0.016978879928588868, 0.01700044822692871, 0.01691414451599121, 0.016988447189331055, 0.01711471939086914, 0.01710326385498047, 0.01722265625, 0.017385951995849608, 0.01728268814086914, 0.017189855575561522, 0.017078208923339843, 0.016924768447875976, 0.016881471633911134, 0.01688947105407715, 0.016994783401489258, 0.01694063949584961, 0.016984479904174805, 0.016979007720947265, 0.017017824172973633, 0.016861152648925782, 0.01702400016784668, 0.01696870422363281, 0.016898048400878905, 0.01720924758911133, 0.01699446487426758, 0.017042943954467774, 0.016935007095336914, 0.017079999923706055, 0.016970048904418944, 0.017047679901123047, 0.016864864349365235, 0.017067935943603514, 0.017009056091308594, 0.016998880386352538, 0.01725129508972168, 0.01769923210144043, 0.018577888488769533, 0.017089536666870117, 0.01699500846862793, 0.016859455108642576, 0.01687071990966797, 0.01690880012512207, 0.016906431198120117, 0.016857311248779296, 0.016922592163085937, 0.017147008895874023, 0.016937664031982422, 0.017141759872436522, 0.018198528289794923, 0.01701478385925293, 0.01693062400817871, 0.016899648666381835, 0.01684543991088867, 0.016902143478393555, 0.01684889602661133, 0.01683660888671875, 0.016848480224609375, 0.016875936508178712, 0.016854719161987306, 0.016920576095581053, 0.01694246482849121, 0.01688368034362793, 0.01699292755126953, 0.017096704483032226, 0.01707827186584473, 0.01709014320373535, 0.017166751861572266, 0.017156095504760743, 0.017100799560546876, 0.016937152862548828, 0.016852895736694337, 0.016934816360473632, 0.016860191345214843, 0.016935903549194335, 0.01743436813354492, 0.017770015716552734, 0.017769184112548828, 0.017283071517944337, 0.017043455123901367, 0.016891904830932617, 0.016900096893310547, 0.017041023254394532, 0.016946592330932618, 0.016998783111572265, 0.016964191436767577, 0.01704550361633301, 0.017218784332275392, 0.017136768341064455, 0.01708201599121094, 0.01699648094177246, 0.016891775131225587, 0.016874879837036134, 0.017060031890869142, 0.016890304565429688, 0.01674809646606445, 0.017113536834716798, 
0.017152000427246093, 0.017104192733764647, 0.01726323127746582, 0.0171397762298584, 0.017069311141967775, 0.01704217529296875, 0.019865791320800782, 0.01952339172363281, 0.01786684799194336, 0.01718400001525879, 0.017435327529907226, 0.01704751968383789, 0.01701593589782715, 0.016913280487060547, 0.01711257553100586, 0.01703196716308594, 0.01711471939086914, 0.017053184509277345, 0.01708518409729004, 0.016977792739868165, 0.016988000869750976, 0.017035680770874022, 0.01711692810058594, 0.016910335540771485, 0.016978015899658205, 0.01695359992980957, 0.017382495880126952, 0.017292160034179688, 0.017051679611206055, 0.017160192489624023, 0.017006303787231444, 0.0170534725189209, 0.016947103500366212, 0.016945760726928712, 0.016990207672119142, 0.016920576095581053, 0.016971168518066407, 0.017027904510498047, 0.01682329559326172, 0.016849695205688478, 0.016869375228881836, 0.017102304458618163, 0.016908832550048828, 0.01724006462097168, 0.016936960220336913, 0.016904191970825197, 0.017127424240112304, 0.018247264862060547, 0.018216384887695312, 0.01727574348449707, 0.017003904342651366, 0.017003263473510742, 0.017426015853881836, 0.01691484832763672, 0.016791679382324218, 0.016864896774291992, 0.017019136428833008, 0.01685443115234375, 0.0169007682800293, 0.01692665672302246, 0.01683046340942383, 0.01682204818725586, 0.01682454490661621, 0.016910560607910158, 0.018183967590332032, 0.016949247360229493, 0.01682841682434082, 0.01681407928466797, 0.01681817626953125, 0.01691596794128418, 0.017084672927856447, 0.016840959548950197, 0.01688528060913086, 0.016883935928344727, 0.016842111587524414, 0.01687612724304199, 0.01691062355041504, 0.01697171211242676, 0.01690630340576172, 0.016867328643798828, 0.016816415786743165, 0.01685798454284668, 0.016816160202026368, 0.016835391998291014, 0.01698739242553711, 0.016849824905395508, 0.016848703384399415, 0.017206367492675782, 0.0168703670501709, 0.017008256912231446, 0.016848512649536133, 0.016941055297851563, 0.017017824172973633, 0.016888992309570312, 0.017787519454956054, 0.01701683235168457, 0.016891904830932617, 0.0169881591796875, 0.017139711380004884, 0.01710665512084961, 0.01730793571472168, 0.017063936233520507, 0.01705727958679199, 0.017259008407592775, 0.01706991958618164, 0.01707638359069824, 0.017209344863891602, 0.017102848052978514, 0.017108160018920897, 0.017056480407714843, 0.01699440002441406, 0.017102176666259766, 0.01705232048034668, 0.017082368850708008, 0.017258176803588866, 0.017180992126464845, 0.017172479629516603, 0.01718272018432617, 0.016982015609741212, 0.01700454330444336, 0.017133567810058595, 0.017276159286499025, 0.016917024612426758, 0.017144031524658203, 0.016893951416015626, 0.01746678352355957, 0.017193567276000975, 0.017090112686157226, 0.016892127990722657, 0.016989599227905272, 0.0170579833984375, 0.017478271484375, 0.016932096481323242, 0.016916576385498046, 0.017014495849609373, 0.01705379295349121, 0.016904415130615233, 0.017201791763305663, 0.016990207672119142, 0.01704960060119629, 0.016893951416015626, 0.016900224685668944, 0.01678451156616211, 0.016845151901245116, 0.01696335983276367, 0.0168088321685791, 0.01679641532897949, 0.016976415634155275, 0.0168720645904541, 0.016991968154907226, 0.01701696014404297, 0.01705289649963379, 0.01717308807373047, 0.017047552108764647, 0.016967456817626955, 0.017158048629760742, 0.017403999328613282, 0.017170656204223634, 0.01700454330444336, 0.01702409553527832, 0.01692560005187988, 0.01702217674255371, 0.016925567626953124, 0.016888832092285155, 0.01693788719177246, 
0.017151327133178712, 0.0170482234954834, 0.017036928176879882, 0.016922048568725586, 0.01689491271972656, 0.016903999328613282, 0.01687571144104004, 0.01698771286010742, 0.016914176940917968, 0.01694380760192871, 0.01681612777709961, 0.016939008712768554, 0.01723391914367676, 0.0170446720123291, 0.01702524757385254, 0.016988000869750976, 0.01685068893432617, 0.016870208740234375, 0.01688355255126953, 0.016838464736938476, 0.01682076835632324, 0.016901567459106447, 0.01694572830200195, 0.016904191970825197, 0.01698406410217285, 0.017031167984008787, 0.01705779266357422, 0.016996000289916994, 0.0169039363861084, 0.016965568542480467, 0.016929439544677734, 0.017306623458862306, 0.01991983985900879, 0.01752252769470215, 0.017381568908691407, 0.017254400253295898, 0.01721340751647949, 0.017159296035766602, 0.017089056015014648, 0.01714009666442871, 0.017118240356445314, 0.01722390365600586, 0.017424671173095704, 0.017756639480590822, 0.017092607498168946, 0.01700044822692871, 0.01693903923034668, 0.01703727912902832, 0.01700044822692871, 0.017117183685302736, 0.01729097557067871, 0.017171295166015624, 0.01781545639038086, 0.017098880767822264, 0.017005823135375978, 0.017039360046386717, 0.017135423660278322, 0.01708742332458496, 0.017534975051879884, 0.01782374382019043, 0.017473535537719728, 0.017509376525878906, 0.017314720153808593, 0.017483583450317385, 0.017171743392944337, 0.017271808624267578, 0.017176160812377928, 0.017122880935668945, 0.016992639541625977, 0.016986591339111328, 0.016982080459594727, 0.017157632827758788, 0.017111488342285156, 0.017022016525268555, 0.01694611167907715, 0.017277088165283203, 0.017059295654296876, 0.016963264465332032, 0.017027456283569335, 0.016916160583496095, 0.017156736373901367, 0.0168603515625, 0.016928863525390626, 0.016927040100097657, 0.016776832580566406, 0.016902015686035155, 0.016845056533813477, 0.01695939254760742, 0.018026399612426757, 0.016835071563720702, 0.01680624008178711, 0.016906240463256835, 0.016910335540771485, 0.01689299201965332, 0.016831552505493164, 0.017008512496948243, 0.01682022476196289, 0.016879615783691407, 0.016846656799316406, 0.01688595199584961, 0.016936128616333007, 0.016851776123046874, 0.01688175964355469, 0.016861087799072267, 0.016832639694213867, 0.016794527053833008, 0.017283103942871095, 0.016837568283081056, 0.016895103454589843, 0.017465568542480468, 0.016915103912353516, 0.01681817626953125, 0.016861183166503906, 0.016893951416015626, 0.01686720085144043, 0.016897727966308593, 0.01725484848022461, 0.01692803192138672, 0.01684489631652832, 0.016912927627563478, 0.016918079376220703, 0.016882207870483397, 0.017084415435791016, 0.01693280029296875, 0.01770502471923828, 0.01699951934814453, 0.01694358444213867, 0.01688175964355469, 0.01681852722167969, 0.016865280151367186, 0.01692780876159668, 0.01674131202697754, 0.01686297607421875, 0.016808448791503908, 0.01683328056335449, 0.016896896362304688, 0.016969856262207032, 0.016924448013305664, 0.017074432373046875, 0.016945119857788085, 0.016989408493041994, 0.01683126449584961, 0.017006752014160156, 0.01693065643310547, 0.01717452812194824, 0.017059839248657227, 0.016906400680541993, 0.016761728286743164, 0.016843328475952147, 0.0167346248626709, 0.01683456039428711, 0.016861183166503906, 0.016968767166137697, 0.01711609649658203, 0.01732918357849121, 0.017197439193725586, 0.017230432510375978, 0.01740595245361328, 0.017324031829833983, 0.017115135192871094, 0.017020736694335938, 0.01689414405822754, 0.0172227840423584, 0.0170382080078125, 
0.016900224685668944, 0.016942975997924804, 0.016963264465332032, 0.016939327239990233, 0.017031167984008787, 0.016969728469848632, 0.016865280151367186, 0.01749622344970703, 0.01710883140563965, 0.016858720779418947, 0.016988576889038084, 0.01705369567871094]",tokens/s,58.714796494677294,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4294.79936,4886.233088,0.0,4483.710976,4465.672704,s,1,11.3411953125,11.3411953125,0.0,11.3411953125,11.3411953125,11.3411953125,11.3411953125,[11.3411953125],,kWh,0.00010723141636664573,1.1816384344187595e-05,3.3593082430005694e-05,0.00015264088314083903,,MB,2153.345024,5309.857792,0.0,4892.655616,4837.669376,s,10,1.829616714477539,0.1829616714477539,0.0006672803014414367,0.18289334106445312,0.18363573455810547,0.18390239334106445,0.18411572036743162,"[0.1817277069091797, 0.18255628967285156, 0.18336026000976563, 0.1824678039550781, 0.1824938507080078, 0.18301922607421875, 0.1827674560546875, 0.18416905212402343, 0.1834785919189453, 0.18357647705078126]",tokens/s,1399.2001601991417,kWh,5.329781003485523e-06,5.875646131138243e-07,3.535977576254255e-06,9.453323192853603e-06,tokens/kWh,27080423.971278954,MB,2157.24032,5477.629952,0.0,5060.427776,5014.227968,s,10,19.27095361328125,1.9270953613281254,0.003907943556371394,1.928769775390625,1.9308436767578125,1.9309702392578125,1.9310714892578125,"[1.926625732421875, 1.930058349609375, 1.928083251953125, 1.929456298828125, 1.9205299072265625, 1.930412353515625, 1.921873779296875, 1.9308155517578125, 1.9310968017578125, 1.9220015869140625]",tokens/s,32.691687844955084,kWh,4.6519011892763826e-05,5.129247808477372e-06,3.103332785694766e-05,8.268158755818885e-05,tokens/kWh,761959.2446221774,,s,630,19.268220180511484,0.030584476477002343,0.0004521787380282983,0.030477583885192873,0.030909994125366214,0.03117836618423462,0.03220870994567871,"[0.03178332710266113, 0.030823551177978515, 0.030554399490356446, 0.030832767486572266, 0.030376415252685546, 0.030687007904052734, 0.030613727569580078, 0.03100876808166504, 0.030917856216430666, 0.030863391876220704, 0.0306429443359375, 0.03046553611755371, 0.030544384002685547, 0.030478336334228515, 0.03041279983520508, 0.03054547119140625, 0.030280128479003906, 0.030507007598876954, 0.03041862487792969, 0.030425407409667968, 0.03042508888244629, 0.03041689682006836, 0.03056844711303711, 0.030317920684814453, 0.030479007720947266, 0.030402111053466796, 0.030429567337036133, 0.03023468780517578, 0.030436767578125, 0.030446176528930665, 0.030449663162231445, 0.030332927703857423, 0.03035238456726074, 0.030490720748901367, 0.030417823791503908, 0.030390272140502928, 0.03043667221069336, 0.030563007354736327, 0.03031622314453125, 0.030388032913208008, 0.030594911575317383, 0.030390272140502928, 0.030548479080200194, 0.03095363235473633, 0.03139993667602539, 0.030994272232055663, 0.03077340888977051, 0.031098880767822266, 
0.03096780776977539, 0.030717952728271485, 0.030692895889282226, 0.03075695991516113, 0.030508447647094726, 0.03047270393371582, 0.03069366455078125, 0.03039660835266113, 0.030283039093017577, 0.030328704833984376, 0.03027571105957031, 0.030210784912109375, 0.031229951858520507, 0.03023027229309082, 0.03036310386657715, 0.03065667152404785, 0.03055001640319824, 0.030482431411743165, 0.03181158447265625, 0.03096940803527832, 0.036151744842529296, 0.030889024734497072, 0.030708192825317383, 0.03054982376098633, 0.030511775970458985, 0.03042416000366211, 0.030413728713989258, 0.030385663986206055, 0.030341119766235353, 0.03030886459350586, 0.03112073516845703, 0.030284448623657225, 0.030259199142456054, 0.030369792938232422, 0.030291967391967774, 0.03034707260131836, 0.03025446319580078, 0.03033087921142578, 0.03035615921020508, 0.030281471252441405, 0.030363935470581055, 0.03055001640319824, 0.030453727722167968, 0.03039039993286133, 0.030502912521362304, 0.03093049621582031, 0.030452159881591796, 0.030900224685668946, 0.03080169677734375, 0.030709983825683594, 0.03057459259033203, 0.030498271942138673, 0.030476831436157228, 0.030492671966552733, 0.030457279205322266, 0.030789567947387696, 0.031066751480102538, 0.030711584091186524, 0.030628063201904296, 0.030449663162231445, 0.030521343231201172, 0.030619359970092772, 0.030427263259887694, 0.03042729568481445, 0.030423040390014647, 0.030498464584350585, 0.030511455535888674, 0.03042460823059082, 0.03039468765258789, 0.030601375579833983, 0.030373888015747072, 0.030537311553955077, 0.030253471374511717, 0.030369792938232422, 0.03056390380859375, 0.030584575653076172, 0.030509183883666992, 0.03050553512573242, 0.031164384841918944, 0.030699871063232423, 0.03047216033935547, 0.030322399139404297, 0.03031881523132324, 0.030632192611694337, 0.030729984283447264, 0.030624479293823243, 0.03047145652770996, 0.03056915283203125, 0.030844959259033203, 0.030691328048706053, 0.03095756721496582, 0.030676671981811524, 0.030474559783935547, 0.030428768157958985, 0.030368160247802735, 0.03036319923400879, 0.030529983520507814, 0.030334911346435546, 0.030281791687011717, 0.030623743057250977, 0.030312448501586913, 0.03078348731994629, 0.030363616943359376, 0.032116767883300784, 0.03069536018371582, 0.030678176879882814, 0.03053865623474121, 0.03040870475769043, 0.030724096298217773, 0.03056844711303711, 0.030543615341186523, 0.03065228843688965, 0.030585216522216796, 0.03040460777282715, 0.03046348762512207, 0.03042870330810547, 0.030475231170654298, 0.03078963279724121, 0.0308403205871582, 0.030789503097534178, 0.030960256576538087, 0.03039174461364746, 0.03033657646179199, 0.030307327270507813, 0.030418272018432616, 0.030632608413696288, 0.03039823913574219, 0.03037552070617676, 0.030322784423828124, 0.030443647384643554, 0.030339487075805666, 0.030319744110107422, 0.03031462478637695, 0.030349119186401367, 0.030345823287963865, 0.03129923248291016, 0.031548095703125, 0.0309616641998291, 0.03060736083984375, 0.030625343322753906, 0.03077974319458008, 0.03080784034729004, 0.030635871887207032, 0.030568416595458985, 0.03084476852416992, 0.030620384216308593, 0.030476287841796876, 0.030644479751586913, 0.030552064895629883, 0.030445568084716795, 0.030522432327270508, 0.030327520370483398, 0.030482656478881837, 0.030373888015747072, 0.030443519592285157, 0.030316543579101563, 0.030330495834350588, 0.03027801513671875, 0.03028326416015625, 0.030660831451416015, 0.03060940742492676, 0.030619935989379884, 0.030352832794189454, 0.03031622314453125, 
0.03020889663696289, 0.030552064895629883, 0.031336448669433595, 0.03035955238342285, 0.030475391387939452, 0.03063897514343262, 0.030504959106445313, 0.03042099189758301, 0.030486528396606444, 0.03032678413391113, 0.03040870475769043, 0.0303636474609375, 0.03037593650817871, 0.030418176651000977, 0.033501216888427734, 0.030785247802734374, 0.031013599395751955, 0.03035276794433594, 0.030575328826904297, 0.030345407485961914, 0.03042508888244629, 0.030765056610107422, 0.030887935638427736, 0.030330751419067385, 0.030393535614013673, 0.03049158477783203, 0.030360864639282226, 0.030315231323242188, 0.03052524757385254, 0.032210464477539065, 0.031596256256103517, 0.030880704879760742, 0.031108320236206053, 0.030481184005737304, 0.03056025505065918, 0.03051468849182129, 0.030429632186889648, 0.030684736251831053, 0.03067750358581543, 0.030582208633422852, 0.030943744659423827, 0.030469215393066407, 0.030445632934570314, 0.03079782485961914, 0.030397151947021483, 0.030500768661499023, 0.030427040100097655, 0.030478656768798826, 0.03048796844482422, 0.03038470458984375, 0.030611488342285158, 0.03037593650817871, 0.030449663162231445, 0.03056435203552246, 0.03049193572998047, 0.030425823211669922, 0.030666751861572264, 0.03035545539855957, 0.030648319244384766, 0.03043292808532715, 0.030390207290649413, 0.030517663955688477, 0.03035136032104492, 0.030513120651245118, 0.030253087997436524, 0.030310400009155275, 0.03084492874145508, 0.030242816925048828, 0.030373888015747072, 0.030332927703857423, 0.03038822364807129, 0.030491743087768555, 0.030507936477661132, 0.030619647979736327, 0.030369792938232422, 0.030382080078125, 0.03057049560546875, 0.03042099189758301, 0.03037798309326172, 0.030368896484375, 0.03059596824645996, 0.030459903717041017, 0.030213632583618165, 0.030345184326171875, 0.03026383972167969, 0.03029350471496582, 0.030466560363769532, 0.030619647979736327, 0.030602848052978516, 0.030533151626586916, 0.030613536834716796, 0.03066111946105957, 0.030476383209228516, 0.03052774429321289, 0.03054591941833496, 0.030420736312866212, 0.03088755226135254, 0.030374528884887696, 0.03086089515686035, 0.03055449676513672, 0.030309919357299805, 0.030348991394042967, 0.0302989444732666, 0.030779903411865234, 0.030801599502563476, 0.030909120559692384, 0.030625120162963867, 0.03056912040710449, 0.030365695953369142, 0.030528799057006836, 0.030350048065185545, 0.030410751342773438, 0.03043436813354492, 0.030591936111450196, 0.030466047286987305, 0.030642175674438478, 0.030569696426391603, 0.030433376312255858, 0.030538335800170898, 0.030345312118530275, 0.03041279983520508, 0.030519296646118164, 0.03058687973022461, 0.03058892822265625, 0.030588287353515625, 0.03050150489807129, 0.03033193588256836, 0.03045475196838379, 0.03072204780578613, 0.03100467109680176, 0.030650367736816408, 0.030865407943725585, 0.031186431884765626, 0.0312642879486084, 0.031257568359375, 0.03135078430175781, 0.031464479446411134, 0.031175392150878906, 0.03125683212280273, 0.031061376571655273, 0.031174720764160155, 0.03139641571044922, 0.030590431213378906, 0.03043791961669922, 0.03057254409790039, 0.03170918464660644, 0.030420127868652343, 0.030780256271362303, 0.03056435203552246, 0.03038822364807129, 0.030633983612060548, 0.030377632141113282, 0.03032428741455078, 0.03032143974304199, 0.030328832626342773, 0.03031449508666992, 0.030298080444335938, 0.03055414390563965, 0.030371328353881837, 0.030273727416992188, 0.03021798324584961, 0.030369375228881838, 0.0302458553314209, 0.03032406425476074, 0.030273920059204103, 
0.03027756881713867, 0.031180799484252928, 0.03078758430480957, 0.030455808639526367, 0.03050444793701172, 0.030330720901489257, 0.030309024810791015, 0.030410751342773438, 0.03040870475769043, 0.03040460777282715, 0.03037183952331543, 0.030328832626342773, 0.030500864028930662, 0.03095961570739746, 0.030590816497802733, 0.030242271423339843, 0.030359935760498048, 0.030275680541992187, 0.030777183532714844, 0.030386560440063475, 0.030185152053833007, 0.030408287048339845, 0.03025584030151367, 0.030267391204833984, 0.03062700843811035, 0.030404767990112304, 0.030312768936157225, 0.030481855392456056, 0.03049139213562012, 0.030471935272216796, 0.030538143157958983, 0.03039948844909668, 0.030286848068237306, 0.030846975326538087, 0.030393407821655272, 0.030788543701171876, 0.03061759948730469, 0.030502912521362304, 0.03036774444580078, 0.030439424514770507, 0.030308351516723633, 0.03039232063293457, 0.030507007598876954, 0.030346303939819335, 0.030473152160644532, 0.031123455047607423, 0.0304005126953125, 0.030484479904174806, 0.030322559356689455, 0.030294048309326173, 0.03061155128479004, 0.030648319244384766, 0.031055328369140624, 0.030437759399414063, 0.030400672912597657, 0.03029337692260742, 0.030304895401000977, 0.030449663162231445, 0.030203903198242187, 0.030410751342773438, 0.030346944808959962, 0.030652639389038085, 0.031215711593627928, 0.031186559677124023, 0.031336799621582034, 0.030621696472167968, 0.030310400009155275, 0.030640127182006836, 0.030504959106445313, 0.030484479904174806, 0.030582399368286134, 0.03449689483642578, 0.030684288024902345, 0.03075542449951172, 0.030463775634765624, 0.030388608932495117, 0.03043084716796875, 0.03047270393371582, 0.030590879440307618, 0.03061564826965332, 0.03057459259033203, 0.030717792510986327, 0.03046006393432617, 0.030328832626342773, 0.030321792602539064, 0.030354303359985353, 0.030287839889526366, 0.030283584594726562, 0.03056857681274414, 0.030530912399291992, 0.030571264266967775, 0.030457183837890624, 0.03047248077392578, 0.030524831771850586, 0.030783552169799805, 0.030317087173461914, 0.030680639266967773, 0.03088467216491699, 0.03085843276977539, 0.030816287994384767, 0.03039516830444336, 0.03038412857055664, 0.03053932762145996, 0.031526464462280274, 0.03445235061645508, 0.03072204780578613, 0.03055615997314453, 0.030416799545288087, 0.03041904067993164, 0.030250816345214843, 0.030353248596191405, 0.030832256317138672, 0.03067363166809082, 0.030341056823730467, 0.030312320709228516, 0.030349504470825194, 0.030482431411743165, 0.0303176326751709, 0.03031545639038086, 0.03029145622253418, 0.030280191421508788, 0.030365695953369142, 0.03058073616027832, 0.03027507209777832, 0.030370271682739258, 0.030248992919921874, 0.030328832626342773, 0.03230534362792969, 0.03115007972717285, 0.030762975692749023, 0.030697504043579103, 0.030434879302978515, 0.03034976005554199, 0.030398464202880858, 0.030445568084716795, 0.03033625602722168, 0.030395135879516602, 0.030486528396606444, 0.03038822364807129, 0.03205855941772461, 0.03105670356750488, 0.030350751876831054, 0.030316703796386717, 0.030288320541381836, 0.030259199142456054, 0.031102720260620116, 0.03032089614868164, 0.03262428665161133, 0.03077292823791504, 0.03046668815612793, 0.030527360916137697, 0.030353567123413087, 0.03038617515563965, 0.03079100799560547, 0.030503551483154298, 0.030527519226074218, 0.03041279983520508, 0.0303636474609375, 0.030506847381591796, 0.030955680847167967, 0.031098880767822266, 0.03220441436767578, 0.03072198486328125, 0.030635648727416993, 
0.03054038429260254, 0.030611679077148436, 0.030283775329589844, 0.030411903381347655, 0.03059391975402832, 0.03059097671508789, 0.030766496658325194, 0.03069603157043457, 0.030465280532836914, 0.03060383987426758, 0.03046214485168457, 0.030317983627319335, 0.03025884819030762, 0.030412832260131837, 0.030501792907714844, 0.030332927703857423, 0.030443519592285157, 0.030672895431518556, 0.030508031845092775, 0.03059391975402832, 0.03027120018005371, 0.030245183944702148, 0.030681184768676758, 0.030762880325317384, 0.03054755210876465, 0.030480928421020508, 0.031389152526855466, 0.030761503219604493, 0.030294015884399415, 0.03037932777404785, 0.030298816680908204, 0.0304005126953125, 0.030522687911987305, 0.030353567123413087, 0.030609344482421873, 0.030359872817993162, 0.030300447463989258, 0.03037593650817871, 0.030472192764282226, 0.03032803153991699, 0.03030505561828613, 0.030891231536865234, 0.030264095306396486, 0.03032678413391113, 0.030502399444580077, 0.030831104278564454, 0.03042451286315918, 0.03035385513305664, 0.030217599868774415, 0.03046665573120117, 0.030535839080810548, 0.03026857566833496, 0.03044028854370117, 0.0303853759765625, 0.03056719970703125, 0.030848608016967774, 0.030979488372802736, 0.030667743682861327, 0.030539712905883788, 0.030685279846191408, 0.03080806350708008, 0.030462976455688476, 0.030327199935913086, 0.030374496459960938, 0.03061721611022949, 0.030622079849243165, 0.030956575393676758, 0.03057094383239746, 0.030329183578491212, 0.030314687728881837, 0.030464000701904297, 0.030320640563964843, 0.03036774444580078, 0.030717952728271485, 0.03052672004699707, 0.030322656631469727, 0.030511903762817382, 0.030443519592285157, 0.030515167236328126, 0.030459455490112305, 0.030312639236450195, 0.030601503372192383, 0.03058278465270996, 0.03034726333618164, 0.030445568084716795, 0.030298112869262695, 0.03040460777282715, 0.0308503360748291, 0.030515552520751953]",tokens/s,32.69632556084257,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run 
self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: 
FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1047.318528,912.130048,0.0,509.607936,491.434496,s,1,7.8931640625,7.8931640625,0.0,7.8931640625,7.8931640625,7.8931640625,7.8931640625,[7.8931640625],,kWh,2.3784416454138106e-05,2.615132943439747e-06,7.312783628005137e-06,3.3712333025582986e-05,,MB,1374.629888,1025.376256,0.0,608.17408,592.24832,s,10,0.20221363067626955,0.020221363067626955,0.0009067938122947693,0.019965807914733888,0.020348537445068357,0.021637165069580078,0.022668067169189454,"[0.020027040481567383, 0.019763359069824217, 0.02000592041015625, 0.019802431106567382, 0.019925695419311523, 0.0200064640045166, 0.0229257926940918, 0.019828832626342774, 0.020062175750732422, 0.01986591911315918]",tokens/s,12659.87852272129,kWh,5.841911631499443e-07,6.442588114472575e-08,3.678780720799968e-07,1.016495116374667e-06,tokens/kWh,251845774.63886377,MB,1388.085248,1040.05632,0.0,622.854144,605.085696,s,10,10.521125610351561,1.052112561035156,0.002799083005156702,1.0531063232421873,1.0548631958007813,1.0554110290527343,1.0558492956542969,"[1.050134033203125, 1.0559588623046876, 1.054447265625, 1.0477164306640625, 1.0531734619140625, 1.0530391845703124, 1.0489066162109375, 1.0542381591796874, 1.054741455078125, 1.0487701416015625]",tokens/s,59.8795246185592,kWh,3.0891509911847134e-05,3.40682836533891e-06,1.2174020850319656e-05,4.647235912750569e-05,tokens/kWh,1355644.541890968,,s,630,10.515767957687368,0.016691695170932348,0.0002803327339042266,0.0166222562789917,0.01691523494720459,0.017135252571105954,0.01784000793457032,"[0.016380767822265625, 0.016780960083007813, 0.016632160186767577, 0.016550912857055664, 0.016516096115112306, 0.016520320892333986, 0.016935808181762695, 0.017348608016967772, 0.01657241630554199, 0.016450719833374025, 0.016581151962280275, 0.016547712326049804, 0.016815872192382814, 0.016505279541015626, 0.01681657600402832, 0.016695072174072265, 0.016619327545166016, 0.016633056640625, 0.016667135238647462, 0.01722777557373047, 0.016757247924804687, 0.01666771125793457, 0.016565183639526367, 0.016578559875488282, 0.01658470344543457, 0.01663599967956543, 0.01656812858581543, 0.01670479965209961, 0.01682009506225586, 0.01673116874694824, 0.016596895217895508, 0.016469856262207032, 0.01659427261352539, 0.01664031982421875, 0.016595327377319335, 0.016590911865234374, 0.016670112609863282, 0.0166910400390625, 0.016765247344970702, 0.01670604705810547, 0.016588703155517578, 0.01672764778137207, 0.016765439987182617, 0.016823520660400392, 0.016906463623046875, 0.016689727783203125, 0.016713951110839845, 0.01663369560241699, 0.016564159393310546, 0.01663759994506836, 0.016555519104003907, 0.016612192153930665, 0.01653753662109375, 0.016539520263671875, 0.016572608947753906, 
0.016613632202148437, 0.016850656509399414, 0.01660316848754883, 0.016557376861572267, 0.01661612892150879, 0.01664543914794922, 0.01658131217956543, 0.016547136306762696, 0.016222240447998047, 0.016564224243164064, 0.01654278373718262, 0.016564992904663085, 0.016546079635620117, 0.016523231506347658, 0.016410144805908203, 0.01658323287963867, 0.016647359848022462, 0.01657923126220703, 0.016886783599853517, 0.01700931167602539, 0.017004512786865236, 0.017025279998779296, 0.01695961570739746, 0.016698591232299803, 0.016708383560180663, 0.016563711166381837, 0.016656736373901367, 0.01690025520324707, 0.017283296585083006, 0.017351648330688477, 0.01760700798034668, 0.017610303878784178, 0.017590208053588866, 0.017629919052124025, 0.017310047149658205, 0.01720924758911133, 0.01727289581298828, 0.01675993537902832, 0.016640832901000976, 0.016578399658203125, 0.016508159637451173, 0.016516000747680663, 0.016444704055786134, 0.016535999298095704, 0.016480287551879882, 0.01655625534057617, 0.016525344848632814, 0.016750591278076172, 0.016693248748779296, 0.016713983535766603, 0.01667580795288086, 0.016538175582885742, 0.016547679901123047, 0.016498655319213868, 0.016572128295898436, 0.016489152908325196, 0.016856447219848632, 0.01702521514892578, 0.016562400817871095, 0.01682454490661621, 0.016649215698242188, 0.01662054443359375, 0.01656595230102539, 0.016557376861572267, 0.016700672149658202, 0.016664480209350584, 0.01655792045593262, 0.01658880043029785, 0.01650864028930664, 0.016519519805908205, 0.01675052833557129, 0.01627136039733887, 0.016736255645751954, 0.016662431716918946, 0.016621664047241212, 0.01669055938720703, 0.01667750358581543, 0.016664575576782227, 0.01651055908203125, 0.016549823760986328, 0.016500192642211912, 0.016898624420166014, 0.01659539222717285, 0.017922048568725587, 0.017612255096435547, 0.017145952224731444, 0.018630815505981446, 0.01679782485961914, 0.016779008865356444, 0.016991968154907226, 0.01692742347717285, 0.016750591278076172, 0.016626880645751952, 0.016743104934692384, 0.01663724708557129, 0.016620351791381837, 0.016605215072631838, 0.016625631332397462, 0.0165928955078125, 0.016577760696411134, 0.016681760787963868, 0.016533504486083983, 0.01654902458190918, 0.016565088272094727, 0.0165928955078125, 0.0164881591796875, 0.016615711212158202, 0.016664575576782227, 0.01676288032531738, 0.016613439559936525, 0.016793535232543944, 0.01666598320007324, 0.01664064025878906, 0.016652288436889647, 0.016649791717529297, 0.01651737594604492, 0.016566463470458984, 0.016748544692993163, 0.016621408462524415, 0.016545951843261717, 0.01659436798095703, 0.01671174430847168, 0.01696512031555176, 0.016642559051513673, 0.01675315284729004, 0.016523263931274415, 0.016562175750732423, 0.0167607364654541, 0.016500831604003906, 0.016861183166503906, 0.01680201530456543, 0.01668611145019531, 0.016538368225097657, 0.016545791625976563, 0.016180992126464844, 0.016689504623413086, 0.016539648056030275, 0.01652128028869629, 0.016531391143798826, 0.016570367813110352, 0.016547040939331056, 0.016486719131469728, 0.016476640701293944, 0.016490400314331053, 0.016531072616577148, 0.01653536033630371, 0.0165316162109375, 0.016767711639404298, 0.016912160873413087, 0.01742464065551758, 0.0169736328125, 0.016852928161621095, 0.016648096084594728, 0.016670143127441406, 0.016687231063842775, 0.016773664474487304, 0.0165164794921875, 0.01651980781555176, 0.016523296356201173, 0.016539615631103517, 0.016592863082885743, 0.01650716781616211, 0.016520959854125977, 0.016477695465087892, 
0.016529504776000976, 0.016488895416259766, 0.01649580764770508, 0.016540224075317383, 0.01656768035888672, 0.016528224945068358, 0.016563615798950194, 0.01669590377807617, 0.016639999389648438, 0.01663795280456543, 0.016558080673217773, 0.016590143203735353, 0.016659135818481444, 0.016517120361328123, 0.016494592666625976, 0.01643283271789551, 0.016500608444213867, 0.017413951873779296, 0.01670412826538086, 0.016693248748779296, 0.016736255645751954, 0.016633087158203125, 0.016578559875488282, 0.016638368606567384, 0.01658915138244629, 0.01676438331604004, 0.016725919723510743, 0.016585344314575194, 0.016596031188964844, 0.01659343910217285, 0.016588672637939453, 0.016796192169189452, 0.01661337661743164, 0.01679952049255371, 0.016644927978515626, 0.016920448303222656, 0.016486080169677734, 0.01651696014404297, 0.016499008178710937, 0.01651091194152832, 0.016544992446899415, 0.016492448806762695, 0.01654015922546387, 0.0166014404296875, 0.016789087295532225, 0.016804384231567382, 0.016621023178100585, 0.016675327301025392, 0.016852767944335937, 0.01664364814758301, 0.01656857681274414, 0.016591264724731446, 0.016861183166503906, 0.016965791702270507, 0.01683030319213867, 0.01679155158996582, 0.016842752456665038, 0.01676723289489746, 0.01682377624511719, 0.016734495162963867, 0.01653555107116699, 0.016519168853759765, 0.016661792755126952, 0.016523263931274415, 0.01654425621032715, 0.01659516716003418, 0.01658399963378906, 0.016644800186157226, 0.016633056640625, 0.016605983734130858, 0.016517120361328123, 0.016536767959594727, 0.016696128845214844, 0.016695232391357423, 0.016587936401367187, 0.016673471450805662, 0.016482336044311523, 0.016644287109375, 0.01660723114013672, 0.016547840118408205, 0.016631135940551756, 0.019049280166625975, 0.016885599136352538, 0.016899391174316405, 0.01668764877319336, 0.0167425594329834, 0.016616800308227538, 0.016705888748168946, 0.016666112899780275, 0.016874303817749025, 0.01678870391845703, 0.016519968032836913, 0.016748544692993163, 0.016576255798339844, 0.016914655685424804, 0.016746559143066407, 0.016268447875976564, 0.016722911834716796, 0.016729984283447265, 0.016623552322387696, 0.016535615921020506, 0.016568288803100586, 0.016566303253173827, 0.0165533447265625, 0.017025663375854493, 0.016661535263061522, 0.016589088439941405, 0.016658239364624024, 0.016634624481201173, 0.01658687973022461, 0.016559295654296875, 0.016547744750976562, 0.016524192810058593, 0.016484352111816408, 0.01644451141357422, 0.016477088928222656, 0.016480384826660158, 0.01660323143005371, 0.016538784027099608, 0.01646793556213379, 0.016564416885375976, 0.016816095352172853, 0.016447999954223632, 0.0164715518951416, 0.01647648048400879, 0.01652140808105469, 0.016461151123046875, 0.01650534439086914, 0.01650806427001953, 0.01661644744873047, 0.016745920181274413, 0.016451679229736327, 0.01653193664550781, 0.016558080673217773, 0.0165295352935791, 0.016508800506591797, 0.01657241630554199, 0.0164454402923584, 0.016509056091308594, 0.017893024444580078, 0.017585664749145507, 0.018968351364135744, 0.017113855361938476, 0.016701696395874023, 0.01692857551574707, 0.016830591201782225, 0.016672767639160157, 0.016719839096069337, 0.01666870307922363, 0.016755840301513673, 0.01661427116394043, 0.016773120880126953, 0.016592096328735352, 0.017094655990600584, 0.016626752853393555, 0.01658412742614746, 0.01647439956665039, 0.016558080673217773, 0.018143232345581056, 0.0163507194519043, 0.016630079269409178, 0.016632095336914062, 0.01664761543273926, 0.017122175216674803, 
0.016600896835327148, 0.016734464645385742, 0.016621503829956055, 0.016571775436401367, 0.016485055923461913, 0.01660927963256836, 0.01656012725830078, 0.01658291244506836, 0.016496383666992187, 0.016517120361328123, 0.016586751937866212, 0.016563295364379883, 0.01662447929382324, 0.016612512588500977, 0.01654876708984375, 0.016749568939208984, 0.016546815872192384, 0.016575551986694335, 0.016524063110351563, 0.016596832275390626, 0.01666489601135254, 0.0166582088470459, 0.016590303421020507, 0.01661574363708496, 0.016953792572021485, 0.0167587833404541, 0.016695072174072265, 0.016725887298583986, 0.01709414482116699, 0.016826976776123048, 0.01697407913208008, 0.016690208435058595, 0.0165994873046875, 0.01665446472167969, 0.016775583267211912, 0.016666175842285157, 0.016550336837768555, 0.016774528503417967, 0.016588991165161132, 0.016726463317871094, 0.017295360565185547, 0.016641664505004882, 0.016609664916992187, 0.016721920013427736, 0.01683875274658203, 0.016551839828491212, 0.016516864776611326, 0.016494848251342772, 0.016549888610839843, 0.01647369575500488, 0.016484384536743165, 0.01651545524597168, 0.016586271286010743, 0.016431583404541015, 0.016411775588989257, 0.016503679275512696, 0.016545791625976563, 0.01653660774230957, 0.016521215438842773, 0.016814271926879884, 0.01663385581970215, 0.016539648056030275, 0.016465919494628906, 0.016539648056030275, 0.016650144577026366, 0.01654115104675293, 0.016535871505737303, 0.016922943115234373, 0.016558080673217773, 0.016581888198852538, 0.01652934455871582, 0.01654457664489746, 0.016514623641967773, 0.01644384002685547, 0.016545791625976563, 0.016475488662719726, 0.016507551193237303, 0.016480255126953124, 0.016422815322875976, 0.01650022315979004, 0.01656483268737793, 0.016512895584106447, 0.016723487854003908, 0.01662345504760742, 0.016526079177856444, 0.01652716827392578, 0.01669548797607422, 0.01659267234802246, 0.016476383209228517, 0.01765900802612305, 0.01855718421936035, 0.017171072006225585, 0.016971359252929686, 0.016822399139404295, 0.0170185604095459, 0.016681856155395507, 0.016829856872558592, 0.01690412712097168, 0.016630144119262696, 0.01668681526184082, 0.016619808197021486, 0.01665433692932129, 0.01667683219909668, 0.01681001663208008, 0.01664169692993164, 0.0166014404296875, 0.016701440811157226, 0.016719871520996094, 0.01664614486694336, 0.016922624588012695, 0.016878623962402344, 0.017014944076538085, 0.016785408020019533, 0.016775360107421877, 0.016750688552856444, 0.016790048599243164, 0.016953344345092772, 0.01669334411621094, 0.01661667251586914, 0.017219648361206055, 0.01679033660888672, 0.01632271957397461, 0.01665862464904785, 0.016569791793823244, 0.01664064025878906, 0.016619455337524413, 0.016562175750732423, 0.01661952018737793, 0.016583776473999022, 0.01661382484436035, 0.016599519729614258, 0.016658336639404296, 0.01661142349243164, 0.0168222713470459, 0.017225631713867186, 0.016697439193725586, 0.01681407928466797, 0.017326080322265625, 0.017088064193725584, 0.016933311462402345, 0.016707487106323242, 0.016676191329956055, 0.016865087509155274, 0.016597951889038086, 0.016584384918212892, 0.016675264358520507, 0.016819679260253906, 0.016693151473999024, 0.01669375991821289, 0.01657347106933594, 0.016640031814575195, 0.016600000381469728, 0.01680384063720703, 0.017514495849609374, 0.017710208892822266, 0.017230655670166014, 0.017035327911376952, 0.01682841682434082, 0.016721088409423827, 0.016757568359375, 0.016611328125, 0.016622848510742187, 0.0166278076171875, 0.016599712371826173, 
0.01663327980041504, 0.016728639602661133, 0.016719423294067382, 0.016738752365112304, 0.016596704483032226, 0.016531232833862305, 0.016483936309814453, 0.016652320861816405, 0.016567167282104493, 0.016614463806152342, 0.016575456619262696, 0.016639968872070313, 0.016616960525512696, 0.01653536033630371, 0.016949951171875, 0.01683251190185547, 0.016652191162109375, 0.016705631256103515, 0.01669228744506836, 0.016615999221801757, 0.016253120422363283, 0.016533248901367186, 0.016529855728149415, 0.016482112884521484, 0.016518495559692384, 0.016530368804931642, 0.016568031311035156, 0.016593120574951173, 0.016647968292236328, 0.016569568634033204, 0.01664080047607422, 0.016547903060913086, 0.016668607711791992, 0.016543712615966797, 0.016547872543334962, 0.016527360916137695, 0.016910335540771485, 0.016633823394775392, 0.016740383148193358, 0.01664614486694336, 0.016768287658691407, 0.016892351150512696, 0.016847135543823243, 0.01689174461364746, 0.016660543441772462, 0.016613536834716797, 0.016618431091308592, 0.01701580810546875, 0.016574464797973632, 0.016744447708129884, 0.01665654373168945, 0.016627552032470704, 0.01661510467529297, 0.016554304122924805, 0.01665023994445801, 0.016869344711303712, 0.016758687973022462, 0.016692960739135742, 0.01670729637145996, 0.016629600524902345, 0.016837472915649413, 0.016780799865722656, 0.01662544059753418, 0.0165383358001709, 0.016723167419433593, 0.01660393524169922, 0.01665843200683594, 0.01653718376159668, 0.016742176055908203, 0.016527040481567383, 0.016628608703613282, 0.016588863372802735, 0.016619775772094728, 0.016693119049072266, 0.0168590087890625, 0.016645343780517578, 0.016599391937255858, 0.016529855728149415, 0.016482303619384766, 0.016496639251708984, 0.01661891174316406, 0.016487007141113282, 0.016424959182739257]",tokens/s,59.910032489776356,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2206.498816,2271.084544,0.0,1868.562432,1779.722752,s,1,9.1956044921875,9.1956044921875,0.0,9.1956044921875,9.1956044921875,9.1956044921875,9.1956044921875,[9.1956044921875],,kWh,5.702162377914798e-05,6.2827142298745825e-06,1.756251404999576e-05,8.086685205901832e-05,,MB,2297.995264,2378.039296,0.0,1960.83712,1916.447232,s,10,0.9634465255737305,0.09634465255737304,0.0005154187172517159,0.096394287109375,0.09664055786132812,0.09703864746093749,0.09735711914062499,"[0.09519411468505859, 0.09609849548339844, 0.09743673706054687, 0.09623023986816406, 0.09655209350585937, 0.09641203308105468, 0.09637654113769531, 0.09643942260742187, 0.09628720092773438, 
0.09641964721679687]",tokens/s,2657.127232334483,kWh,2.8048670713885964e-06,3.0932694706088443e-07,1.8566311149334095e-06,4.9708251333828906e-06,tokens/kWh,51500504.06737592,MB,2298.5728,2545.811456,0.0,2128.60928,2001.572864,s,10,19.789150390625004,1.9789150390625,0.009842989886318898,1.9752076416015625,1.99432197265625,1.9960130981445312,1.9973659985351562,"[1.981271240234375, 1.9939461669921874, 1.9746187744140624, 1.9757965087890625, 1.9847132568359376, 1.974387451171875, 1.9685323486328126, 1.9977042236328124, 1.9698544921875, 1.968325927734375]",tokens/s,31.835626470273276,kWh,4.38184827756926e-05,4.8322452112811885e-06,2.4665112324666934e-05,7.331584031164072e-05,tokens/kWh,859295.8865670558,,s,630,19.785949651718152,0.03140626928844149,0.0006218747355141671,0.031244416236877442,0.03193429794311524,0.03217912921905518,0.034664887962341316,"[0.03116828727722168, 0.030988800048828126, 0.03141257667541504, 0.031997983932495117, 0.031946399688720706, 0.03176227188110352, 0.0316011848449707, 0.03130524826049805, 0.031012832641601564, 0.031008384704589845, 0.031052608489990235, 0.03103468894958496, 0.030841119766235353, 0.03106038475036621, 0.03524991989135742, 0.031189247131347655, 0.031147680282592773, 0.03092835235595703, 0.03089820861816406, 0.030991199493408204, 0.03098988723754883, 0.031052223205566408, 0.03423231887817383, 0.031213567733764647, 0.031055871963500976, 0.031051647186279296, 0.031549856185913085, 0.031676191329956055, 0.03144806480407715, 0.03168908882141113, 0.03178886413574219, 0.031832544326782224, 0.031129087448120117, 0.030933792114257813, 0.030992191314697267, 0.031174848556518555, 0.03097113609313965, 0.03117919921875, 0.031065439224243162, 0.030972448348999024, 0.03108294486999512, 0.031694623947143556, 0.031500255584716794, 0.03161235237121582, 0.03132643127441406, 0.031021663665771484, 0.031170303344726563, 0.03493305587768555, 0.031106239318847657, 0.03097654342651367, 0.031047903060913085, 0.03113814353942871, 0.030864351272583006, 0.03117535972595215, 0.031264768600463864, 0.03220889663696289, 0.03322675323486328, 0.031329887390136715, 0.031154592514038085, 0.031096160888671873, 0.03126543998718262, 0.03118489646911621, 0.031027551651000976, 0.031526432037353516, 0.03139955139160156, 0.031033567428588867, 0.031154815673828124, 0.031119359970092773, 0.031072256088256835, 0.03095487976074219, 0.030993024826049806, 0.03084492874145508, 0.03095756721496582, 0.030899328231811525, 0.031099775314331054, 0.030934080123901368, 0.031042495727539064, 0.03497574234008789, 0.03115727996826172, 0.030933984756469725, 0.03103299140930176, 0.03081046485900879, 0.031080543518066408, 0.030968864440917968, 0.03193740844726563, 0.032540672302246096, 0.031459327697753905, 0.031188991546630858, 0.031029247283935548, 0.031104352951049803, 0.03143657684326172, 0.031059040069580077, 0.03163030433654785, 0.031186975479125977, 0.03143964767456055, 0.03137513542175293, 0.03154761505126953, 0.031602815628051756, 0.03179251289367676, 0.031656448364257815, 0.03151795196533203, 0.03155401611328125, 0.03176038360595703, 0.03168217658996582, 0.03177347183227539, 0.0315731201171875, 0.03174297523498535, 0.03179475212097168, 0.0319489917755127, 0.034582527160644534, 0.034328575134277346, 0.032204158782958986, 0.03200665664672851, 0.03179327964782715, 0.031929855346679685, 0.03166783905029297, 0.03191910362243652, 0.031885183334350586, 0.031934240341186526, 0.03180268859863281, 0.0317664966583252, 0.032117214202880856, 0.03176668739318848, 0.03155999946594238, 0.031586143493652345, 
0.0332432975769043, 0.032161792755126956, 0.03158220863342285, 0.031096832275390625, 0.031254528045654296, 0.031145280838012695, 0.030880704879760742, 0.030893695831298827, 0.030906496047973634, 0.030930047988891603, 0.031024255752563477, 0.03122559928894043, 0.031237695693969728, 0.031186975479125977, 0.03514191818237305, 0.03116556739807129, 0.030978080749511718, 0.03157814407348633, 0.032404129028320315, 0.031237632751464843, 0.030938079833984375, 0.031007551193237306, 0.03110105514526367, 0.031102943420410156, 0.031091903686523436, 0.031054880142211912, 0.030984767913818358, 0.031030975341796874, 0.031003072738647462, 0.030838783264160157, 0.03090630340576172, 0.030922208786010742, 0.030986015319824218, 0.031127456665039063, 0.031189727783203124, 0.03120351982116699, 0.030904319763183592, 0.03105958366394043, 0.03091075134277344, 0.03106211280822754, 0.031008480072021484, 0.03096835136413574, 0.03102899169921875, 0.03088719940185547, 0.031090944290161134, 0.030931072235107424, 0.0313143367767334, 0.03546444702148437, 0.03151123237609863, 0.0315424633026123, 0.031271743774414065, 0.031225568771362306, 0.031189279556274416, 0.03135487937927246, 0.03134409523010254, 0.03110966491699219, 0.03140784072875977, 0.03146956825256347, 0.03123843193054199, 0.03143270492553711, 0.03185830307006836, 0.03171471977233887, 0.03163785552978516, 0.03175625610351562, 0.031858720779418946, 0.03194083213806152, 0.0317540168762207, 0.03174399948120117, 0.031625024795532225, 0.03167660713195801, 0.03158835220336914, 0.03162028884887695, 0.03219331359863281, 0.03125251197814941, 0.03136102485656738, 0.03145270347595215, 0.03220896148681641, 0.03504966354370117, 0.03187241554260254, 0.031628095626831054, 0.03165340805053711, 0.031623231887817384, 0.03161743927001953, 0.031410175323486327, 0.03140812873840332, 0.031389696121215824, 0.03130268859863281, 0.031341087341308596, 0.031136192321777344, 0.031090431213378907, 0.03079145622253418, 0.030951295852661133, 0.030894527435302733, 0.030990495681762695, 0.031153215408325195, 0.031202239990234373, 0.031088640213012695, 0.03103539276123047, 0.030922752380371094, 0.03090015983581543, 0.031070272445678712, 0.031067712783813477, 0.03183990478515625, 0.03083907127380371, 0.03089664077758789, 0.03100876808166504, 0.03077516746520996, 0.030863040924072264, 0.030935487747192382, 0.031069887161254882, 0.03145136070251465, 0.031442495346069334, 0.031099424362182618, 0.0308953914642334, 0.030845663070678712, 0.030887935638427736, 0.03080806350708008, 0.03097769546508789, 0.03094972801208496, 0.030828832626342773, 0.031092063903808594, 0.031250816345214844, 0.032128734588623045, 0.03128486442565918, 0.03186140823364258, 0.03114396858215332, 0.031385568618774416, 0.03197520065307617, 0.031979743957519534, 0.03213734436035156, 0.03215081787109375, 0.03193036842346191, 0.03199852752685547, 0.03161308860778809, 0.03181711959838867, 0.031674976348876956, 0.03191772842407226, 0.031934816360473635, 0.031821823120117186, 0.03184828758239746, 0.03212713623046875, 0.03208748626708984, 0.03170976066589355, 0.03148361587524414, 0.03147772789001465, 0.0314531192779541, 0.031645696640014646, 0.03163523292541504, 0.03169545555114746, 0.03131731224060059, 0.031138496398925783, 0.03120947265625, 0.03110838317871094, 0.030888351440429687, 0.031031551361083983, 0.030976064682006837, 0.03143680000305176, 0.031180639266967774, 0.031072416305541993, 0.030835968017578125, 0.03112556838989258, 0.0311013126373291, 0.03169926452636719, 0.031676416397094724, 0.0315228157043457, 0.031676416397094724, 
0.03162931251525879, 0.03149964714050293, 0.031316608428955076, 0.031115488052368166, 0.031490976333618165, 0.03146022415161133, 0.031933792114257814, 0.03266831970214844, 0.0316939525604248, 0.031052671432495117, 0.03097020721435547, 0.03135043144226074, 0.030895231246948242, 0.031038143157958983, 0.03088505554199219, 0.031065088272094726, 0.031014528274536133, 0.03148608016967774, 0.031662336349487306, 0.03162675285339355, 0.03138924789428711, 0.031496320724487305, 0.03140867233276367, 0.031174623489379882, 0.031221471786499023, 0.030953760147094726, 0.030879743576049806, 0.030963136672973634, 0.030867807388305663, 0.03086511993408203, 0.03084339141845703, 0.031164415359497072, 0.03078758430480957, 0.030948671340942382, 0.033630142211914064, 0.03195788764953613, 0.031522687911987306, 0.031618175506591795, 0.03187980842590332, 0.031443199157714846, 0.031528959274291994, 0.03165510368347168, 0.03170115280151367, 0.03147638320922851, 0.031053823471069338, 0.03092255973815918, 0.030910655975341796, 0.03096780776977539, 0.03119308853149414, 0.031006719589233397, 0.03128121566772461, 0.03112953567504883, 0.03155353546142578, 0.03166559982299805, 0.031641183853149416, 0.031986656188964846, 0.031039487838745116, 0.031106271743774415, 0.030832992553710938, 0.031029695510864257, 0.03083263969421387, 0.03095327949523926, 0.030959552764892578, 0.031131359100341798, 0.03137763214111328, 0.031014720916748048, 0.031064159393310548, 0.03387433624267578, 0.031696895599365234, 0.031323328018188476, 0.03139369583129883, 0.031152063369750977, 0.033185024261474606, 0.03214131164550781, 0.03104739189147949, 0.03147980880737305, 0.031152128219604492, 0.03097360038757324, 0.030888288497924805, 0.03099385643005371, 0.031005247116088867, 0.031047264099121095, 0.031178367614746093, 0.031211679458618163, 0.03136102485656738, 0.0312644157409668, 0.031167455673217773, 0.031779071807861325, 0.03188531112670898, 0.03166364860534668, 0.03148134422302246, 0.03139251136779785, 0.03135510444641113, 0.031254528045654296, 0.03124608039855957, 0.031158527374267577, 0.034698528289794923, 0.03244105529785156, 0.032565185546875, 0.03169849586486816, 0.03171788787841797, 0.03109996795654297, 0.03111747169494629, 0.031503135681152344, 0.031327455520629884, 0.030931520462036132, 0.030947551727294922, 0.03093891143798828, 0.030826143264770508, 0.030889728546142577, 0.030984991073608397, 0.0309268798828125, 0.030991359710693358, 0.030978719711303712, 0.031094112396240235, 0.030939584732055665, 0.031093311309814454, 0.030938880920410156, 0.031049856185913084, 0.030799488067626953, 0.030906879425048828, 0.030848608016967774, 0.030826400756835938, 0.030849536895751952, 0.030846399307250978, 0.03095609664916992, 0.03174604797363281, 0.030963712692260743, 0.031002336502075196, 0.031137056350708008, 0.031224639892578124, 0.031000768661499024, 0.030980096817016602, 0.03128278350830078, 0.03126723289489746, 0.03093008041381836, 0.03082713508605957, 0.03081443214416504, 0.030888320922851563, 0.0308504638671875, 0.031097055435180664, 0.03100467109680176, 0.031275007247924806, 0.03122380828857422, 0.03129955291748047, 0.03147068786621094, 0.03135791969299316, 0.03143008041381836, 0.0311506233215332, 0.031080448150634765, 0.031690528869628906, 0.031440895080566404, 0.031217599868774416, 0.03126803207397461, 0.031300479888916016, 0.03417497634887695, 0.032129024505615236, 0.03222118377685547, 0.0317255687713623, 0.03155558395385742, 0.03190336036682129, 0.031884927749633786, 0.03195276832580567, 0.03228147125244141, 0.0323328971862793, 
0.031939487457275394, 0.031959039688110355, 0.03187113571166992, 0.03195273590087891, 0.031932416915893554, 0.03188719940185547, 0.03158236885070801, 0.03168003273010254, 0.03150076866149902, 0.03173990440368652, 0.031796735763549806, 0.031608415603637696, 0.03151948738098145, 0.03138918495178223, 0.03144976043701172, 0.03138355255126953, 0.03143270492553711, 0.031291391372680666, 0.031422464370727536, 0.03139743995666504, 0.031432640075683596, 0.03173017692565918, 0.03186793518066406, 0.03173398399353027, 0.031657855987548826, 0.031503231048583984, 0.0315863037109375, 0.03153462409973144, 0.03167004776000976, 0.03160134315490723, 0.031911327362060544, 0.032016193389892575, 0.03162150382995606, 0.03156806373596192, 0.031740127563476564, 0.03176777648925781, 0.03165059280395508, 0.03180371284484863, 0.032253440856933595, 0.03166431999206543, 0.03176959991455078, 0.03189248085021973, 0.03219660949707031, 0.03153424072265625, 0.031105888366699218, 0.03097804832458496, 0.030874784469604493, 0.03096006393432617, 0.03158038330078125, 0.03145747184753418, 0.03141587257385254, 0.031285984039306644, 0.031090272903442382, 0.031090368270874025, 0.03168943977355957, 0.031850496292114255, 0.031870399475097656, 0.03195379257202149, 0.03196240043640137, 0.03168297576904297, 0.03175046348571777, 0.03130745506286621, 0.03105526351928711, 0.03112611198425293, 0.030943231582641603, 0.031031328201293944, 0.031065759658813478, 0.031089023590087892, 0.03099612808227539, 0.03122524833679199, 0.03125139236450195, 0.03136300849914551, 0.030954944610595704, 0.030853248596191405, 0.03127340888977051, 0.030892032623291016, 0.030832096099853514, 0.031123231887817383, 0.030948448181152343, 0.030979904174804687, 0.03091164779663086, 0.030935007095336912, 0.031013599395751955, 0.030904319763183592, 0.03098784065246582, 0.030838272094726563, 0.03099251174926758, 0.031406560897827146, 0.03148604774475098, 0.031839744567871094, 0.0317263355255127, 0.03193638420104981, 0.03149955177307129, 0.03150057601928711, 0.03234259033203125, 0.031227519989013672, 0.030982528686523438, 0.03092848014831543, 0.031166656494140625, 0.03141632080078125, 0.031248863220214845, 0.030969184875488283, 0.030997983932495116, 0.031242752075195314, 0.031021503448486327, 0.03098419189453125, 0.031250591278076174, 0.03157180786132813, 0.03112550354003906, 0.030980096817016602, 0.03115827178955078, 0.03154944038391113, 0.03142598342895508, 0.031203903198242188, 0.03099238395690918, 0.030950559616088866, 0.030991199493408204, 0.03101286315917969, 0.03122380828857422, 0.031229663848876953, 0.03138768005371094, 0.031019264221191407, 0.031005792617797852, 0.031127647399902345, 0.03296953582763672, 0.03131977653503418, 0.031140127182006837, 0.03118838310241699, 0.030959711074829102, 0.03099251174926758, 0.03106559944152832, 0.031001472473144533, 0.031285247802734374, 0.031024831771850586, 0.03132991981506348, 0.031828384399414066, 0.031262304306030275, 0.031316671371459964, 0.03143683242797852, 0.03138531112670898, 0.031080703735351562, 0.030905408859252928, 0.03162553596496582, 0.030923263549804687, 0.030830047607421876, 0.031075103759765625, 0.031226911544799806, 0.030899040222167967, 0.03111280059814453, 0.03110553550720215, 0.03160463905334473, 0.03093708801269531, 0.031440895080566404, 0.031133024215698243, 0.03197599983215332, 0.03165769577026367, 0.03190726470947266, 0.03203571319580078, 0.03161094474792481, 0.03177190399169922, 0.03145305633544922, 0.030981216430664062, 0.030972896575927736, 0.03086739158630371, 0.03131427192687988, 
0.03119772720336914, 0.03101487922668457, 0.03097760009765625, 0.030814687728881837, 0.030989696502685547, 0.031058559417724608, 0.030918111801147462, 0.03081679916381836, 0.031197183609008788]",tokens/s,31.840776464590522,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7438.864384,8048.738304,0.0,7646.216192,7627.584,s,1,13.0644892578125,13.0644892578125,0.0,13.0644892578125,13.0644892578125,13.0644892578125,13.0644892578125,[13.0644892578125],,kWh,0.00017518869824164842,1.9317285628127295e-05,5.484393276400379e-05,0.0002493499166337795,,MB,1775.828992,8694.66112,0.0,8277.458944,8199.8592,s,10,3.582872100830078,0.3582872100830078,0.0009407618133292165,0.3583049621582031,0.3590489440917969,0.3596042755126953,0.36004854064941405,"[0.357796875, 0.3574187622070312, 0.35661016845703125, 0.35762802124023435, 0.35818572998046877, 0.358925537109375, 0.3588048095703125, 0.35891839599609376, 0.3601596069335937, 0.3584241943359375]",tokens/s,714.5105736280401,kWh,1.0503318217708586e-05,1.157523856455291e-06,6.951463894499779e-06,1.8612305968663655e-05,tokens/kWh,13754340.834016524,MB,1783.578624,9009.23392,0.0,8592.031744,8476.849152,s,10,29.597494873046877,2.9597494873046877,0.005798273546547342,2.961339599609375,2.965486962890625,2.966429150390625,2.967182900390625,"[2.949198974609375, 2.949369384765625, 2.958435791015625, 2.962334228515625, 2.963216796875, 2.962507568359375, 2.960344970703125, 2.965277587890625, 2.967371337890625, 2.959438232421875]",tokens/s,21.285585239638404,kWh,9.097741691895747e-05,1.0035818115743189e-05,6.064620129470096e-05,0.00016165943632940162,tokens/kWh,389708.15085380786,,s,630,29.590878746032715,0.04696964880322653,0.00042575473763651814,0.046929279327392576,0.04745906562805176,0.04756553688049316,0.04830731567382813,"[0.04735295867919922, 0.04676883316040039, 0.046278846740722655, 0.04642559814453125, 0.04624374389648438, 0.04616624069213867, 0.046163711547851566, 0.04617689514160156, 0.04638719940185547, 0.046460289001464844, 0.0464791374206543, 0.046449504852294925, 0.04642611312866211, 0.04645478439331055, 0.046412799835205076, 0.04653894424438477, 0.04673446273803711, 0.04670767974853516, 0.04686681747436523, 0.04665603256225586, 0.046703712463378906, 0.046770912170410156, 0.046607776641845705, 0.04654550552368164, 0.04661043167114258, 0.04644607925415039, 0.04655769729614258, 0.046655006408691406, 0.0466844482421875, 0.0465401611328125, 0.04675667190551758, 0.04690288162231445, 0.046908992767333985, 0.04690825653076172, 0.04665302276611328, 0.04680335998535156, 0.0469153938293457, 0.046978687286376955, 0.046909343719482424, 0.047063713073730466, 0.047267200469970704, 0.047056705474853515, 0.04704134368896484, 0.04727155303955078, 0.04705926513671875, 0.047044670104980466, 0.04700364685058594, 0.04705062484741211, 0.04701356887817383, 0.04687286376953125, 0.04670431900024414, 
0.046948543548583986, 0.0470041618347168, 0.046787841796875, 0.047196704864501955, 0.04719174575805664, 0.047144447326660156, 0.047354686737060545, 0.047503360748291014, 0.04740256118774414, 0.047416927337646485, 0.0471703987121582, 0.04717772674560547, 0.04690534210205078, 0.04663907241821289, 0.04636880111694336, 0.046309375762939455, 0.046274559020996094, 0.046301185607910154, 0.046276607513427735, 0.046516223907470705, 0.04635238265991211, 0.04632950210571289, 0.046389598846435544, 0.04639955139160156, 0.04657145690917969, 0.0465715217590332, 0.046472991943359375, 0.04658812713623047, 0.04647222518920899, 0.04665193557739258, 0.0465863037109375, 0.046698497772216796, 0.046645633697509764, 0.04665462493896484, 0.04661033630371094, 0.04647135925292969, 0.04650393676757812, 0.04673798370361328, 0.04692720031738281, 0.04672521591186524, 0.046827777862548825, 0.046895233154296875, 0.04679884719848633, 0.046802017211914064, 0.04661724853515625, 0.046833919525146483, 0.046895103454589845, 0.04690943908691406, 0.04696473693847656, 0.04692105484008789, 0.046833694458007814, 0.04714089584350586, 0.046938720703125, 0.047017982482910156, 0.047025215148925784, 0.04692473602294922, 0.04684563064575195, 0.04681727981567383, 0.04715552139282227, 0.04704460906982422, 0.047023551940917965, 0.04693868637084961, 0.04712063980102539, 0.04714640045166016, 0.04696451187133789, 0.04708819198608399, 0.04719001770019531, 0.04715520095825195, 0.047241214752197266, 0.04717567825317383, 0.047176959991455075, 0.04731292724609375, 0.04734230422973633, 0.04722876739501953, 0.04746579360961914, 0.04718307113647461, 0.04652521514892578, 0.046327423095703125, 0.04614771270751953, 0.0462215690612793, 0.04663267135620117, 0.046362911224365234, 0.04654819107055664, 0.04735635375976562, 0.04639507293701172, 0.046543521881103514, 0.04756217575073242, 0.046815105438232425, 0.04661727905273438, 0.04660224151611328, 0.04660553741455078, 0.046430496215820315, 0.04665164947509766, 0.0464714241027832, 0.0465918083190918, 0.04681273651123047, 0.046782657623291014, 0.04679875183105469, 0.0466822395324707, 0.046696128845214846, 0.04668899154663086, 0.04723839950561524, 0.047231327056884764, 0.046757503509521486, 0.0474686393737793, 0.04716790390014648, 0.04696092987060547, 0.0472407341003418, 0.046977504730224606, 0.04725279998779297, 0.046940673828125, 0.04676012802124024, 0.046637054443359374, 0.04671263885498047, 0.046860286712646484, 0.046687999725341794, 0.04689555358886719, 0.04693999862670899, 0.04696899032592773, 0.04701388931274414, 0.047247360229492184, 0.046988800048828126, 0.0471860466003418, 0.047231361389160155, 0.047116287231445314, 0.04719001770019531, 0.04829004669189453, 0.04732227325439453, 0.047352447509765624, 0.047390689849853514, 0.047429630279541016, 0.04731430435180664, 0.04727391815185547, 0.047225536346435545, 0.04731276702880859, 0.0474002571105957, 0.04733993530273437, 0.047415615081787106, 0.0488458251953125, 0.04676095962524414, 0.046510143280029295, 0.046462913513183594, 0.04636262512207031, 0.04633910369873047, 0.047025249481201174, 0.04637273788452148, 0.046430206298828124, 0.04649369430541992, 0.04657897567749023, 0.0463961296081543, 0.04643139266967773, 0.046567649841308595, 0.0465035514831543, 0.04661471939086914, 0.04648204803466797, 0.04690918350219726, 0.046782752990722654, 0.046910785675048826, 0.04712739181518555, 0.04689715194702149, 0.04679398345947266, 0.04663750457763672, 0.04655750274658203, 0.046497791290283204, 0.046645023345947265, 0.046700992584228516, 0.04659782409667969, 
0.046731361389160155, 0.04675503921508789, 0.046682910919189455, 0.046734432220458984, 0.046686241149902344, 0.04697776031494141, 0.04705462265014648, 0.04664972686767578, 0.04669235229492188, 0.047101951599121096, 0.04717977523803711, 0.04741120147705078, 0.047476001739501957, 0.04744784164428711, 0.047346622467041015, 0.04732723236083984, 0.047230976104736325, 0.04732217788696289, 0.0476640625, 0.047505409240722656, 0.04730646514892578, 0.04740444946289062, 0.04748303985595703, 0.047299297332763675, 0.047083518981933595, 0.04748822402954102, 0.04759836959838867, 0.047416702270507816, 0.047435489654541016, 0.04774115371704102, 0.04753478240966797, 0.04763395309448242, 0.048027393341064456, 0.04793539047241211, 0.04759555053710938, 0.046742366790771483, 0.0464793586730957, 0.04696473693847656, 0.046310783386230465, 0.04646976089477539, 0.046559425354003904, 0.04644217681884766, 0.04657984161376953, 0.046876415252685544, 0.0467193603515625, 0.046641025543212894, 0.047344993591308594, 0.046940673828125, 0.04671241760253906, 0.046544830322265626, 0.04674991989135742, 0.04686454391479492, 0.04663897705078125, 0.05076031875610352, 0.046704383850097654, 0.04679065704345703, 0.047017982482910156, 0.046787872314453124, 0.04652051162719727, 0.04657209777832031, 0.04667087936401367, 0.04663363265991211, 0.04665167999267578, 0.04692089462280274, 0.04685260772705078, 0.04687654495239258, 0.04702444839477539, 0.04707315063476562, 0.046844158172607425, 0.04738032150268555, 0.04696614456176758, 0.047175647735595704, 0.04708953475952148, 0.047153823852539065, 0.047021888732910154, 0.04698380661010742, 0.047120223999023436, 0.046977024078369144, 0.047017982482910156, 0.047187774658203126, 0.04708310317993164, 0.04726559829711914, 0.04704499053955078, 0.04692323303222656, 0.047339553833007815, 0.047288257598876955, 0.04697187042236328, 0.04701180648803711, 0.04736822509765625, 0.047445663452148436, 0.04742614364624023, 0.04734735870361328, 0.04749321746826172, 0.04738457489013672, 0.04732656097412109, 0.047403518676757815, 0.04749123382568359, 0.04724425506591797, 0.04656329727172852, 0.0461223030090332, 0.046400222778320316, 0.04610012817382812, 0.04645308685302734, 0.046655487060546875, 0.04695859146118164, 0.04715315246582031, 0.04772857666015625, 0.04669996643066406, 0.04657436752319336, 0.046730880737304685, 0.047061214447021486, 0.04664115142822266, 0.04675174331665039, 0.046811134338378906, 0.046607486724853514, 0.046680416107177734, 0.046666271209716795, 0.04662879943847656, 0.04690678405761719, 0.04680361557006836, 0.04690943908691406, 0.0466736946105957, 0.04660630416870117, 0.046801151275634764, 0.046829311370849606, 0.04675404739379883, 0.04705625534057617, 0.04838054275512695, 0.04690534210205078, 0.04676620864868164, 0.047003520965576175, 0.046875648498535157, 0.04714188766479492, 0.04705279922485352, 0.04677987289428711, 0.046887039184570316, 0.047204288482666015, 0.046954238891601566, 0.046833473205566405, 0.0472479362487793, 0.04715760040283203, 0.04716857528686524, 0.04694316864013672, 0.047169246673583985, 0.047022369384765624, 0.04703987121582031, 0.04723500823974609, 0.04771500778198242, 0.047065086364746093, 0.04728137588500977, 0.048397087097167966, 0.04746854400634765, 0.04781363296508789, 0.04740832138061524, 0.047274112701416016, 0.047298240661621097, 0.04763417434692383, 0.04733977508544922, 0.04728179168701172, 0.04750553512573242, 0.04831436920166016, 0.046988990783691405, 0.04631788635253906, 0.04640742492675781, 0.04643353652954101, 0.04638937759399414, 0.04643641662597656, 
0.04651481628417969, 0.04642604827880859, 0.04653286361694336, 0.04666163253784179, 0.04647731018066406, 0.046430206298828124, 0.046403167724609375, 0.046363040924072264, 0.04628611373901367, 0.04686070251464844, 0.046843456268310546, 0.046733856201171875, 0.04662704086303711, 0.046997344970703125, 0.04702838516235352, 0.04678451156616211, 0.04660838317871094, 0.04663507080078125, 0.046931903839111326, 0.046952449798583984, 0.04691353607177735, 0.046739456176757815, 0.04689715194702149, 0.046811038970947266, 0.04698940658569336, 0.04715520095825195, 0.04701740646362305, 0.04685881423950195, 0.04687411117553711, 0.04700630569458008, 0.04688217544555664, 0.04711388778686523, 0.0470077133178711, 0.04706387329101563, 0.04699350357055664, 0.047164993286132814, 0.04748323059082031, 0.04719830322265625, 0.04713676834106445, 0.04720230484008789, 0.04728044891357422, 0.04734892654418945, 0.047270526885986326, 0.0471912956237793, 0.047489566802978514, 0.04746044921875, 0.04741120147705078, 0.04746614456176758, 0.04742083358764648, 0.0472606086730957, 0.04713983917236328, 0.04740793609619141, 0.04787628936767578, 0.04758323287963867, 0.047467872619628905, 0.04766582489013672, 0.0471545295715332, 0.0466393928527832, 0.04649417495727539, 0.046579616546630856, 0.04707440185546875, 0.046656192779541014, 0.046706912994384765, 0.04655270385742188, 0.04653913497924805, 0.046448638916015625, 0.046845951080322266, 0.04677750396728515, 0.04665190505981445, 0.04651168060302734, 0.04683446502685547, 0.04708163070678711, 0.04679663848876953, 0.04676982498168945, 0.04656777572631836, 0.0466247673034668, 0.04669235229492188, 0.04695449447631836, 0.04667295837402344, 0.046951454162597654, 0.046921310424804685, 0.046653377532958985, 0.04668044662475586, 0.04687843322753906, 0.04670883178710938, 0.046723262786865234, 0.046548992156982424, 0.04670054244995117, 0.04707328033447265, 0.04717567825317383, 0.04688880157470703, 0.046921886444091794, 0.04722383880615234, 0.0473097915649414, 0.04762358474731445, 0.04748553466796875, 0.047368255615234375, 0.04743161773681641, 0.04719945526123047, 0.047473438262939455, 0.04764057540893555, 0.04719772720336914, 0.04734409713745117, 0.047390689849853514, 0.04728358459472656, 0.047263423919677736, 0.04760470581054688, 0.047421440124511716, 0.047399009704589844, 0.047445919036865236, 0.04747673416137695, 0.04754841613769531, 0.04738041687011719, 0.04747398376464844, 0.047517726898193356, 0.047910751342773436, 0.04748102569580078, 0.047591838836669925, 0.047610145568847656, 0.04718057632446289, 0.04664934539794922, 0.04639539337158203, 0.046884735107421874, 0.04667552185058594, 0.04674617767333984, 0.04668809509277344, 0.046852256774902346, 0.04681932830810547, 0.04779827117919922, 0.04693135833740234, 0.04686105728149414, 0.04670038223266602, 0.0467182731628418, 0.04705523300170898, 0.046779903411865234, 0.04667679977416992, 0.04676812744140625, 0.046742752075195314, 0.04702288055419922, 0.047146465301513674, 0.046940353393554686, 0.04679919815063477, 0.046747615814208984, 0.04690537643432617, 0.04680908966064453, 0.046688255310058595, 0.0466954231262207, 0.046617599487304685, 0.046919456481933595, 0.047157470703125, 0.047881439208984376, 0.046879520416259764, 0.046893054962158204, 0.046943519592285154, 0.04730569458007813, 0.04707302474975586, 0.04688617706298828, 0.04697724914550781, 0.047329792022705076, 0.047292289733886717, 0.04701196670532227, 0.04706633758544922, 0.04704044723510742, 0.04725228881835938, 0.047263774871826175, 0.047171520233154296, 0.04706719970703125, 
0.04717158508300781, 0.04725132751464844, 0.04750899124145508, 0.04743436813354492, 0.04701513671875, 0.047119136810302734, 0.04907212829589844, 0.048502784729003906, 0.047373600006103515, 0.04726201629638672, 0.04739932632446289, 0.04754022216796875, 0.047502880096435544, 0.04738505554199219, 0.04744003295898438, 0.04719555282592774, 0.0477130241394043, 0.04638131332397461, 0.046494720458984375, 0.046365440368652346, 0.046429439544677736, 0.04642483139038086, 0.046534656524658206, 0.046499839782714845, 0.04645273590087891, 0.046662017822265624, 0.04682918548583984, 0.04681894302368164, 0.04673116683959961, 0.046631614685058595, 0.04678844833374023, 0.04648748779296875, 0.046698497772216796, 0.04687446212768555, 0.04698128128051758, 0.04684799957275391, 0.04678656005859375, 0.04707942581176758, 0.04674505615234375, 0.046623104095458986, 0.046588062286376956, 0.04666777420043945, 0.0469381103515625, 0.04682137680053711, 0.04671023941040039, 0.04666537475585938, 0.04676803207397461, 0.046774303436279294, 0.04698012924194336, 0.0468287353515625, 0.0470759048461914, 0.04678876876831055, 0.04698726272583008, 0.047067134857177735, 0.04730812835693359, 0.047069854736328125, 0.04733302307128906, 0.04731343841552734, 0.04738844680786133, 0.047263774871826175, 0.04740915298461914, 0.04709580612182617, 0.047042560577392575, 0.04706057739257812, 0.04707900619506836, 0.047497215270996096, 0.04738051223754883, 0.047102752685546874, 0.04703343963623047, 0.04708211135864258, 0.047398303985595705, 0.047518463134765626, 0.04734291076660156, 0.047387454986572264, 0.04756828689575195, 0.04745891189575195, 0.04739276885986328, 0.04746198272705078]",tokens/s,21.290344413461018,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in 
decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,876.371968,662.56896,0.0,260.046848,258.555392,s,1,7.6930947265625,7.6930947265625,0.0,7.6930947265625,7.6930947265625,7.6930947265625,7.6930947265625,[7.6930947265625],,kWh,1.5043263316647427e-05,1.652119122551898e-06,4.5366702959986904e-06,2.1232052735198017e-05,,MB,1329.893376,759.037952,0.0,341.835776,317.950464,s,14,0.19584982299804687,0.013989273071289063,0.0003924698149841576,0.013858880043029785,0.01405989751815796,0.014529316711425782,0.015200724182128906,"[0.013813887596130371, 0.01387116813659668, 0.013836671829223633, 0.015368576049804688, 0.013822400093078613, 0.013848992347717285, 0.013828767776489257, 0.01373084831237793, 0.013868767738342285, 0.013844736099243164, 0.013954431533813476, 0.014019040107727051, 0.014077407836914062, 0.01396412754058838]",tokens/s,18299.735711457557,kWh,4.0486558713593135e-07,4.463948137609203e-08,2.160912500804278e-07,6.655963185924512e-07,tokens/kWh,384617511.9197894,MB,1370.025984,786.300928,0.0,369.098752,317.953024,s,14,10.2091162109375,0.7292225864955357,0.008958890656735363,0.7251283569335938,0.7411813537597657,0.7439443481445313,0.7479024926757812,"[0.7261859741210938, 0.7228895874023438, 0.724860595703125, 0.7224734497070312, 0.72203466796875, 0.7253961181640625, 0.7238218994140625, 0.720133056640625, 0.7216100463867188, 0.7291727294921875, 0.7394151611328125, 0.74095068359375, 0.7488920288085937, 0.7412802124023438]",tokens/s,86.39337448770272,kWh,2.103134934619995e-05,2.3193967290330682e-06,7.711831758919987e-06,3.1062577834153e-05,tokens/kWh,2028163.9320588557,,s,882,10.201798299789454,0.01156666473899028,0.0002727617723053447,0.0114759521484375,0.011841481208801269,0.01195679063796997,0.012620402498245236,"[0.011280672073364258, 0.01143609619140625, 0.011467647552490235, 0.011575360298156739, 0.011414688110351562, 0.011553312301635742, 0.011497471809387207, 0.01141055965423584, 0.011464672088623047, 0.01140329647064209, 0.011447263717651366, 0.011435296058654785, 0.011434080123901368, 0.011496000289916992, 0.011486528396606445, 0.011486016273498535, 0.011487071990966796, 0.011435040473937988, 0.011405952453613281, 0.01141590404510498, 0.011891008377075196, 0.012071871757507324, 0.011557632446289063, 0.01324448013305664, 0.011922816276550292, 0.011495136260986328, 0.011463808059692384, 0.0114234561920166, 0.011446080207824706, 0.011587455749511718, 0.01139958381652832, 0.011430144309997559, 0.011369279861450195, 0.011319808006286621, 0.01153264045715332, 0.011394847869873046, 0.011343647956848144, 0.01135251235961914, 0.01145036792755127, 0.011347968101501465, 0.011367456436157226, 0.011602911949157715, 0.01140940761566162, 0.011353631973266602, 0.011377056121826172, 0.011391039848327636, 0.011345248222351074, 0.011444895744323731, 
0.011388544082641602, 0.01136473560333252, 0.011734751701354981, 0.011892255783081054, 0.011707072257995605, 0.011627903938293457, 0.011672256469726562, 0.011705696105957031, 0.011473055839538573, 0.011424320220947266, 0.011365951538085938, 0.011412063598632812, 0.011431776046752929, 0.011532575607299804, 0.011590368270874023, 0.011230655670166016, 0.011460895538330079, 0.011530048370361328, 0.011467583656311035, 0.011482751846313477, 0.011600255966186523, 0.01155388832092285, 0.011529120445251465, 0.01153609561920166, 0.011424032211303711, 0.011488863945007324, 0.01143235206604004, 0.011464703559875488, 0.011581695556640625, 0.011636480331420899, 0.011620351791381836, 0.01155459213256836, 0.011434207916259766, 0.011389984130859374, 0.011430879592895508, 0.011433823585510254, 0.011470656394958496, 0.011444543838500977, 0.011459872245788575, 0.011603008270263672, 0.011431167602539063, 0.011397567749023438, 0.011462464332580566, 0.01150166416168213, 0.011547039985656739, 0.011461536407470703, 0.011407551765441894, 0.011420096397399902, 0.011425215721130371, 0.011423583984375, 0.011437952041625976, 0.011453760147094727, 0.011449695587158203, 0.011470623970031738, 0.011385408401489258, 0.011411680221557616, 0.011413311958312989, 0.011433952331542969, 0.011501567840576172, 0.011554335594177246, 0.011477375984191894, 0.011409472465515137, 0.011413503646850585, 0.011410688400268555, 0.011483296394348145, 0.01152064037322998, 0.011644576072692871, 0.011560416221618653, 0.011514752388000489, 0.011439519882202149, 0.011387680053710937, 0.011384160041809081, 0.011530048370361328, 0.011416223526000977, 0.011446271896362305, 0.011372384071350097, 0.011432448387145995, 0.01131811237335205, 0.011204031944274903, 0.011391584396362304, 0.011388287544250489, 0.011385439872741699, 0.011404895782470703, 0.01139743995666504, 0.011375967979431151, 0.011392064094543457, 0.011361696243286134, 0.012749471664428712, 0.014009920120239258, 0.011528287887573242, 0.011454431533813477, 0.011466976165771484, 0.011646623611450196, 0.011444704055786133, 0.011347647666931152, 0.011435456275939941, 0.011399744033813476, 0.011507712364196777, 0.01137174415588379, 0.01135807991027832, 0.011332575798034667, 0.011433919906616212, 0.011407679557800292, 0.011383999824523925, 0.011422207832336426, 0.0114171199798584, 0.011350015640258788, 0.011354911804199218, 0.011351072311401367, 0.011443103790283203, 0.011448287963867187, 0.011453632354736329, 0.011332192420959473, 0.011374048233032227, 0.011354816436767579, 0.011390815734863282, 0.011396191596984863, 0.01137712001800537, 0.011374367713928222, 0.011362463951110839, 0.011343935966491699, 0.011434399604797363, 0.011458016395568848, 0.011499327659606933, 0.01142630386352539, 0.011550592422485352, 0.011491392135620117, 0.011476927757263184, 0.01139955234527588, 0.011421471595764161, 0.011448575973510742, 0.011449567794799805, 0.011501952171325684, 0.011411104202270508, 0.011406047821044922, 0.011436032295227052, 0.01146675205230713, 0.011406911849975585, 0.011486783981323242, 0.012123007774353028, 0.011894432067871094, 0.011440416336059571, 0.011511360168457031, 0.011653727531433105, 0.011460127830505372, 0.011614399909973145, 0.011397407531738282, 0.011444512367248536, 0.011411168098449708, 0.01140940761566162, 0.011380767822265625, 0.011544384002685548, 0.011464863777160645, 0.011553952217102051, 0.011493760108947754, 0.011480959892272949, 0.01158198356628418, 0.011759679794311523, 0.011478591918945312, 0.01139087963104248, 0.01143171215057373, 0.01141222381591797, 
0.011536383628845214, 0.011612288475036621, 0.01147270393371582, 0.011407232284545899, 0.011385024070739746, 0.011390975952148438, 0.011359807968139648, 0.011577792167663574, 0.011413503646850585, 0.011419072151184081, 0.011385087966918945, 0.011366496086120606, 0.011543935775756835, 0.011405856132507323, 0.011431872367858887, 0.011389311790466308, 0.011671360015869141, 0.011407551765441894, 0.011421695709228515, 0.011954175949096679, 0.01141318416595459, 0.011413824081420899, 0.01137657642364502, 0.01140937614440918, 0.011419743537902831, 0.011413503646850585, 0.011470848083496094, 0.01139417552947998, 0.011335552215576172, 0.011482399940490723, 0.011486623764038086, 0.011462559700012207, 0.011372960090637207, 0.011372384071350097, 0.011397088050842285, 0.011474559783935547, 0.011462783813476563, 0.011450207710266114, 0.011367008209228516, 0.011384832382202148, 0.011385120391845704, 0.011357503890991211, 0.011169792175292969, 0.011491328239440919, 0.01152790355682373, 0.011446656227111817, 0.011398847579956056, 0.011423199653625489, 0.011426560401916504, 0.011403455734252929, 0.011454048156738282, 0.011421952247619629, 0.011360223770141602, 0.011440192222595215, 0.011433792114257813, 0.011409536361694336, 0.011411456108093262, 0.011458847999572754, 0.011404512405395507, 0.011397472381591797, 0.01146281623840332, 0.011385919570922852, 0.011373151779174804, 0.011366656303405762, 0.011574624061584472, 0.011387007713317871, 0.011357088088989258, 0.01138640022277832, 0.011394463539123535, 0.011485504150390626, 0.011461088180541992, 0.011463744163513184, 0.011469120025634766, 0.011395711898803711, 0.011499008178710938, 0.011552576065063477, 0.011461248397827148, 0.011448224067687989, 0.01149289608001709, 0.011428480148315429, 0.011415391921997071, 0.011450431823730468, 0.011450400352478027, 0.01137241554260254, 0.01144051170349121, 0.011461695671081544, 0.011409631729125977, 0.01148572826385498, 0.01154047966003418, 0.011376704216003418, 0.011448575973510742, 0.011425472259521485, 0.011483136177062989, 0.011437567710876465, 0.01141811180114746, 0.011478495597839356, 0.011442720413208008, 0.011446271896362305, 0.011619935989379883, 0.011437855720520019, 0.011473535537719726, 0.011438079833984375, 0.011718655586242676, 0.011423744201660157, 0.012093600273132324, 0.01122537612915039, 0.011497471809387207, 0.011438303947448731, 0.011398591995239259, 0.011434304237365722, 0.01140944004058838, 0.011405311584472656, 0.012071200370788574, 0.011437631607055665, 0.011394399642944336, 0.011360608100891113, 0.011360575675964355, 0.011399328231811523, 0.011463839530944825, 0.011438591957092285, 0.011481439590454102, 0.011382911682128906, 0.011454336166381836, 0.01134166431427002, 0.011450528144836426, 0.011429887771606445, 0.011447711944580078, 0.01148579216003418, 0.011884544372558594, 0.011476767539978028, 0.011409312248229981, 0.011364288330078125, 0.011432319641113282, 0.011411775588989257, 0.011449024200439454, 0.011369343757629394, 0.011384575843811036, 0.011416031837463378, 0.011466464042663575, 0.011368736267089844, 0.011376511573791505, 0.011434080123901368, 0.011406720161437988, 0.01142246437072754, 0.01147283172607422, 0.011421919822692871, 0.011412704467773437, 0.011454336166381836, 0.011430432319641113, 0.01143164825439453, 0.011381279945373536, 0.011398943901062011, 0.011549920082092284, 0.014902079582214355, 0.011423711776733398, 0.011986944198608398, 0.011581439971923829, 0.01144422435760498, 0.011448351860046387, 0.01132464027404785, 0.011428640365600587, 0.01140937614440918, 
0.011421695709228515, 0.011384832382202148, 0.011505887985229493, 0.01152790355682373, 0.011466815948486328, 0.011421695709228515, 0.011296544075012207, 0.0120513916015625, 0.011484800338745117, 0.011505311965942382, 0.01169001579284668, 0.01147980785369873, 0.011468735694885254, 0.011420703887939454, 0.011381919860839845, 0.01138156795501709, 0.01145139217376709, 0.011390975952148438, 0.01140332794189453, 0.0113721923828125, 0.011410816192626953, 0.01167852783203125, 0.011392767906188966, 0.011423263549804687, 0.01147593593597412, 0.011507616043090821, 0.01145139217376709, 0.011500639915466309, 0.01132323169708252, 0.011370559692382813, 0.011409343719482422, 0.011332991600036622, 0.011385472297668457, 0.01135647964477539, 0.011378560066223144, 0.011341631889343262, 0.011546784400939942, 0.01153164768218994, 0.011429887771606445, 0.011422176361083985, 0.01145036792755127, 0.01152019214630127, 0.011511615753173828, 0.011633983612060547, 0.012548959732055665, 0.011485024452209473, 0.011396767616271973, 0.011364704132080078, 0.011490431785583496, 0.011499967575073242, 0.011526592254638672, 0.011436032295227052, 0.01141759967803955, 0.011491328239440919, 0.011481087684631347, 0.011569151878356934, 0.011497376441955566, 0.011503711700439453, 0.011469023704528809, 0.011443967819213868, 0.011606240272521972, 0.011437664031982422, 0.011581664085388183, 0.011406911849975585, 0.011415200233459473, 0.011417759895324706, 0.011448960304260253, 0.01140121555328369, 0.01142579174041748, 0.011166272163391113, 0.01140351963043213, 0.011370688438415528, 0.011411295890808106, 0.011405599594116212, 0.01144803237915039, 0.011428959846496582, 0.011375455856323243, 0.011408896446228027, 0.011442720413208008, 0.01144422435760498, 0.011374591827392578, 0.011460607528686523, 0.011405407905578613, 0.011433024406433106, 0.011461055755615235, 0.01140163230895996, 0.011372544288635255, 0.01143331241607666, 0.011375264167785644, 0.011553088188171386, 0.011400896072387695, 0.011419360160827636, 0.011374879837036132, 0.011362303733825683, 0.011309056282043458, 0.011390975952148438, 0.011421152114868164, 0.011411392211914062, 0.011398752212524415, 0.011349151611328125, 0.011410431861877441, 0.011385855674743652, 0.011485024452209473, 0.011396320343017578, 0.011417984008789062, 0.011415200233459473, 0.011395680427551269, 0.011411616325378418, 0.012310336112976075, 0.011358624458312988, 0.011478816032409668, 0.011616512298583985, 0.011453696250915527, 0.011536895751953125, 0.011441887855529785, 0.01143836784362793, 0.011329631805419921, 0.01135974407196045, 0.011380415916442871, 0.011492032051086426, 0.011421216011047364, 0.011360223770141602, 0.011456607818603515, 0.011395071983337402, 0.011428288459777832, 0.011386112213134765, 0.01136451244354248, 0.011326047897338867, 0.011358207702636718, 0.011356160163879395, 0.011370240211486816, 0.011495679855346679, 0.011184415817260741, 0.01139094352722168, 0.01224723243713379, 0.012143839836120605, 0.011464703559875488, 0.011524864196777344, 0.011449343681335449, 0.011480128288269043, 0.011503711700439453, 0.011398048400878906, 0.011416735649108887, 0.011394847869873046, 0.011400320053100587, 0.011430784225463867, 0.011472576141357422, 0.011401408195495606, 0.011382335662841797, 0.01132806396484375, 0.01142137622833252, 0.011378751754760743, 0.011362751960754395, 0.01138054370880127, 0.011390303611755371, 0.011376895904541016, 0.011456831932067871, 0.011472864151000977, 0.011430015563964844, 0.011408831596374512, 0.011416128158569336, 0.011374400138854981, 0.011360447883605957, 
0.011372544288635255, 0.011776255607604981, 0.011474592208862305, 0.011455936431884766, 0.011393823623657227, 0.011417535781860352, 0.011364192008972167, 0.011515999794006348, 0.011376383781433106, 0.011366656303405762, 0.011361632347106933, 0.011368191719055176, 0.011391712188720702, 0.011425984382629395, 0.011374591827392578, 0.011386591911315918, 0.011378047943115234, 0.011386848449707032, 0.011348608016967774, 0.011574687957763672, 0.011410207748413085, 0.011380255699157715, 0.011391584396362304, 0.011448672294616699, 0.011368096351623535, 0.011458720207214355, 0.011421536445617676, 0.011455583572387695, 0.011442079544067382, 0.011475968360900878, 0.01161196804046631, 0.01150499153137207, 0.011217984199523925, 0.011481887817382812, 0.011534687995910644, 0.01152787208557129, 0.01157705593109131, 0.011540127754211425, 0.011476991653442382, 0.011497088432312012, 0.01151689624786377, 0.011413824081420899, 0.01151353645324707, 0.011429823875427247, 0.01142950439453125, 0.011483424186706543, 0.011428000450134278, 0.011416607856750489, 0.011412544250488281, 0.011445311546325684, 0.011426655769348145, 0.011403264045715332, 0.011425408363342285, 0.011401280403137207, 0.01141107177734375, 0.01143068790435791, 0.011397024154663087, 0.011511327743530274, 0.011508288383483887, 0.011478303909301758, 0.011481727600097656, 0.011504863739013672, 0.011442879676818847, 0.011505760192871094, 0.011593728065490723, 0.011466719627380372, 0.011425824165344238, 0.011429856300354004, 0.011437888145446778, 0.01156937599182129, 0.011603648185729981, 0.011594047546386719, 0.011525407791137695, 0.011521087646484376, 0.01162399959564209, 0.011687328338623047, 0.011928256034851075, 0.011509504318237305, 0.011528512001037598, 0.011648287773132324, 0.011729567527770997, 0.011790335655212402, 0.01175551986694336, 0.011841792106628418, 0.011788031578063966, 0.011799679756164552, 0.011742079734802246, 0.011803872108459472, 0.01205894374847412, 0.011772224426269531, 0.011770175933837891, 0.011752320289611816, 0.011747648239135742, 0.011856032371520995, 0.01163929557800293, 0.011681119918823242, 0.011799200057983399, 0.01177567958831787, 0.011716927528381348, 0.011702272415161133, 0.011720831871032716, 0.011759296417236328, 0.011654656410217285, 0.011956928253173828, 0.013447168350219727, 0.011862208366394043, 0.011871264457702637, 0.011850432395935058, 0.011982111930847169, 0.01188486385345459, 0.011792896270751953, 0.01179593563079834, 0.011817503929138183, 0.011806943893432616, 0.012005503654479981, 0.011785663604736329, 0.011714112281799317, 0.011648927688598633, 0.011772607803344727, 0.011740320205688477, 0.011744159698486328, 0.011697440147399903, 0.011762399673461914, 0.01163987159729004, 0.011666687965393066, 0.01170736026763916, 0.01175823974609375, 0.011782143592834473, 0.01170867156982422, 0.0116528959274292, 0.011612031936645508, 0.011655327796936035, 0.011620351791381836, 0.011646016120910645, 0.011598719596862792, 0.011577407836914062, 0.011607872009277344, 0.011615872383117676, 0.011668095588684082, 0.011873567581176758, 0.0116594877243042, 0.011767295837402344, 0.011596735954284667, 0.011575296401977539, 0.011535712242126465, 0.011543007850646972, 0.01150380802154541, 0.01153983974456787, 0.011532320022583007, 0.011561663627624511, 0.011653023719787598, 0.011688096046447754, 0.011695967674255372, 0.011603167533874512, 0.011545375823974609, 0.011603967666625976, 0.011589920043945313, 0.011568384170532226, 0.01142198371887207, 0.011561920166015625, 0.011589664459228516, 0.011582367897033692, 
0.011589216232299806, 0.011681183815002442, 0.011756352424621582, 0.011714591979980468, 0.011728896141052245, 0.011718655586242676, 0.01176371192932129, 0.011889663696289063, 0.01170467185974121, 0.011771776199340821, 0.011891488075256348, 0.011886336326599122, 0.011810144424438477, 0.011815839767456055, 0.01173692798614502, 0.0117161283493042, 0.011837311744689941, 0.011741951942443847, 0.011673600196838378, 0.011766816139221191, 0.011729887962341309, 0.01166438388824463, 0.011670528411865234, 0.01174937629699707, 0.011677696228027343, 0.01198857593536377, 0.011759967803955078, 0.011734944343566894, 0.011780256271362305, 0.011650464057922364, 0.011713120460510254, 0.011814911842346192, 0.011681280136108398, 0.01175500774383545, 0.011613183975219727, 0.011630080223083495, 0.011818559646606445, 0.011701184272766113, 0.011673312187194824, 0.011630144119262695, 0.01167024040222168, 0.011866144180297852, 0.011825311660766601, 0.011851455688476563, 0.011851743698120117, 0.011794848442077637, 0.011675423622131347, 0.011628512382507325, 0.011655167579650879, 0.011655200004577636, 0.011712608337402343, 0.011691904067993164, 0.011784192085266113, 0.011902048110961913, 0.012000288009643555, 0.012063648223876953, 0.012049375534057617, 0.012038463592529297, 0.011877984046936034, 0.011529248237609863, 0.011745823860168456, 0.011690431594848633, 0.011673503875732422, 0.011702303886413573, 0.011712287902832031, 0.011565407752990723, 0.011609248161315918, 0.011615008354187011, 0.011720704078674317, 0.011853216171264648, 0.011808575630187989, 0.011900704383850097, 0.011817376136779785, 0.011797439575195313, 0.011912863731384277, 0.012006912231445312, 0.012069375991821289, 0.011903200149536133, 0.011986271858215332, 0.012253631591796875, 0.011899968147277833, 0.01196127986907959, 0.01196345615386963, 0.01184659194946289, 0.011800576210021972, 0.01182431983947754, 0.01184447956085205, 0.011840991973876953, 0.01234716796875, 0.011993791580200195, 0.012664544105529784, 0.011915552139282226, 0.011964320182800293, 0.011914688110351563, 0.011912192344665527, 0.011817824363708497, 0.01180345630645752, 0.011914976119995117, 0.011818367958068848, 0.01186297607421875, 0.011826399803161622, 0.011857664108276367, 0.012794143676757812, 0.01353212833404541, 0.011915295600891114, 0.012015328407287598, 0.011821120262145997, 0.011751359939575195, 0.011841535568237305, 0.01177190399169922, 0.01175152015686035, 0.011716511726379395, 0.011683839797973633, 0.01164310359954834, 0.011763456344604493, 0.011628000259399415, 0.011869791984558106, 0.011735584259033203, 0.011606464385986328, 0.011710463523864746, 0.011661312103271485, 0.0116428804397583, 0.011378687858581543, 0.011638784408569336, 0.01157487964630127, 0.01162281608581543, 0.011809951782226562, 0.011669983863830566, 0.011690367698669433, 0.011708415985107423, 0.011613696098327637, 0.011624959945678711, 0.011583456039428711, 0.011534367561340332, 0.011579744338989258, 0.011768832206726074, 0.011690655708312988, 0.01163263988494873, 0.01160534381866455, 0.01170860767364502, 0.011672032356262208, 0.01156719970703125, 0.011683744430541992, 0.011636608123779297, 0.011633952140808106, 0.011686752319335938, 0.01168553638458252, 0.011629216194152832, 0.011641759872436524, 0.011762207984924316, 0.011719167709350586, 0.011904447555541993, 0.011854207992553711, 0.011773887634277344, 0.011786239624023438, 0.011850048065185546, 0.011808032035827637, 0.011715200424194336, 0.011873056411743164, 0.011717535972595216, 0.011789983749389648, 0.011866847991943359, 0.011854880332946778, 
0.011802399635314941, 0.01170729637145996, 0.011666912078857422, 0.011625184059143066, 0.011591423988342285, 0.011601984024047851, 0.011603615760803222, 0.011585887908935546, 0.011605664253234864, 0.012327263832092285, 0.01274675178527832, 0.011984288215637207, 0.012610048294067382, 0.011717023849487305, 0.011705727577209472, 0.011675647735595703, 0.011624128341674804, 0.011754112243652344, 0.011982848167419433, 0.012105119705200196, 0.012055135726928711, 0.012025407791137695]",tokens/s,86.4553458205702,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1511.657472,1383.989248,0.0,981.467136,978.805248,s,1,8.3412880859375,8.3412880859375,0.0,8.3412880859375,8.3412880859375,8.3412880859375,8.3412880859375,[8.3412880859375],,kWh,3.797005896249932e-05,4.180768217604495e-06,1.196973179801164e-05,5.412055897811546e-05,,MB,1607.778304,1526.595584,0.0,1109.393408,1064.45312,s,10,0.5321865921020508,0.05321865921020508,0.0004651977802656137,0.05325665664672852,0.05349269599914551,0.0538780122756958,0.054186265296936036,"[0.052375648498535154, 0.053075393676757815, 0.05310598373413086, 0.0532872314453125, 0.05272351837158203, 0.05322608184814453, 0.05339878463745117, 0.054263328552246096, 0.05332355117797852, 0.05340707015991211]",tokens/s,4810.342909783606,kWh,1.5634709008021178e-06,1.724222064375655e-07,1.0337530551657167e-06,2.7696461624054e-06,tokens/kWh,92430579.57182065,MB,1615.23712,1610.481664,0.0,1193.279488,1117.180928,s,10,11.678140869140625,1.1678140869140625,0.0069589869400435095,1.1681026000976562,1.1769537963867187,1.1782032165527343,1.1792027526855469,"[1.156515625, 
1.17945263671875, 1.172553955078125, 1.17076025390625, 1.16179296875, 1.16678369140625, 1.1766761474609375, 1.1694215087890625, 1.1603458251953125, 1.1638382568359376]",tokens/s,53.94694301597003,kWh,3.373011068586522e-05,3.7199887348930535e-06,1.6577372144833972e-05,5.402747156559223e-05,tokens/kWh,1166073.4469780738,,s,630,11.675702033996577,0.018532860371423147,0.0003262209306607475,0.018440768241882323,0.018919833564758303,0.01911366491317749,0.019826448116302492,"[0.018441856384277342, 0.018208864212036133, 0.018340415954589843, 0.018198528289794923, 0.018394847869873047, 0.018226783752441408, 0.018240192413330077, 0.018374591827392577, 0.018359392166137696, 0.01826300811767578, 0.018231039047241212, 0.018202335357666015, 0.01826665687561035, 0.01824563217163086, 0.018261184692382814, 0.018301759719848633, 0.01842790412902832, 0.018337791442871093, 0.01823539161682129, 0.01830633544921875, 0.018266847610473633, 0.018556928634643553, 0.018323295593261717, 0.018418943405151367, 0.018303903579711914, 0.018251264572143554, 0.018295295715332033, 0.01813443183898926, 0.018293344497680664, 0.018950143814086915, 0.018548736572265623, 0.01849932861328125, 0.018442495346069336, 0.01827631950378418, 0.01820879936218262, 0.018153280258178712, 0.01820857620239258, 0.018350400924682618, 0.01827155113220215, 0.018178815841674804, 0.018318912506103516, 0.02049667167663574, 0.018788511276245118, 0.018319360733032225, 0.018552831649780274, 0.018388992309570314, 0.0182392635345459, 0.01825923156738281, 0.018224063873291015, 0.018294048309326173, 0.018339584350585938, 0.01826915168762207, 0.018315263748168945, 0.018339136123657226, 0.018387584686279296, 0.01813920021057129, 0.018277664184570313, 0.018146015167236327, 0.018233184814453126, 0.018247840881347656, 0.01823855972290039, 0.018361248016357423, 0.018316736221313478, 0.018247968673706056, 0.018533920288085936, 0.0182194881439209, 0.018195648193359375, 0.018198911666870116, 0.018267808914184572, 0.01823619270324707, 0.01826201629638672, 0.018298879623413086, 0.018394496917724608, 0.01825404739379883, 0.01824764823913574, 0.018343744277954103, 0.01824729537963867, 0.01985353660583496, 0.020100095748901366, 0.018572479248046874, 0.018655519485473632, 0.018948415756225585, 0.018391168594360352, 0.018406272888183594, 0.018332672119140626, 0.01839619255065918, 0.018404064178466798, 0.01817795181274414, 0.018436479568481445, 0.018294752120971678, 0.01829644775390625, 0.019708288192749022, 0.01881920051574707, 0.019156864166259766, 0.018585599899291993, 0.019682880401611327, 0.018807231903076174, 0.018696191787719727, 0.018601375579833983, 0.018491392135620118, 0.018436704635620117, 0.01850761604309082, 0.01853455924987793, 0.018618368148803712, 0.018948095321655273, 0.019390464782714844, 0.019417087554931642, 0.019527679443359376, 0.01942323112487793, 0.01923891258239746, 0.0192491512298584, 0.019168832778930663, 0.019421312332153322, 0.019174848556518555, 0.019121023178100587, 0.018919424057006837, 0.018876319885253907, 0.01892092704772949, 0.018879104614257812, 0.01877382469177246, 0.0187127685546875, 0.018526208877563476, 0.01831920051574707, 0.018388448715209962, 0.018483903884887694, 0.01845792007446289, 0.01866268730163574, 0.018422271728515623, 0.01843631935119629, 0.018509824752807616, 0.01843814468383789, 0.018579456329345705, 0.01841971206665039, 0.018276351928710938, 0.018350080490112306, 0.01826950454711914, 0.018296575546264647, 0.01832851219177246, 0.018231296539306642, 0.01846793556213379, 0.018310047149658202, 0.01842585563659668, 
0.01899519920349121, 0.01842508888244629, 0.018380863189697266, 0.018322111129760742, 0.01850927925109863, 0.01843459129333496, 0.01846601676940918, 0.018418527603149413, 0.01828646469116211, 0.01828256034851074, 0.018226367950439453, 0.018228031158447264, 0.01824358367919922, 0.018257919311523436, 0.01821696090698242, 0.018257408142089843, 0.018393600463867187, 0.018769920349121092, 0.018700288772583007, 0.018664608001708983, 0.01921455955505371, 0.018997215270996095, 0.018946720123291017, 0.018822656631469727, 0.018847360610961914, 0.018824064254760742, 0.018892736434936525, 0.01893382453918457, 0.018873664855957033, 0.018889408111572265, 0.01879859161376953, 0.018873664855957033, 0.01904096031188965, 0.01902796745300293, 0.018860031127929687, 0.018988479614257814, 0.018870847702026367, 0.0190380802154541, 0.01913868713378906, 0.01894105529785156, 0.018824064254760742, 0.0188272647857666, 0.018771968841552734, 0.018777952194213868, 0.01855459213256836, 0.01846112060546875, 0.0183767032623291, 0.018501632690429686, 0.018454143524169922, 0.01885830307006836, 0.018364479064941406, 0.01819385528564453, 0.018682432174682618, 0.018380800247192384, 0.018222431182861328, 0.01854108810424805, 0.018086015701293947, 0.01842790412902832, 0.01874665641784668, 0.018866912841796875, 0.018759679794311524, 0.018445472717285156, 0.018287456512451172, 0.018257183074951173, 0.01831395149230957, 0.01860121536254883, 0.018622335433959962, 0.01870732879638672, 0.0186429443359375, 0.018518016815185546, 0.018370559692382812, 0.018493440628051756, 0.01832899284362793, 0.018633087158203124, 0.01843222427368164, 0.01850707244873047, 0.01840608024597168, 0.018700288772583007, 0.01828771209716797, 0.018236095428466798, 0.01825814437866211, 0.01879654312133789, 0.0183175048828125, 0.018596799850463867, 0.018657728195190428, 0.019962303161621092, 0.01893708801269531, 0.018798431396484374, 0.018919712066650392, 0.018992767333984376, 0.01882828712463379, 0.018890495300292968, 0.018713888168334962, 0.018475999832153322, 0.01843132781982422, 0.018243871688842773, 0.018257408142089843, 0.018383264541625977, 0.018558752059936522, 0.01861292839050293, 0.018969951629638673, 0.018518688201904297, 0.01863804817199707, 0.018524959564208986, 0.018404767990112304, 0.01837936019897461, 0.01843987274169922, 0.020199199676513672, 0.018424352645874022, 0.018511871337890624, 0.019564735412597657, 0.018532032012939452, 0.018603519439697267, 0.018488128662109374, 0.018323455810546875, 0.018362367630004883, 0.0182762565612793, 0.018263935089111328, 0.01823356819152832, 0.018239488601684572, 0.018236799240112303, 0.018178688049316407, 0.01831488037109375, 0.018278783798217773, 0.018519872665405272, 0.01846905517578125, 0.018577407836914063, 0.01858902359008789, 0.01987446403503418, 0.018405376434326173, 0.01842790412902832, 0.018390399932861328, 0.018416255950927735, 0.018366464614868162, 0.0182476806640625, 0.018325504302978517, 0.01841152000427246, 0.01838204765319824, 0.018303680419921874, 0.018341472625732422, 0.018248191833496095, 0.01835212707519531, 0.018255872726440428, 0.0186429443359375, 0.01820467185974121, 0.018992256164550782, 0.018555776596069336, 0.01826201629638672, 0.01845248031616211, 0.01817318344116211, 0.01839923286437988, 0.01828326416015625, 0.018184032440185547, 0.018485408782958984, 0.018275360107421874, 0.018198495864868165, 0.01815449523925781, 0.018503423690795898, 0.01824995231628418, 0.018184192657470705, 0.018597919464111327, 0.01821900749206543, 0.018192384719848635, 0.01838489532470703, 0.018358272552490236, 
0.02067865562438965, 0.018562463760375975, 0.018333568572998046, 0.018301631927490233, 0.018230464935302733, 0.018284767150878907, 0.018236032485961916, 0.0181790714263916, 0.018367776870727538, 0.018260543823242187, 0.018247520446777344, 0.018187456130981446, 0.018201568603515624, 0.018305023193359374, 0.01823289680480957, 0.01825836753845215, 0.01829478454589844, 0.01839308738708496, 0.018234783172607422, 0.01830521583557129, 0.018235328674316407, 0.018282976150512695, 0.018415615081787108, 0.018238975524902345, 0.018207231521606446, 0.018298879623413086, 0.018583263397216797, 0.01860416030883789, 0.01851398468017578, 0.018690143585205078, 0.019058143615722656, 0.018557472229003905, 0.01838800048828125, 0.018806751251220704, 0.01860710334777832, 0.018988479614257814, 0.018475584030151367, 0.018505599975585936, 0.018459999084472656, 0.01884182357788086, 0.018497215270996094, 0.01843699264526367, 0.018449600219726563, 0.018420320510864258, 0.018462879180908203, 0.018437471389770508, 0.018382591247558595, 0.0184616641998291, 0.018610015869140625, 0.018691328048706053, 0.018881311416625978, 0.01880099105834961, 0.0186977596282959, 0.01876780891418457, 0.018782432556152345, 0.018864160537719728, 0.018888383865356444, 0.01882691192626953, 0.018655967712402344, 0.018522111892700196, 0.018520063400268554, 0.018464767456054687, 0.018485248565673826, 0.018597888946533202, 0.018524160385131837, 0.01853753662109375, 0.01853536033630371, 0.018541759490966796, 0.01859667205810547, 0.018685951232910156, 0.018435840606689454, 0.018817024230957033, 0.018861663818359374, 0.018587263107299804, 0.01858745574951172, 0.018434976577758787, 0.018482271194458007, 0.018500576019287108, 0.018497312545776367, 0.01852796745300293, 0.01864137649536133, 0.018638879776000976, 0.018554880142211915, 0.018601984024047852, 0.018651136398315428, 0.018386688232421875, 0.018501119613647463, 0.018563327789306642, 0.018473472595214844, 0.01864499282836914, 0.018780160903930664, 0.01844166374206543, 0.018844127655029297, 0.018826623916625977, 0.018983648300170897, 0.0188023681640625, 0.018550207138061523, 0.018678495407104492, 0.018471071243286133, 0.01847420883178711, 0.0184102725982666, 0.018433855056762694, 0.018457952499389647, 0.018587583541870116, 0.019025888442993164, 0.01869919967651367, 0.018595199584960937, 0.018686592102050783, 0.018747392654418944, 0.018973791122436523, 0.019104671478271485, 0.019001344680786132, 0.01882111930847168, 0.019021823883056642, 0.019013504028320312, 0.01893507194519043, 0.018825279235839844, 0.018659551620483397, 0.018708320617675783, 0.018615007400512695, 0.01850092887878418, 0.018635103225708008, 0.018512224197387694, 0.018585599899291993, 0.01866547203063965, 0.018503679275512695, 0.018618368148803712, 0.018949567794799806, 0.018911487579345704, 0.018907039642333985, 0.01856550407409668, 0.018587488174438477, 0.018722623825073243, 0.01861631965637207, 0.01917241668701172, 0.01901628875732422, 0.01884102439880371, 0.01921116828918457, 0.0186998405456543, 0.018583999633789063, 0.018868223190307617, 0.018538496017456055, 0.018503488540649413, 0.01837238311767578, 0.01845903968811035, 0.018547775268554688, 0.018616832733154298, 0.018520288467407227, 0.018810239791870118, 0.018360288619995117, 0.018305791854858398, 0.01839516830444336, 0.018374303817749023, 0.018323776245117187, 0.018362495422363283, 0.01839923286437988, 0.018880832672119142, 0.018446016311645507, 0.018311168670654295, 0.018317312240600587, 0.01843814468383789, 0.01850137519836426, 0.018527999877929687, 0.018417407989501953, 
0.019593984603881835, 0.018415615081787108, 0.018413568496704103, 0.01848262405395508, 0.018376447677612304, 0.018893632888793945, 0.018487104415893553, 0.018350048065185545, 0.01863702392578125, 0.018416799545288087, 0.018365055084228515, 0.018274303436279296, 0.018292192459106446, 0.01829305648803711, 0.018479007720947266, 0.01864143943786621, 0.018274303436279296, 0.01841548728942871, 0.018356096267700194, 0.018419967651367188, 0.01859993553161621, 0.018509824752807616, 0.019441440582275392, 0.019169504165649415, 0.019035263061523436, 0.018635648727416992, 0.018529792785644532, 0.018678272247314453, 0.018284543991088868, 0.018212480545043944, 0.01852009582519531, 0.018286943435668945, 0.018281919479370117, 0.018984960556030273, 0.01825484848022461, 0.019760128021240234, 0.018395135879516602, 0.018348031997680665, 0.018233343124389647, 0.018306144714355467, 0.018273056030273436, 0.018298656463623046, 0.018178144454956056, 0.018311424255371092, 0.01824358367919922, 0.01826201629638672, 0.01830297660827637, 0.018271392822265625, 0.018273120880126954, 0.018306432723999025, 0.018463359832763673, 0.018472192764282226, 0.01834060859680176, 0.018429088592529296, 0.018348896026611328, 0.018322656631469727, 0.018362655639648437, 0.01832806396484375, 0.018354175567626953, 0.018354175567626953, 0.018304191589355468, 0.01831177520751953, 0.018300832748413084, 0.01830944061279297, 0.018319360733032225, 0.01868182373046875, 0.0182968635559082, 0.01843404769897461, 0.018423040390014647, 0.01842799949645996, 0.018452192306518556, 0.01834899139404297, 0.01843814468383789, 0.018909503936767578, 0.01849228858947754, 0.018508480072021483, 0.018466943740844725, 0.018511871337890624, 0.01845996856689453, 0.018453184127807616, 0.018327360153198243, 0.018343551635742188, 0.018588224411010743, 0.01847091293334961, 0.018505439758300782, 0.0182741756439209, 0.0183536319732666, 0.018299840927124025, 0.018335615158081055, 0.018356224060058594, 0.018603872299194336, 0.01838870429992676, 0.01841619110107422, 0.01841766357421875, 0.01839027214050293, 0.018404096603393556, 0.01863596725463867, 0.018556800842285157, 0.01838585662841797, 0.01843404769897461, 0.018392608642578124, 0.018436576843261717, 0.018480735778808592, 0.018562719345092772, 0.018627328872680662, 0.018659328460693358, 0.01836851119995117, 0.01846067237854004, 0.018524160385131837, 0.018472959518432617, 0.018395135879516602, 0.018296831130981444, 0.018311168670654295, 0.018350080490112306, 0.018290687561035156, 0.018526208877563476, 0.01831110382080078, 0.01830672073364258, 0.018269695281982423, 0.018330432891845702, 0.018202720642089845, 0.019333023071289063, 0.01930863952636719, 0.018664543151855468, 0.019249568939208983, 0.018667999267578124, 0.018732799530029296, 0.01870262336730957, 0.01861952018737793, 0.01867830467224121, 0.018692319869995117, 0.018562240600585936, 0.01848579216003418, 0.01848361587524414, 0.018478080749511717, 0.01844326400756836, 0.018372608184814454, 0.018647039413452148, 0.018476160049438476, 0.018350976943969727, 0.018371904373168945, 0.018379199981689454, 0.018286848068237306, 0.01836604881286621, 0.01828700828552246, 0.018318912506103516, 0.018353599548339844, 0.018346303939819335, 0.01833603286743164, 0.018243135452270506, 0.01834480094909668, 0.018292512893676758, 0.01826838493347168, 0.018300928115844727, 0.0182857608795166, 0.01829151916503906, 0.01829478454589844, 0.018309024810791014, 0.018380479812622072]",tokens/s,53.95821152043836,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4915.286016,5995.626496,0.0,5593.104384,5582.857216,s,1,11.58011328125,11.58011328125,0.0,11.58011328125,11.58011328125,11.58011328125,11.58011328125,[11.58011328125],,kWh,0.00011850505552081359,1.3064226601279462e-05,3.4774194485998056e-05,0.0001663434766080911,,MB,1621.553152,6333.267968,0.0,5916.065792,5844.559872,s,10,2.0788035583496094,0.20788035583496095,0.0003135440088366229,0.20791741943359376,0.20820903778076172,0.20829984664916992,0.2083724937438965,"[0.2075264892578125, 0.20839065551757813, 0.20769354248046876, 0.20744111633300782, 0.20788377380371093, 0.20817628479003905, 0.20805877685546875, 0.20749299621582032, 0.20795106506347658, 0.20818885803222656]",tokens/s,1231.4775918665537,kWh,6.097548971961923e-06,6.724245428674384e-07,4.055992828124894e-06,1.0825966342954257e-05,tokens/kWh,23646849.795224946,MB,1629.88032,6333.267968,0.0,5916.065792,5844.562432,s,10,16.1421396484375,1.61421396484375,0.002667933527098197,1.6145574951171875,1.617074658203125,1.617607275390625,1.618033369140625,"[1.614301025390625, 1.61604638671875, 1.609155517578125, 1.6145924072265625, 1.615279541015625, 1.616956298828125, 1.618139892578125, 1.612981689453125, 1.6145225830078125, 1.610164306640625]",tokens/s,39.02828334538549,kWh,4.72720205709524e-05,5.213861725912573e-06,3.13388410432759e-05,8.382472334014087e-05,tokens/kWh,751568.2425143346,,s,630,16.13977819252014,0.025618695543682762,0.0001650906458492373,0.02560095977783203,0.025801426315307616,0.025880374336242674,0.026148834533691406,"[0.025933439254760743, 0.02575164794921875, 0.02551807975769043, 0.02543235206604004, 0.0255795841217041, 0.025470912933349608, 0.02549964714050293, 0.025438144683837892, 0.025399391174316405, 0.025454048156738282, 0.025331520080566407, 0.025420480728149415, 0.025511072158813475, 0.02544879913330078, 0.025344671249389647, 0.025509727478027343, 0.02553446388244629, 0.025549184799194335, 0.025388671875, 0.0255098876953125, 0.02555084800720215, 0.025468063354492188, 0.025329599380493163, 0.02546518325805664, 0.02548796844482422, 0.025485279083251954, 0.025614336013793947, 0.02568806457519531, 0.025556991577148438, 0.025474943161010743, 0.025446815490722655, 0.025486112594604492, 0.025643104553222655, 0.025586143493652343, 0.025753984451293944, 0.02557542419433594, 0.025535520553588868, 0.0260720329284668, 0.025694143295288085, 0.025757568359375, 0.025732320785522463, 0.02572591972351074, 0.025829376220703124, 0.0261114559173584, 0.025690271377563478, 0.02564691162109375, 0.025801279067993163, 0.02569772720336914, 0.02561862373352051, 0.025642751693725586, 0.025682559967041017, 0.02560207939147949, 0.025714847564697267, 0.02574127960205078, 0.025706335067749022, 0.025599231719970705, 0.025852256774902344, 0.025922048568725587, 0.025772960662841796, 0.025789440155029295, 0.025739007949829102, 0.025880128860473632, 0.025856544494628906, 0.02597068786621094, 0.025466943740844728, 0.025452064514160155, 0.025424287796020507, 0.025436351776123047, 0.025377695083618163, 0.025316255569458008, 0.025475072860717773, 0.025659391403198242, 0.025589759826660157, 0.025509056091308595, 0.02557606315612793, 0.02547248077392578, 0.025591999053955077, 0.02541417694091797, 0.025519712448120117, 0.025639328002929687, 0.025866239547729493, 0.025446399688720703, 0.025862144470214843, 0.02672435188293457, 0.025578624725341798, 0.025428863525390626, 0.025407039642333984, 0.025425535202026367, 0.025530208587646486, 0.02553340721130371, 0.025578943252563477, 
0.025739904403686523, 0.025549919128417968, 0.0256625919342041, 0.02564476776123047, 0.025552896499633788, 0.02568806457519531, 0.025625696182250978, 0.02588972854614258, 0.025937887191772462, 0.025631776809692385, 0.02555766487121582, 0.025915712356567384, 0.025774080276489256, 0.02575564765930176, 0.025700063705444337, 0.025671968460083006, 0.025833471298217774, 0.025663808822631837, 0.025667264938354493, 0.02569011116027832, 0.02578166389465332, 0.025777055740356446, 0.025581247329711915, 0.025736671447753906, 0.02571932792663574, 0.025667680740356445, 0.02556287956237793, 0.025710752487182617, 0.025726720809936522, 0.025643264770507813, 0.02564908790588379, 0.025751615524291994, 0.02571295928955078, 0.02567510414123535, 0.02568608093261719, 0.025724895477294923, 0.025483072280883787, 0.025553119659423827, 0.025583904266357423, 0.025627552032470705, 0.025462944030761717, 0.02540611267089844, 0.025561088562011718, 0.025464832305908205, 0.02558361625671387, 0.02555904006958008, 0.025554943084716796, 0.02559561538696289, 0.02566172790527344, 0.025636320114135743, 0.0256200008392334, 0.025609407424926758, 0.025613759994506834, 0.02552252769470215, 0.02542799949645996, 0.025808895111083984, 0.025561088562011718, 0.02542211151123047, 0.025370336532592772, 0.025640960693359374, 0.02554582405090332, 0.025451648712158204, 0.02533967971801758, 0.025437471389770507, 0.02544713592529297, 0.02543539237976074, 0.02538368034362793, 0.025454208374023436, 0.025441856384277345, 0.0254102725982666, 0.02533731269836426, 0.025461376190185545, 0.02546073532104492, 0.025429056167602538, 0.02561961555480957, 0.025637792587280273, 0.02553945541381836, 0.025515424728393556, 0.025443967819213868, 0.025484256744384766, 0.025544639587402343, 0.025487424850463868, 0.025632511138916014, 0.025600160598754883, 0.025527488708496093, 0.025483680725097657, 0.025571840286254883, 0.025613887786865235, 0.02568227195739746, 0.025573631286621094, 0.02564009666442871, 0.02562518310546875, 0.02558892822265625, 0.025588640213012694, 0.025598047256469726, 0.025607328414916992, 0.02556707191467285, 0.025688352584838866, 0.025867263793945314, 0.025406143188476563, 0.025511936187744142, 0.02535424041748047, 0.02530303955078125, 0.025446399688720703, 0.02545430374145508, 0.0254866886138916, 0.025379360198974608, 0.02543020820617676, 0.025421472549438478, 0.025463359832763672, 0.02547420883178711, 0.02565616035461426, 0.026009599685668947, 0.025430015563964844, 0.025448448181152345, 0.02551807975769043, 0.025544704437255858, 0.02547020721435547, 0.02551433563232422, 0.025676191329956053, 0.025540607452392578, 0.025517887115478515, 0.0255346565246582, 0.025585664749145507, 0.025571136474609374, 0.025656831741333007, 0.025547552108764648, 0.025570560455322265, 0.025483936309814454, 0.025575391769409178, 0.02553353691101074, 0.02558252716064453, 0.025573375701904297, 0.025612287521362305, 0.025675775527954102, 0.0256080322265625, 0.025587871551513673, 0.025738239288330078, 0.025768192291259765, 0.025725408554077147, 0.025717023849487305, 0.026238975524902345, 0.025587104797363282, 0.02559555244445801, 0.025686975479125976, 0.02585785675048828, 0.02578825569152832, 0.02573142433166504, 0.025688127517700197, 0.025795936584472656, 0.025706720352172852, 0.02565158462524414, 0.02572083282470703, 0.025827392578125, 0.025701759338378906, 0.02579542350769043, 0.025792224884033203, 0.025874431610107423, 0.025657344818115234, 0.025839616775512695, 0.025843711853027345, 0.026189376831054687, 0.025717056274414063, 0.025566911697387694, 
0.025571840286254883, 0.025484960556030275, 0.025536672592163086, 0.025513376235961914, 0.025467584609985352, 0.02550774383544922, 0.02559712028503418, 0.02548761558532715, 0.02535260772705078, 0.025497760772705078, 0.025593856811523437, 0.025483455657958985, 0.02534614372253418, 0.025449567794799805, 0.02546771240234375, 0.025482912063598633, 0.025665695190429688, 0.025649152755737304, 0.025602048873901367, 0.025528383255004884, 0.02570364761352539, 0.025708736419677733, 0.025613855361938477, 0.025509920120239258, 0.025709728240966796, 0.02565315246582031, 0.025664928436279297, 0.025530879974365234, 0.02559951972961426, 0.025589824676513672, 0.02553868865966797, 0.025667680740356445, 0.02558790397644043, 0.02572697639465332, 0.02562803268432617, 0.025801183700561524, 0.02575708770751953, 0.025701343536376955, 0.025646240234375, 0.025564832687377928, 0.02575849533081055, 0.025737407684326172, 0.02571392059326172, 0.025709312438964845, 0.025665567398071288, 0.025589632034301757, 0.025775487899780274, 0.025759519577026366, 0.025715072631835936, 0.025690784454345705, 0.025644447326660155, 0.025745119094848633, 0.025634719848632814, 0.025629568099975585, 0.02568329620361328, 0.02571536064147949, 0.02569215965270996, 0.02588057518005371, 0.02579574394226074, 0.02583635139465332, 0.025951744079589844, 0.02564476776123047, 0.025449247360229493, 0.02542790412902832, 0.025413440704345702, 0.02542207908630371, 0.0254270076751709, 0.025748416900634764, 0.025631872177124024, 0.02550668716430664, 0.025425920486450194, 0.025534175872802736, 0.025476512908935548, 0.02539126396179199, 0.025553792953491212, 0.025894752502441408, 0.025695648193359375, 0.025541215896606444, 0.02551571273803711, 0.025624895095825197, 0.025399295806884766, 0.02540719985961914, 0.025489503860473633, 0.025557119369506835, 0.025554847717285157, 0.02547443199157715, 0.025618495941162108, 0.025752288818359375, 0.02569398307800293, 0.02554902458190918, 0.025700544357299803, 0.025587327957153322, 0.025516223907470704, 0.025653247833251954, 0.025775936126708983, 0.025729215621948243, 0.025667583465576172, 0.02573107147216797, 0.02575391960144043, 0.02574236869812012, 0.025614816665649413, 0.02598476791381836, 0.025622880935668946, 0.02558064079284668, 0.0256646728515625, 0.025777503967285155, 0.025879039764404296, 0.025724128723144533, 0.025666624069213866, 0.02582512092590332, 0.025667583465576172, 0.025810815811157226, 0.0258939208984375, 0.02588502311706543, 0.02582748794555664, 0.0257174072265625, 0.025693952560424806, 0.02568150329589844, 0.02560063934326172, 0.025767776489257814, 0.02583478355407715, 0.026139360427856445, 0.026220544815063477, 0.025931711196899413, 0.02545846366882324, 0.025493215560913086, 0.025467008590698243, 0.025438751220703125, 0.025544544219970704, 0.025583295822143554, 0.025833791732788085, 0.025544704437255858, 0.02542313575744629, 0.025628896713256837, 0.025571840286254883, 0.025552896499633788, 0.025575168609619142, 0.025540607452392578, 0.025493759155273438, 0.025450080871582032, 0.025540128707885742, 0.025525375366210936, 0.02551171112060547, 0.025465951919555665, 0.025583776473999023, 0.025799392700195312, 0.025585664749145507, 0.025447519302368164, 0.025566112518310546, 0.02570240020751953, 0.025552127838134767, 0.025602815628051757, 0.025667583465576172, 0.025604095458984375, 0.025561088562011718, 0.025704639434814453, 0.025675296783447266, 0.02560758399963379, 0.02559577560424805, 0.02562495994567871, 0.025689823150634766, 0.025600927352905273, 0.025556991577148438, 0.025737152099609376, 
0.025917503356933595, 0.0257126407623291, 0.025790367126464844, 0.02579055976867676, 0.025936927795410156, 0.025743520736694336, 0.025817920684814453, 0.02578963279724121, 0.025742143630981446, 0.02571878433227539, 0.026427072525024416, 0.026152704238891603, 0.02573369598388672, 0.02581443214416504, 0.025731679916381835, 0.02666268730163574, 0.025661792755126953, 0.025912288665771485, 0.02575391960144043, 0.02564476776123047, 0.025721088409423828, 0.025706464767456055, 0.025761791229248047, 0.025608192443847655, 0.025647104263305662, 0.02547711944580078, 0.025485279083251954, 0.0254116153717041, 0.02539641571044922, 0.025389888763427734, 0.025387008666992186, 0.025409536361694338, 0.025413152694702148, 0.025360864639282228, 0.025384960174560548, 0.025411584854125976, 0.02544144058227539, 0.025336671829223632, 0.02555036735534668, 0.025554431915283202, 0.025502687454223634, 0.025413631439208984, 0.025638080596923827, 0.025633663177490235, 0.025556928634643556, 0.02572195243835449, 0.025613216400146483, 0.025582624435424806, 0.02560099220275879, 0.02573244857788086, 0.02574403190612793, 0.025624576568603515, 0.025443519592285156, 0.025551679611206055, 0.02561408042907715, 0.025550399780273438, 0.025445056915283204, 0.025514047622680665, 0.025613439559936522, 0.025572063446044922, 0.025581056594848633, 0.025669855117797853, 0.025670015335083007, 0.025628543853759764, 0.025683263778686523, 0.025612255096435547, 0.02559056091308594, 0.025556928634643556, 0.025704416275024414, 0.025683935165405274, 0.025662879943847656, 0.025762592315673828, 0.025745759963989256, 0.025835391998291015, 0.025671520233154298, 0.025679647445678713, 0.025735328674316407, 0.02572457695007324, 0.025610368728637697, 0.025774303436279296, 0.025868320465087892, 0.025896127700805665, 0.02579750442504883, 0.025737119674682618, 0.02575766372680664, 0.025872575759887696, 0.025612096786499023, 0.02543529510498047, 0.02536025619506836, 0.025373151779174805, 0.025428287506103514, 0.025409791946411135, 0.02565318489074707, 0.02558361625671387, 0.02569593620300293, 0.025510208129882812, 0.02555084800720215, 0.02556220817565918, 0.02564803123474121, 0.025399232864379884, 0.025473215103149413, 0.025496736526489257, 0.025537248611450195, 0.02551366424560547, 0.02562623977661133, 0.02556924819946289, 0.02554140853881836, 0.025593791961669922, 0.025571327209472656, 0.02555904006958008, 0.025505760192871093, 0.025524639129638673, 0.02558028793334961, 0.025496448516845703, 0.025511936187744142, 0.025702688217163087, 0.025632575988769533, 0.025622432708740234, 0.025579519271850586, 0.0255467529296875, 0.02555824089050293, 0.025549375534057617, 0.025839263916015626, 0.025647680282592775, 0.02562784004211426, 0.025554943084716796, 0.025510719299316406, 0.02559404754638672, 0.025567039489746094, 0.025536512374877928, 0.025683967590332032, 0.025696128845214845, 0.025997440338134767, 0.02586419105529785, 0.025792768478393555, 0.02566649627685547, 0.025617376327514648, 0.025728511810302734, 0.025741024017333983, 0.02570732879638672, 0.025698112487792968, 0.0257392635345459, 0.02568806457519531, 0.025764991760253906, 0.025992063522338866, 0.02591744041442871, 0.02591744041442871, 0.025802751541137696, 0.025663423538208007, 0.02545952033996582, 0.02536025619506836, 0.025353984832763674, 0.025299455642700194, 0.025468799591064455, 0.025828800201416015, 0.025534528732299805, 0.02547110366821289, 0.02533932876586914, 0.025551807403564452, 0.025495552062988282, 0.025413631439208984, 0.02540460777282715, 0.025479135513305665, 0.025490400314331054, 
0.025456640243530275, 0.02542745590209961, 0.025411136627197267, 0.02551296043395996, 0.025404352188110352, 0.025459583282470704, 0.025438144683837892, 0.025761856079101562, 0.025455808639526366, 0.025395647048950195, 0.02546726417541504, 0.02555897521972656, 0.025404800415039064, 0.025491872787475587, 0.02562281608581543, 0.025503744125366212, 0.025417728424072264, 0.02547302436828613, 0.02557267189025879, 0.025582048416137697, 0.025526496887207033, 0.025604000091552736, 0.025622623443603516, 0.025605247497558593, 0.025602943420410158, 0.025570560455322265, 0.025688192367553712, 0.02556787109375, 0.02571628761291504, 0.025679935455322267, 0.02563520050048828, 0.025563135147094726, 0.02556707191467285, 0.0256691837310791, 0.025573471069335937, 0.02557798385620117, 0.025743392944335936, 0.025665407180786134, 0.025649248123168947, 0.02571468734741211, 0.025790464401245116, 0.025748863220214843, 0.025533056259155272, 0.02572083282470703, 0.025689504623413087, 0.025813600540161134, 0.025648799896240235]",tokens/s,39.033993682265645,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2174.373888,2194.604032,0.0,1816.133632,1727.29344,s,1,8.94433203125,8.94433203125,0.0,8.94433203125,8.94433203125,8.94433203125,8.94433203125,[8.94433203125],,kWh,5.511606295416034e-05,6.0725403847365365e-06,1.7106958129980976e-05,7.829556146887786e-05,,MB,2227.195904,2406.416384,0.0,1998.585856,1980.448768,s,10,3.2092273559570312,0.3209227355957031,0.0008386638202555853,0.3211023712158203,0.32185628967285157,0.32205032196044925,0.32220554779052735,"[0.3199826354980469, 0.32017123413085935, 0.31957742309570314, 0.32088507080078127, 0.32027764892578126, 0.32133026123046876, 0.3218131713867188, 0.3213196716308594, 0.32224435424804687, 0.32162588500976563]",tokens/s,797.6997937675178,kWh,9.385472596354085e-06,1.0347539388508268e-06,6.223433798187121e-06,1.6643660333392034e-05,tokens/kWh,15381231.944897924,MB,2235.76064,2597.257216,0.0,2189.426688,2078.022144,s,10,177.43118554687499,17.7431185546875,0.033526370132177866,17.758843749999997,17.7714302734375,17.77307646484375,17.77439341796875,"[17.675099609375, 17.694265625, 17.717541015625, 17.745, 17.7532890625, 17.7643984375, 17.769435546875, 17.766369140625, 17.771064453125, 17.77472265625]",tokens/s,3.5506723243618428,kWh,0.0005187076917844781,5.721722755934871e-05,0.0003448937915398119,0.0009208187108836387,tokens/kWh,68417.37603218744,,s,630,177.42690451049805,0.2816300071595207,0.0006598392608408472,0.2817473449707031,0.28242669372558593,0.28255672454833985,0.2828116159057617,"[0.2800423889160156, 0.2798489685058594, 0.27987677001953126, 0.28069677734375, 0.2809182739257812, 0.2799151611328125, 0.2804708557128906, 0.2804883117675781, 0.280727294921875, 0.28029803466796877, 0.2804562683105469, 0.2802956237792969, 0.28037606811523436, 0.2803502502441406, 0.2799678955078125, 0.28061727905273437, 0.27993701171875, 
0.28083200073242187, 0.28064358520507815, 0.27996954345703123, 0.2806561279296875, 0.28070816040039065, 0.28012542724609374, 0.2807080383300781, 0.2807132263183594, 0.28060049438476564, 0.28059661865234375, 0.2804834289550781, 0.2809849548339844, 0.28008346557617186, 0.28019439697265625, 0.28069955444335937, 0.28046044921875, 0.2801589660644531, 0.28077825927734373, 0.2805921020507812, 0.28040835571289063, 0.28039385986328125, 0.28098391723632815, 0.280465087890625, 0.28005825805664064, 0.28069232177734377, 0.2807298278808594, 0.2803466796875, 0.280330322265625, 0.2809014892578125, 0.2810062255859375, 0.2803957824707031, 0.2808667602539062, 0.28111810302734375, 0.2807504272460937, 0.28062753295898435, 0.28100579833984374, 0.28070892333984376, 0.2809819946289063, 0.28046047973632815, 0.2810714111328125, 0.2802329711914063, 0.2805138854980469, 0.28088592529296874, 0.28113006591796874, 0.2816296081542969, 0.28068658447265626, 0.281005859375, 0.28015133666992187, 0.2804008178710938, 0.28073983764648436, 0.28049612426757814, 0.28073321533203127, 0.28063790893554685, 0.28020941162109375, 0.2803833618164063, 0.2812335510253906, 0.28066201782226563, 0.2800002136230469, 0.28072726440429685, 0.2811516418457031, 0.2803818664550781, 0.2807459716796875, 0.28107571411132815, 0.2807296142578125, 0.280090087890625, 0.28065847778320313, 0.280848388671875, 0.2805125122070313, 0.2809405517578125, 0.2810262451171875, 0.28134841918945314, 0.28045413208007813, 0.28098153686523436, 0.2812651062011719, 0.28072549438476563, 0.28061270141601563, 0.2810942993164062, 0.28120046997070314, 0.2811516418457031, 0.28120883178710937, 0.28136788940429686, 0.28082965087890627, 0.2804236755371094, 0.28094329833984377, 0.2808046264648438, 0.2805627746582031, 0.28110296630859377, 0.28122930908203125, 0.2805247802734375, 0.2808934631347656, 0.28068658447265626, 0.2811058349609375, 0.28089788818359374, 0.2809177551269531, 0.2811558837890625, 0.2807339172363281, 0.28060467529296873, 0.28165310668945315, 0.2810533142089844, 0.280748046875, 0.2808401794433594, 0.28158770751953127, 0.28121279907226565, 0.2812069091796875, 0.2809117431640625, 0.28113461303710935, 0.2811109008789062, 0.2807441711425781, 0.2812673645019531, 0.2806640625, 0.2807945556640625, 0.28124627685546877, 0.28088262939453124, 0.281145751953125, 0.28130291748046876, 0.2816937255859375, 0.2803898620605469, 0.28093280029296874, 0.2809283447265625, 0.28078048706054687, 0.280913330078125, 0.2810458984375, 0.28148532104492185, 0.280922119140625, 0.2811125793457031, 0.28112240600585936, 0.2808047790527344, 0.2807490539550781, 0.28160614013671875, 0.281249755859375, 0.28072171020507813, 0.2812303771972656, 0.2810395812988281, 0.28151602172851564, 0.2808606872558594, 0.28136856079101563, 0.2807767028808594, 0.2813706359863281, 0.28103884887695313, 0.2810142822265625, 0.28121701049804687, 0.28157131958007814, 0.28107366943359374, 0.28137210083007813, 0.2811848449707031, 0.2810491027832031, 0.281143310546875, 0.28145895385742187, 0.28184756469726563, 0.28124978637695314, 0.2812653503417969, 0.28144723510742187, 0.2813807678222656, 0.2816123962402344, 0.28132147216796877, 0.2814075012207031, 0.28182464599609375, 0.28112857055664064, 0.2807162780761719, 0.2815958251953125, 0.2814337158203125, 0.28157586669921875, 0.2812951049804687, 0.28182913208007815, 0.2811023254394531, 0.2814849853515625, 0.2818131103515625, 0.2814261169433594, 0.2813286437988281, 0.28186517333984373, 0.2815753173828125, 0.2808505554199219, 0.2822403564453125, 0.2807236938476563, 0.2805497741699219, 
0.2815038146972656, 0.28116583251953126, 0.28076220703125, 0.2811209411621094, 0.2815037536621094, 0.28119210815429685, 0.28136688232421875, 0.28179364013671876, 0.28147393798828124, 0.2810163269042969, 0.2815672302246094, 0.281280517578125, 0.28149090576171876, 0.28075677490234374, 0.28159796142578125, 0.28178399658203124, 0.2808909912109375, 0.2817575073242187, 0.2825286865234375, 0.28195806884765623, 0.28188088989257815, 0.28218746948242185, 0.28228436279296876, 0.28108184814453124, 0.28176177978515626, 0.282838134765625, 0.2814873352050781, 0.2818338012695312, 0.2822354736328125, 0.28211404418945313, 0.28158770751953127, 0.2818495788574219, 0.28214300537109377, 0.2822717590332031, 0.28159591674804685, 0.2825904541015625, 0.2814175720214844, 0.2816391296386719, 0.2814712219238281, 0.2818414611816406, 0.2815576171875, 0.28091595458984375, 0.28136032104492187, 0.2823326416015625, 0.28136099243164064, 0.28139706420898436, 0.28150320434570314, 0.28204721069335936, 0.28181292724609375, 0.28150790405273435, 0.28198297119140625, 0.2825850830078125, 0.2817843322753906, 0.28202392578125, 0.28227609252929686, 0.28162753295898435, 0.28117041015625, 0.2816596374511719, 0.2821185607910156, 0.28147891235351563, 0.28207513427734376, 0.28206472778320313, 0.2810799560546875, 0.2822287292480469, 0.281380859375, 0.28096307373046875, 0.28201495361328127, 0.28191543579101563, 0.2808220825195312, 0.2812441101074219, 0.2820792236328125, 0.28129074096679685, 0.2807245483398437, 0.2816378173828125, 0.28133172607421875, 0.2814259338378906, 0.2809405517578125, 0.2820807189941406, 0.2818770446777344, 0.2812231750488281, 0.28140945434570314, 0.2818765869140625, 0.281585205078125, 0.2813772277832031, 0.28210791015625, 0.2815528869628906, 0.28134954833984377, 0.281118408203125, 0.28153668212890626, 0.2811440124511719, 0.281315185546875, 0.28166574096679686, 0.28222409057617187, 0.2818480529785156, 0.28228182983398437, 0.2820284729003906, 0.28187362670898436, 0.2818568115234375, 0.28186767578125, 0.28210015869140626, 0.2811656494140625, 0.2817580261230469, 0.2824253540039062, 0.2816795654296875, 0.28214822387695315, 0.282633056640625, 0.28246231079101564, 0.28224920654296876, 0.28219390869140626, 0.28176177978515626, 0.28234295654296876, 0.281440673828125, 0.28233282470703125, 0.28235589599609373, 0.28176202392578126, 0.28263742065429687, 0.2827757263183594, 0.28194482421875, 0.2814791564941406, 0.28246823120117187, 0.28238861083984373, 0.28188467407226564, 0.2819947509765625, 0.2818296813964844, 0.28161227416992185, 0.28164312744140624, 0.2824798583984375, 0.28089599609375, 0.2816155395507812, 0.282393310546875, 0.28163201904296875, 0.28096578979492187, 0.2822208251953125, 0.2825495910644531, 0.28141635131835935, 0.2813686218261719, 0.28233929443359373, 0.28234951782226564, 0.2814049377441406, 0.2820491027832031, 0.2822655944824219, 0.28181671142578124, 0.28181951904296876, 0.282474365234375, 0.2828248291015625, 0.28128375244140624, 0.2819653930664062, 0.28253378295898435, 0.28245318603515623, 0.28302224731445313, 0.2817261962890625, 0.2820779418945312, 0.28135592651367186, 0.28173553466796875, 0.2820218811035156, 0.2824163818359375, 0.281559814453125, 0.2818511962890625, 0.281985595703125, 0.28193911743164063, 0.2824345397949219, 0.2821079406738281, 0.28175128173828123, 0.28165960693359376, 0.28184490966796877, 0.2818196411132812, 0.2816590576171875, 0.28129541015625, 0.2822452087402344, 0.282148681640625, 0.28179647827148435, 0.2818431701660156, 0.281916259765625, 0.282071044921875, 0.28207308959960936, 
0.28224102783203125, 0.28240692138671875, 0.28170367431640625, 0.28196322631835935, 0.2822586975097656, 0.2821107482910156, 0.2816573486328125, 0.28200115966796874, 0.28276107788085936, 0.28166796875, 0.28264389038085935, 0.28165985107421876, 0.2816119079589844, 0.2818493041992188, 0.28241342163085936, 0.28160269165039065, 0.2821709289550781, 0.28233157348632815, 0.281638916015625, 0.2820690002441406, 0.2820444030761719, 0.2819522705078125, 0.2817003479003906, 0.28188671875, 0.2821754760742187, 0.28198440551757814, 0.28190985107421873, 0.28162771606445314, 0.28219027709960937, 0.2819363708496094, 0.28173867797851565, 0.2817145080566406, 0.2827763061523437, 0.2823818664550781, 0.282061279296875, 0.2822955322265625, 0.28185458374023437, 0.2820845947265625, 0.28183645629882814, 0.2820997009277344, 0.28219384765625, 0.2820178527832031, 0.28301669311523436, 0.2816857604980469, 0.2826753845214844, 0.28165997314453123, 0.2818326110839844, 0.28254217529296877, 0.28154898071289064, 0.2824791564941406, 0.28230215454101565, 0.282519287109375, 0.282200439453125, 0.2817508544921875, 0.28248358154296876, 0.2819420166015625, 0.2820316467285156, 0.2820674133300781, 0.2824532775878906, 0.2819747619628906, 0.281317626953125, 0.2821370849609375, 0.28193997192382814, 0.28199856567382814, 0.282194580078125, 0.28177011108398436, 0.2821119995117187, 0.2824540100097656, 0.2815442199707031, 0.28249545288085937, 0.2822952880859375, 0.281781005859375, 0.28232345581054685, 0.2816731872558594, 0.2817425842285156, 0.2815733032226562, 0.282046875, 0.28263876342773436, 0.2819246826171875, 0.280864990234375, 0.2823441467285156, 0.2817774047851562, 0.28158438110351564, 0.281775390625, 0.2825755920410156, 0.281280517578125, 0.28094049072265626, 0.28242132568359374, 0.2824335327148437, 0.2814668884277344, 0.28139007568359375, 0.28251776123046873, 0.28208615112304686, 0.2814786376953125, 0.28204290771484375, 0.28198822021484377, 0.28178521728515626, 0.28226739501953124, 0.2823375244140625, 0.2817307739257813, 0.28193624877929685, 0.28203347778320315, 0.2822744140625, 0.2821096496582031, 0.2826099548339844, 0.2821754760742187, 0.28191094970703123, 0.2817907104492188, 0.2819154968261719, 0.282218505859375, 0.28213824462890624, 0.2821349182128906, 0.282040283203125, 0.28201541137695313, 0.2820816345214844, 0.28174261474609374, 0.2815003967285156, 0.2821997375488281, 0.28205908203125, 0.2816632385253906, 0.282668701171875, 0.28182794189453125, 0.2821672973632813, 0.2818334655761719, 0.28256256103515626, 0.28206491088867186, 0.2816860046386719, 0.2823475341796875, 0.2824228515625, 0.28201397705078124, 0.2820582275390625, 0.2820000305175781, 0.2821888427734375, 0.28197781372070313, 0.28243557739257813, 0.28195431518554687, 0.28218572998046876, 0.28233468627929686, 0.28162115478515626, 0.2822348937988281, 0.2820526123046875, 0.2816813659667969, 0.28182583618164064, 0.2823434143066406, 0.28173226928710937, 0.2815312194824219, 0.2821590270996094, 0.28229574584960937, 0.2820245666503906, 0.28214068603515624, 0.282492919921875, 0.28185305786132814, 0.28190194702148436, 0.2820603332519531, 0.28267156982421876, 0.28136856079101563, 0.282112060546875, 0.2824962463378906, 0.2821064453125, 0.2815693359375, 0.28216445922851563, 0.28250607299804686, 0.281864013671875, 0.28174533081054687, 0.2819934692382812, 0.281740478515625, 0.28147732543945314, 0.2822243957519531, 0.28174221801757815, 0.2820280456542969, 0.28201895141601563, 0.2824937744140625, 0.2817577514648438, 0.28169625854492186, 0.28246426391601565, 0.282492919921875, 
0.2821160888671875, 0.282071044921875, 0.2819317626953125, 0.281697998046875, 0.281837890625, 0.2820362243652344, 0.2827120666503906, 0.28151602172851564, 0.28276840209960935, 0.28217239379882814, 0.2814392395019531, 0.2822522888183594, 0.2821119995117187, 0.2827202453613281, 0.28199502563476564, 0.28225689697265627, 0.2823175354003906, 0.2820803833007812, 0.28190399169921876, 0.28221826171875, 0.2828515625, 0.2820765380859375, 0.2819029235839844, 0.2825469970703125, 0.281478759765625, 0.28158453369140624, 0.2819154052734375, 0.2817404479980469, 0.2814493103027344, 0.2822504272460937, 0.2821331787109375, 0.2818151550292969, 0.281635986328125, 0.2826691589355469, 0.28135467529296876, 0.28190548706054686, 0.28174935913085936, 0.2820091247558594, 0.2818586730957031, 0.28164913940429687, 0.2826322021484375, 0.2821663818359375, 0.2818118591308594, 0.2821775207519531, 0.28203826904296875, 0.2821048278808594, 0.2819526672363281, 0.28202249145507813, 0.28223480224609376, 0.2818842163085937, 0.28232101440429686, 0.28194351196289064, 0.28260369873046876, 0.2824259338378906, 0.28248236083984374, 0.28222836303710935, 0.2819715576171875, 0.2824027404785156, 0.2820341796875, 0.281385009765625, 0.28212225341796876, 0.2824920349121094, 0.28204327392578127, 0.28189816284179686, 0.282149658203125, 0.28204217529296877, 0.28256689453125, 0.2821114807128906, 0.2821842041015625, 0.2820157470703125, 0.2827670288085937, 0.28207037353515624, 0.28220645141601564, 0.28214492797851565, 0.28206887817382814, 0.28248272705078126, 0.28248028564453126, 0.2826987609863281, 0.2822668762207031, 0.282213134765625, 0.28295150756835935, 0.282881591796875, 0.2827792663574219, 0.2821653747558594, 0.2824528503417969, 0.2820559387207031, 0.281964599609375]",tokens/s,3.550757996585146,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3900, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 229, in convert_model self._replace_by_quant_layers(model, layers_to_be_replaced) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 299, in _replace_by_quant_layers self._replace_by_quant_layers(child, names, name + ""."" + name1 if name != """" else name1) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 299, in _replace_by_quant_layers self._replace_by_quant_layers(child, names, name + ""."" + name1 if name != """" else name1) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 299, in _replace_by_quant_layers self._replace_by_quant_layers(child, names, name + ""."" + name1 if name != """" else name1) [Previous line repeated 1 more time] File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 283, in _replace_by_quant_layers new_layer = QuantLinear( File ""/usr/local/lib/python3.10/dist-packages/auto_gptq/nn_modules/qlinear/qlinear_exllama.py"", line 72, in __init__ assert infeatures % self.group_size == 0 AssertionError ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4061.691904,4705.878016,0.0,4303.355904,4034.388992,s,1,10.7391796875,10.7391796875,0.0,10.7391796875,10.7391796875,10.7391796875,10.7391796875,[10.7391796875],,kWh,9.525886393333469e-05,1.0496882857772283e-05,2.8545856170007766e-05,0.00013430160296111474,,MB,4154.55232,4764.598272,0.0,4347.396096,4202.697728,s,10,1.9787164916992188,0.1978716491699219,0.0005553750929798312,0.19780340576171873,0.19861822357177736,0.1986799461364746,0.19872932418823241,"[0.19705722045898438, 0.19789520263671875, 0.1976128692626953, 0.19874166870117188, 0.19717079162597656, 0.1980823974609375, 0.19771160888671874, 0.19742329406738282, 0.19860450744628907, 0.19841693115234374]",tokens/s,1293.767960563974,kWh,5.824128305310927e-06,6.419191707452558e-07,3.852617461176546e-06,1.031866493723273e-05,tokens/kWh,24809411.058234666,MB,4159.46752,4764.598272,0.0,4347.396096,4213.085184,s,10,19.511509643554685,1.9511509643554685,0.005511864678501041,1.9530518188476562,1.9562375854492187,1.957585223388672,1.9586633337402344,"[1.958932861328125, 1.94563623046875, 1.9487459716796875, 1.9559381103515625, 1.952301513671875, 1.9404990234375, 1.954328125, 1.945576171875, 1.95574951171875, 1.9538021240234376]",tokens/s,32.288634324515755,kWh,5.627365179802198e-05,6.207168337220454e-06,3.724388763822345e-05,9.972470777346589e-05,tokens/kWh,631739.1287133221,,s,630,19.5088600730896,0.030966444560459685,0.0004869214326538801,0.030853072166442872,0.031332563591003414,0.03165189123153687,0.032890039443969746,"[0.03195817565917969, 0.031091360092163085, 0.031065792083740235, 0.031233728408813475, 0.031046176910400392, 0.03130409622192383, 0.031102848052978516, 0.030847103118896484, 0.03085312080383301, 0.030878816604614258, 0.030892959594726564, 0.03081625556945801, 0.03094937515258789, 0.030779071807861328, 0.031050048828125, 0.03079952049255371, 0.03096342468261719, 0.030681215286254882, 0.031336799621582034, 0.031185056686401365, 0.030881792068481444, 0.031406080245971676, 0.030803520202636717, 0.030849536895751952, 0.03086534309387207, 0.03120947265625, 0.03145244789123535, 0.0321952018737793, 0.031739200592041016, 0.031124448776245116, 0.031124799728393555, 0.03087808036804199, 0.031051136016845702, 0.03096467208862305, 0.031136991500854493, 0.03134524726867676, 0.030870880126953125, 0.030919328689575195, 0.030939136505126953, 0.031166559219360353, 0.03090940856933594, 0.030989248275756835, 0.031220895767211914, 0.03106928062438965, 0.031145248413085937, 0.031513055801391604, 0.031176288604736327, 0.031333887100219726, 0.030962112426757813, 0.030967296600341795, 0.030887935638427736, 0.031070240020751955, 0.031024063110351562, 0.031102432250976562, 0.030806047439575195, 0.03109324836730957, 0.030971967697143554, 0.03113113594055176, 0.030941408157348634, 0.030888160705566405, 0.031053407669067383, 0.030787776947021485, 0.03186870384216309, 0.03152751922607422, 0.031336448669433595, 0.030840799331665038, 0.030783647537231444, 0.030803520202636717, 0.03095327949523926, 0.030759424209594727, 0.030926847457885744, 0.03097536087036133, 0.03097248077392578, 0.030822048187255858, 0.030728832244873047, 0.030879520416259767, 0.03111257553100586, 0.03108518409729004, 0.030756864547729492, 0.030682592391967772, 0.03079007911682129, 0.03204924774169922, 0.030679136276245116, 0.030705856323242187, 0.03066441535949707, 0.030717727661132812, 0.031021280288696287, 0.030840831756591795, 0.03058687973022461, 0.030701568603515625, 0.030679264068603516, 0.03069014358520508, 
0.030593984603881835, 0.03059916877746582, 0.030740480422973632, 0.03079097557067871, 0.031130304336547853, 0.030785312652587892, 0.03075708770751953, 0.03077939224243164, 0.03250790405273438, 0.030877824783325195, 0.030936704635620118, 0.030855424880981447, 0.030705663681030275, 0.030699520111083983, 0.03068070411682129, 0.030835071563720704, 0.03075071907043457, 0.03089344024658203, 0.03075462341308594, 0.030620479583740236, 0.030817440032958984, 0.030623584747314452, 0.030816415786743163, 0.03089206314086914, 0.030823232650756836, 0.03077635192871094, 0.030950368881225584, 0.031033344268798828, 0.030875648498535156, 0.030910463333129884, 0.030887903213500975, 0.031022111892700197, 0.030800735473632813, 0.030774656295776366, 0.03144281578063965, 0.030845056533813475, 0.03077494430541992, 0.030869855880737304, 0.030842880249023437, 0.03099443244934082, 0.03092633628845215, 0.031043264389038087, 0.030995424270629884, 0.03132512092590332, 0.030839712142944335, 0.031473663330078124, 0.03224899291992187, 0.03083964729309082, 0.03078144073486328, 0.030768831253051757, 0.030992671966552733, 0.03075494384765625, 0.030578399658203127, 0.032129215240478515, 0.03077324867248535, 0.031074016571044923, 0.030764640808105467, 0.03071251106262207, 0.03079167938232422, 0.030600351333618166, 0.030767488479614257, 0.030841279983520507, 0.03145116806030274, 0.030607295989990235, 0.030836511611938476, 0.031131935119628907, 0.030760959625244142, 0.030935232162475585, 0.030911455154418944, 0.030988479614257814, 0.03100864028930664, 0.031154975891113282, 0.030732288360595703, 0.03061497688293457, 0.030863040924072264, 0.030866079330444336, 0.03079395294189453, 0.031078399658203124, 0.030742368698120116, 0.030826112747192384, 0.031048288345336916, 0.030822336196899416, 0.030853055953979493, 0.030964895248413084, 0.030665632247924804, 0.03092889595031738, 0.030815711975097658, 0.031078464508056642, 0.030706111907958984, 0.030764863967895507, 0.0307488956451416, 0.030629344940185547, 0.030732831954956054, 0.030966976165771484, 0.03073721694946289, 0.03116966438293457, 0.03075302314758301, 0.03171331214904785, 0.030852928161621093, 0.030664800643920898, 0.030670944213867186, 0.031028736114501954, 0.03069593620300293, 0.030764320373535155, 0.03067363166809082, 0.03074662399291992, 0.030657983779907225, 0.03081068801879883, 0.03042508888244629, 0.030694944381713867, 0.03064691162109375, 0.03072585678100586, 0.030656639099121093, 0.030640031814575194, 0.030631071090698243, 0.030616512298583986, 0.030644224166870116, 0.030506143569946288, 0.030706527709960938, 0.0318353271484375, 0.030745407104492188, 0.031082431793212892, 0.030697120666503906, 0.030902687072753905, 0.03062579154968262, 0.03117670440673828, 0.030725919723510742, 0.03075916862487793, 0.030939071655273438, 0.03072003173828125, 0.031217695236206055, 0.030836095809936525, 0.03089059257507324, 0.030960992813110353, 0.030855199813842774, 0.030679679870605468, 0.030752447128295897, 0.03075836753845215, 0.030640096664428712, 0.030847871780395508, 0.030631935119628906, 0.030757951736450195, 0.030843839645385743, 0.030877088546752928, 0.030857824325561525, 0.030926240921020507, 0.03090083122253418, 0.03078278350830078, 0.03390534210205078, 0.031000448226928712, 0.03077132797241211, 0.030904319763183592, 0.030811616897583008, 0.03128988838195801, 0.03801436614990234, 0.03303689575195313, 0.030892032623291016, 0.03172147178649903, 0.03096598434448242, 0.030957279205322267, 0.03240240097045898, 0.031197183609008788, 0.031020767211914064, 0.030832927703857423, 
0.030903871536254884, 0.030925247192382814, 0.031039487838745116, 0.03094528007507324, 0.031078464508056642, 0.031143871307373047, 0.031177888870239256, 0.03158412742614746, 0.033420257568359375, 0.031332416534423826, 0.031028160095214842, 0.031103679656982422, 0.03135724830627441, 0.031162368774414063, 0.03122790336608887, 0.03149007987976074, 0.031895519256591794, 0.030873600006103515, 0.030943168640136718, 0.030801984786987306, 0.030621696472167968, 0.030699520111083983, 0.030635135650634766, 0.030732223510742188, 0.03066566467285156, 0.030724096298217773, 0.0307589111328125, 0.03118694305419922, 0.031164415359497072, 0.03070534324645996, 0.030644544601440428, 0.030608415603637695, 0.030565343856811523, 0.03062950325012207, 0.030726528167724608, 0.030729440689086913, 0.03079363250732422, 0.030915456771850584, 0.030682527542114257, 0.031133535385131837, 0.03087027168273926, 0.03080806350708008, 0.030648223876953123, 0.030649951934814453, 0.0307238712310791, 0.030854143142700196, 0.030791391372680665, 0.03060704040527344, 0.03142278480529785, 0.03073843193054199, 0.030810111999511718, 0.03080601692199707, 0.03077939224243164, 0.030871360778808594, 0.030945823669433593, 0.030849727630615234, 0.03106275177001953, 0.030865312576293946, 0.030730592727661134, 0.03155891227722168, 0.03096633529663086, 0.030879615783691406, 0.030882112503051756, 0.030834688186645507, 0.030905567169189452, 0.030871360778808594, 0.030829536437988282, 0.030831840515136717, 0.03078544044494629, 0.03100556755065918, 0.03075071907043457, 0.030692480087280274, 0.03085308837890625, 0.030697664260864257, 0.031139583587646485, 0.03104252815246582, 0.030701471328735352, 0.03077459144592285, 0.03073481559753418, 0.03094764709472656, 0.031322111129760744, 0.03083263969421387, 0.03097542381286621, 0.030844831466674806, 0.03073855972290039, 0.030923391342163088, 0.03074399948120117, 0.030624223709106446, 0.030861312866210938, 0.03070697593688965, 0.030860000610351563, 0.030500864028930662, 0.03057868766784668, 0.030672895431518556, 0.030566463470458983, 0.030674911499023436, 0.030719039916992187, 0.03066464042663574, 0.03082748794555664, 0.030846975326538087, 0.030826496124267577, 0.03059712028503418, 0.030650016784667968, 0.030537919998168947, 0.030736543655395507, 0.03058393669128418, 0.031011808395385743, 0.03084867286682129, 0.03059459114074707, 0.030591712951660157, 0.030570463180541994, 0.030595199584960937, 0.030658464431762695, 0.030670528411865235, 0.030769472122192384, 0.030844480514526366, 0.030663295745849608, 0.030641664505004884, 0.03126012802124024, 0.030847936630249023, 0.030840799331665038, 0.030719776153564454, 0.03141980743408203, 0.03064918327331543, 0.030661632537841797, 0.030559423446655274, 0.03075872039794922, 0.030533632278442382, 0.03076710319519043, 0.03061961555480957, 0.030668832778930663, 0.030736383438110353, 0.03063158416748047, 0.031232351303100585, 0.03168582344055176, 0.03172390365600586, 0.033409473419189456, 0.03101286315917969, 0.03096944046020508, 0.03079376029968262, 0.03110540771484375, 0.03096575927734375, 0.030909759521484375, 0.030823104858398436, 0.031135744094848632, 0.03086150360107422, 0.03098944091796875, 0.030836767196655273, 0.030894847869873048, 0.031127456665039063, 0.030920703887939452, 0.031645696640014646, 0.03080745506286621, 0.030786239624023437, 0.03085040092468262, 0.03093337631225586, 0.031069631576538085, 0.03082316780090332, 0.03099238395690918, 0.030924224853515626, 0.03084284782409668, 0.03079251289367676, 0.0309716796875, 0.030957120895385742, 
0.03075712013244629, 0.031589632034301755, 0.030892768859863282, 0.030968032836914062, 0.030752767562866212, 0.03069673538208008, 0.030759647369384767, 0.03091164779663086, 0.03081507110595703, 0.031068159103393556, 0.030971904754638672, 0.03073420715332031, 0.0314429759979248, 0.030821855545043946, 0.03075695991516113, 0.03318960189819336, 0.031125535964965822, 0.030939775466918944, 0.03142419242858887, 0.030841312408447265, 0.03080918312072754, 0.0318351993560791, 0.031154943466186524, 0.03103558349609375, 0.0313753604888916, 0.03152905654907227, 0.0310086727142334, 0.030979455947875975, 0.03117695999145508, 0.031417951583862305, 0.03096041679382324, 0.03104374313354492, 0.031018848419189452, 0.03075174331665039, 0.030756927490234374, 0.03147049522399902, 0.030686559677124022, 0.030662975311279296, 0.03125299263000488, 0.03073420715332031, 0.030561887741088867, 0.030947519302368165, 0.030912736892700195, 0.03066796875, 0.03069011116027832, 0.03080294418334961, 0.03098521614074707, 0.03087945556640625, 0.030777503967285156, 0.03047542381286621, 0.03081110382080078, 0.03109280014038086, 0.030666400909423828, 0.03044380760192871, 0.030764095306396483, 0.030649280548095702, 0.03040870475769043, 0.030533567428588867, 0.030760671615600584, 0.03058723258972168, 0.03087398338317871, 0.030663583755493166, 0.030663007736206054, 0.03064672088623047, 0.030650304794311523, 0.03058687973022461, 0.03060531234741211, 0.031145984649658204, 0.030752767562866212, 0.030729536056518555, 0.031177536010742187, 0.03120524787902832, 0.03078758430480957, 0.031295135498046876, 0.030784095764160156, 0.030721439361572265, 0.030791296005249023, 0.030689311981201173, 0.030945951461791993, 0.030994464874267578, 0.030889440536499023, 0.030822944641113282, 0.03084217643737793, 0.03076576042175293, 0.031694719314575195, 0.030847103118896484, 0.03080806350708008, 0.030930944442749023, 0.03134896087646484, 0.030936767578125, 0.03080611228942871, 0.030940864562988283, 0.030779455184936525, 0.031253791809082034, 0.030890975952148438, 0.031094783782958983, 0.030971904754638672, 0.031102975845336913, 0.03080396842956543, 0.030689279556274415, 0.030909919738769533, 0.03076355171203613, 0.03090163230895996, 0.030734975814819335, 0.030904319763183592, 0.031074304580688477, 0.03135481643676758, 0.030862848281860353, 0.031038015365600587, 0.030906368255615234, 0.03072585678100586, 0.030759199142456055, 0.030672895431518556, 0.030783008575439454, 0.03081062316894531, 0.03104102325439453, 0.030826400756835938, 0.03227414321899414, 0.03253049468994141, 0.030892768859863282, 0.030851104736328124, 0.030875680923461914, 0.03064393615722656, 0.030710304260253906, 0.030815776824951173, 0.030832864761352538, 0.030697471618652345, 0.03082569694519043, 0.030789888381958008, 0.03117625617980957, 0.03197622489929199, 0.03072751998901367, 0.030829408645629882, 0.030713888168334962, 0.03104921531677246, 0.030810592651367187, 0.030818304061889647, 0.030793407440185546, 0.03082784080505371, 0.03093190383911133, 0.030740543365478514, 0.03214303970336914, 0.034051551818847656, 0.03088060760498047, 0.030838016510009766, 0.03165695953369141, 0.030807167053222655, 0.031691648483276366, 0.030854879379272462, 0.030879936218261718, 0.03125862312316895, 0.031264768600463864, 0.03078144073486328, 0.030712928771972656, 0.030956447601318358, 0.03139798355102539, 0.030584991455078123, 0.030623712539672852, 0.030590751647949218, 0.030725631713867187, 0.030679296493530274, 0.030628095626831053, 0.03079167938232422, 0.030807199478149413, 0.030702432632446288, 
0.03100271987915039, 0.03121552085876465, 0.030869600296020507, 0.030853279113769533, 0.03083852767944336, 0.03101081657409668, 0.031010175704956056, 0.031197824478149415, 0.031231264114379882, 0.031536928176879885, 0.0312159366607666, 0.031064704895019533, 0.031088640213012695, 0.031146144866943358, 0.031270751953125, 0.03140220832824707, 0.03132393646240234, 0.03089401626586914, 0.031612255096435546, 0.03251001739501953, 0.03115679931640625, 0.031060064315795898, 0.031715328216552735, 0.03108448028564453, 0.031258399963378904, 0.030678655624389647, 0.030902944564819335, 0.030765056610107422, 0.030877695083618165, 0.030959232330322266, 0.030852800369262696, 0.030812416076660156, 0.030730688095092773, 0.030967103958129884, 0.030782495498657227, 0.03105859184265137, 0.031161312103271485, 0.03089001655578613, 0.030856672286987304, 0.03061814308166504, 0.030881696701049805, 0.031203008651733397, 0.030750272750854492, 0.03052630424499512, 0.03075071907043457]",tokens/s,32.29301956340432,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1389.932544,1321.074688,0.0,918.552576,869.835264,s,1,8.45362109375,8.45362109375,0.0,8.45362109375,8.45362109375,8.45362109375,8.45362109375,[8.45362109375],,kWh,3.130091515831736e-05,3.4453613864132754e-06,8.663062486008455e-06,4.340933903073909e-05,,MB,1517.654016,1503.526912,0.0,1086.324736,1012.78208,s,10,0.4862799301147461,0.048627993011474614,0.00010955215157931049,0.048595167160034175,0.048729131698608395,0.048823365211486815,0.04889875202178955,"[0.048917598724365234, 0.048592159271240234, 0.048515361785888673, 0.0485975341796875, 0.04859280014038086, 0.04861356735229492, 0.04855849456787109, 0.04864672088623047, 0.04870819091796875, 0.04853750228881836]",tokens/s,5264.457448194343,kWh,1.462156466895749e-06,1.6117475720820766e-07,9.65453550140008e-07,2.588784774243965e-06,tokens/kWh,98888097.05115901,MB,1528.0128,1505.624064,0.0,1088.421888,1018.797568,s,10,10.674499877929685,1.0674499877929686,0.004986953341745047,1.067750244140625,1.0743638549804688,1.0753898132324218,1.0762105798339843,"[1.0606995849609375, 1.070240478515625, 1.065214111328125, 1.0675875244140625, 1.0741358642578125, 1.0679129638671876, 1.061115478515625, 1.076415771484375, 1.0625914306640625, 1.068586669921875]",tokens/s,59.01915848091126,kWh,3.1031275760188e-05,3.4223476885488405e-06,1.4795225725060742e-05,4.924884917379758e-05,tokens/kWh,1279217.7087768095,,s,630,10.670662437438963,0.016937559424506293,0.0003525072466907942,0.01687499237060547,0.017205340766906736,0.017347964859008788,0.018099694061279297,"[0.01677395248413086, 0.01719455909729004, 0.017219551086425783, 0.01690470314025879, 0.01662723159790039, 0.016707487106323242, 0.01671347236633301, 0.01676985549926758, 0.016699392318725585, 0.017113088607788086, 0.01713705635070801, 0.017076831817626953, 0.01717043113708496, 0.017121280670166016, 0.017032896041870117, 
0.017119552612304686, 0.016627199172973634, 0.01645516777038574, 0.016454656600952147, 0.016449472427368165, 0.01650284767150879, 0.016846080780029297, 0.01685171127319336, 0.0167587833404541, 0.016712959289550782, 0.01673865509033203, 0.01668134307861328, 0.01661939239501953, 0.016599103927612303, 0.01661756706237793, 0.016594207763671875, 0.016886463165283205, 0.017224863052368165, 0.017133760452270507, 0.016992767333984374, 0.016852800369262694, 0.016855199813842772, 0.016906463623046875, 0.016956832885742186, 0.016857311248779296, 0.016898431777954102, 0.01689766311645508, 0.01692095947265625, 0.01680179214477539, 0.0167521915435791, 0.01681987190246582, 0.016857887268066408, 0.016977920532226562, 0.01691971206665039, 0.016861440658569336, 0.01691708755493164, 0.01668832015991211, 0.016783424377441406, 0.016864000320434572, 0.016736255645751954, 0.0166312313079834, 0.016620223999023437, 0.016931968688964842, 0.016783487319946288, 0.016783935546875, 0.016848960876464845, 0.016670816421508788, 0.01672956848144531, 0.016912384033203123, 0.016877567291259766, 0.016791263580322267, 0.016744735717773438, 0.016678047180175782, 0.016660320281982423, 0.01683331108093262, 0.01677948760986328, 0.016733856201171876, 0.016829984664916992, 0.01697475242614746, 0.016883615493774415, 0.016750591278076172, 0.0166395206451416, 0.01657699203491211, 0.01696352005004883, 0.016750656127929687, 0.01675468826293945, 0.0169880313873291, 0.01679782485961914, 0.016737600326538087, 0.016955839157104493, 0.016967391967773436, 0.016919040679931642, 0.01695737648010254, 0.016973920822143555, 0.01696767997741699, 0.017123327255249024, 0.01755264091491699, 0.017210111618041993, 0.01702911949157715, 0.01685468864440918, 0.01678371238708496, 0.016868928909301757, 0.017342367172241212, 0.016941600799560547, 0.016861183166503906, 0.016740352630615234, 0.016756736755371093, 0.016807519912719726, 0.016822687149047853, 0.016943103790283204, 0.01673356819152832, 0.016789440155029298, 0.016896703720092773, 0.01698102378845215, 0.017150463104248045, 0.016842752456665038, 0.016817983627319337, 0.016811904907226564, 0.016960287094116212, 0.01676288032531738, 0.016670719146728515, 0.01666662406921387, 0.019569664001464843, 0.020593664169311524, 0.017123327255249024, 0.01702252769470215, 0.01705824089050293, 0.016902143478393555, 0.016755903244018554, 0.01691526412963867, 0.016835872650146484, 0.01676288032531738, 0.01679961585998535, 0.016637664794921875, 0.016794015884399414, 0.01691200065612793, 0.01688819122314453, 0.017272031784057618, 0.016736095428466796, 0.017001087188720704, 0.01671379280090332, 0.016707040786743163, 0.016642847061157227, 0.016764928817749023, 0.016728063583374024, 0.01679155158996582, 0.016656383514404297, 0.01665023994445801, 0.01667852783203125, 0.01676736068725586, 0.0168407039642334, 0.016793184280395508, 0.016814432144165038, 0.018006080627441405, 0.016971872329711913, 0.016747808456420897, 0.01670591926574707, 0.01680112075805664, 0.01680476760864258, 0.017039360046386717, 0.01677107238769531, 0.016676864624023437, 0.016736255645751954, 0.01702707290649414, 0.017113088607788086, 0.016986112594604492, 0.01692620849609375, 0.016809728622436522, 0.016771839141845702, 0.016672767639160157, 0.016703487396240235, 0.016901376724243165, 0.016816896438598634, 0.01690969657897949, 0.017853055953979492, 0.016993919372558595, 0.017123136520385742, 0.017289152145385744, 0.017187456130981445, 0.016866880416870116, 0.017111488342285156, 0.017117183685302736, 0.016965631484985352, 0.01718659210205078, 
0.01701487922668457, 0.01699033546447754, 0.016881343841552734, 0.01683692741394043, 0.016901376724243165, 0.01703193664550781, 0.01690777587890625, 0.016742111206054688, 0.016736127853393554, 0.016877504348754884, 0.017125280380249023, 0.017015296936035155, 0.01700387191772461, 0.01698883247375488, 0.01678745651245117, 0.016764928817749023, 0.016705535888671876, 0.01676697540283203, 0.01682598304748535, 0.016916767120361328, 0.016846656799316406, 0.017090688705444335, 0.01673846435546875, 0.016868896484375, 0.017263071060180664, 0.016961536407470702, 0.016672767639160157, 0.016682559967041016, 0.016738624572753907, 0.017889408111572264, 0.020936704635620116, 0.017176576614379883, 0.01709984016418457, 0.016970687866210938, 0.01675872039794922, 0.01683667182922363, 0.016844383239746095, 0.016799232482910157, 0.016716415405273438, 0.016773408889770507, 0.016656383514404297, 0.016693023681640624, 0.016767200469970704, 0.01699225616455078, 0.016773120880126953, 0.016754112243652343, 0.01663814353942871, 0.016992639541625977, 0.016762720108032227, 0.016778879165649414, 0.01711568069458008, 0.016653600692749022, 0.01685286331176758, 0.01676358413696289, 0.016869312286376954, 0.016707807540893554, 0.01662566375732422, 0.01664614486694336, 0.017128511428833006, 0.017050559997558595, 0.017074176788330078, 0.01753647994995117, 0.016693792343139648, 0.01665180778503418, 0.016672351837158202, 0.016710527420043947, 0.016640064239501953, 0.016740480422973634, 0.016703296661376953, 0.01663385581970215, 0.01690547180175781, 0.017218303680419923, 0.017184768676757813, 0.01693519973754883, 0.01696329689025879, 0.016954656600952148, 0.016863584518432617, 0.01675916862487793, 0.016629056930541994, 0.01680659294128418, 0.016982015609741212, 0.017069631576538086, 0.0170417594909668, 0.01680803108215332, 0.016725215911865234, 0.016708383560180663, 0.016768480300903322, 0.016976415634155275, 0.016893951416015626, 0.016711679458618164, 0.01683807945251465, 0.016723936080932617, 0.016854719161987306, 0.01680067253112793, 0.01676288032531738, 0.016764799118041993, 0.016813791275024415, 0.01691484832763672, 0.016959487915039064, 0.016945152282714843, 0.01683839988708496, 0.01659929656982422, 0.01660700798034668, 0.017047008514404296, 0.01684883117675781, 0.016856992721557617, 0.016855167388916015, 0.017094688415527345, 0.01695961570739746, 0.016892223358154296, 0.016945472717285158, 0.01714995193481445, 0.01702707290649414, 0.017059711456298827, 0.01741584014892578, 0.017274463653564453, 0.017170976638793946, 0.017267040252685547, 0.017210592269897462, 0.01719766426086426, 0.019157024383544923, 0.018157535552978516, 0.017167552947998047, 0.017142784118652343, 0.01720524787902832, 0.017175968170166016, 0.0171812801361084, 0.017121280670166016, 0.017072128295898437, 0.01745052719116211, 0.017420000076293945, 0.017399839401245117, 0.017356895446777345, 0.01725913619995117, 0.01705369567871094, 0.017039007186889648, 0.017823936462402344, 0.01738956832885742, 0.01717862319946289, 0.016989696502685548, 0.017011199951171875, 0.017041183471679686, 0.0168690242767334, 0.01681670379638672, 0.016895999908447267, 0.016923839569091798, 0.017101631164550782, 0.01708582305908203, 0.01686387252807617, 0.016876768112182618, 0.017042207717895507, 0.017063936233520507, 0.016833536148071288, 0.016705568313598634, 0.016683296203613283, 0.01696019172668457, 0.017024959564208984, 0.01709881591796875, 0.017160192489624023, 0.01683456039428711, 0.016869375228881836, 0.016885311126708984, 0.01714364814758301, 0.016759519577026368, 
0.016639711380004883, 0.016653888702392577, 0.01692086410522461, 0.017066303253173827, 0.01686467170715332, 0.016725759506225586, 0.0169911994934082, 0.016872800827026368, 0.016902687072753907, 0.0167956485748291, 0.01676288032531738, 0.0167956485748291, 0.016852895736694337, 0.016945247650146485, 0.01707827186584473, 0.01699839973449707, 0.017462944030761717, 0.017747488021850586, 0.017081119537353515, 0.017102880477905272, 0.01698147201538086, 0.016908832550048828, 0.01686502456665039, 0.016744224548339844, 0.016933343887329103, 0.016850048065185547, 0.016913280487060547, 0.016871423721313478, 0.01675052833557129, 0.016799808502197266, 0.016860864639282228, 0.01682259178161621, 0.01675609588623047, 0.016622207641601563, 0.016740352630615234, 0.0172061767578125, 0.01700966453552246, 0.01682156753540039, 0.016771551132202148, 0.016903615951538085, 0.016639999389648438, 0.01656707191467285, 0.016508928298950197, 0.01664134407043457, 0.016659135818481444, 0.016846847534179688, 0.016731552124023438, 0.01667132759094238, 0.01663385581970215, 0.01657241630554199, 0.016918527603149415, 0.016590848922729492, 0.016717824935913086, 0.016710784912109374, 0.01667571258544922, 0.016910335540771485, 0.01678704071044922, 0.016736064910888672, 0.01667888069152832, 0.01692310333251953, 0.016770624160766603, 0.01668771171569824, 0.01666662406921387, 0.01666662406921387, 0.016660480499267577, 0.01681814384460449, 0.017106016159057616, 0.01808460807800293, 0.018105855941772463, 0.01700320053100586, 0.016910335540771485, 0.01696668815612793, 0.016816160202026368, 0.016760831832885743, 0.016730720520019532, 0.016980255126953125, 0.016963647842407226, 0.01678505516052246, 0.01683839988708496, 0.016628192901611327, 0.016867456436157228, 0.016959487915039064, 0.01681599998474121, 0.016674943923950195, 0.01658470344543457, 0.016532928466796874, 0.016886335372924804, 0.016790847778320312, 0.01696566390991211, 0.016815967559814453, 0.01694803237915039, 0.01694220733642578, 0.016966047286987303, 0.01686284828186035, 0.017034080505371092, 0.01688278388977051, 0.01675152015686035, 0.016704608917236328, 0.016992544174194334, 0.016917024612426758, 0.016861183166503906, 0.017025279998779296, 0.01705548858642578, 0.01681612777709961, 0.016672767639160157, 0.016760831832885743, 0.01675468826293945, 0.016881664276123046, 0.01783568000793457, 0.018483104705810546, 0.01699065589904785, 0.016846303939819337, 0.017352544784545898, 0.01714860725402832, 0.01687321662902832, 0.01680614471435547, 0.0166748161315918, 0.016680959701538087, 0.016777215957641603, 0.01681203269958496, 0.016547359466552735, 0.016791072845458985, 0.01656684875488281, 0.01659123229980469, 0.01659903907775879, 0.016740352630615234, 0.016885759353637696, 0.0171909122467041, 0.017258495330810548, 0.016987327575683595, 0.01696963119506836, 0.016956319808959962, 0.017139711380004884, 0.01720729637145996, 0.017258495330810548, 0.017237728118896484, 0.017180959701538087, 0.017165567398071287, 0.017062047958374023, 0.017035871505737304, 0.017125343322753905, 0.017338399887084962, 0.01741209602355957, 0.017082368850708008, 0.017293312072753905, 0.0174071044921875, 0.01725119972229004, 0.017212543487548828, 0.017341312408447267, 0.01740390396118164, 0.01724553680419922, 0.017373855590820313, 0.017230943679809572, 0.017219520568847655, 0.01699318313598633, 0.017440256118774415, 0.01736147117614746, 0.017341856002807618, 0.017269344329833985, 0.01705779266357422, 0.017127424240112304, 0.017092607498168946, 0.017202239990234375, 0.017013248443603517, 
0.01692825508117676, 0.01675315284729004, 0.016634304046630858, 0.016977920532226562, 0.017001855850219728, 0.017021568298339843, 0.016832447052001952, 0.016915903091430665, 0.016855680465698242, 0.016860448837280273, 0.01709539222717285, 0.01690380859375, 0.016959871292114258, 0.016691200256347655, 0.01678335952758789, 0.017044960021972658, 0.016961408615112306, 0.016933536529541014, 0.016764928817749023, 0.01679088020324707, 0.016716447830200196, 0.01706719970703125, 0.017074464797973633, 0.016938720703125, 0.016892736434936523, 0.016792863845825196, 0.016857568740844726, 0.017096160888671875, 0.017097503662109374, 0.01704960060119629, 0.016924671173095703, 0.016736255645751954, 0.01670140838623047, 0.017007680892944337, 0.0168703670501709, 0.017110048294067384, 0.016986528396606446, 0.016665151596069336, 0.016664575576782227, 0.016846847534179688, 0.016900096893310547, 0.01682387161254883, 0.01667679977416992, 0.016679424285888672, 0.01661337661743164, 0.016570367813110352, 0.016590688705444338, 0.016629919052124024, 0.016893728256225586, 0.016842975616455078, 0.01676697540283203, 0.01678950309753418, 0.016565439224243163, 0.016837440490722656, 0.0170250244140625, 0.016914432525634765, 0.01676288032531738, 0.016789375305175783, 0.01677939224243164, 0.016697343826293946, 0.017715200424194336, 0.017006336212158205, 0.016877824783325196, 0.016859136581420898, 0.01691347122192383, 0.016892704010009765, 0.017272991180419923, 0.017186080932617188, 0.016857824325561523, 0.016920576095581053, 0.016824127197265625, 0.016935104370117186, 0.01683625602722168, 0.016908639907836913, 0.016682592391967774, 0.016722335815429687, 0.01665772819519043, 0.017060543060302736, 0.01700044822692871, 0.017213279724121094, 0.01711529541015625, 0.01694038391113281, 0.0176625919342041, 0.017006175994873047, 0.017084863662719725, 0.016963584899902344, 0.01716633605957031, 0.017088512420654296, 0.016855039596557618, 0.016867328643798828, 0.016887807846069337, 0.016957727432250977, 0.01711894416809082, 0.016850879669189453, 0.016805696487426757, 0.016860576629638673, 0.016855327606201172, 0.01701535987854004, 0.017073503494262697, 0.016949792861938477, 0.016859264373779298, 0.016965087890625, 0.017086208343505858, 0.016911008834838866, 0.017073631286621094, 0.016930912017822267, 0.01702332878112793, 0.01697148895263672, 0.01685932731628418, 0.017241567611694337, 0.017050016403198243, 0.016863679885864256, 0.016695295333862305, 0.016786624908447265, 0.017210176467895508, 0.01677516746520996, 0.016648191452026367, 0.016637567520141602, 0.016673152923583984, 0.0169736328125, 0.016961664199829102, 0.01686534309387207, 0.016734207153320312]",tokens/s,59.04038326520284,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,3163.000832,4423.876608,0.0,4028.628992,3944.723968,s,1,10.2890380859375,10.2890380859375,0.0,10.2890380859375,10.2890380859375,10.2890380859375,10.2890380859375,[10.2890380859375],,kWh,9.265283422500184e-05,1.0210715612788045e-05,2.773502218800933e-05,0.00013059857202579923,,MB,3183.652864,4763.615232,0.0,4353.687552,4305.05728,s,10,1.145394432067871,0.11453944320678713,0.0002339463254169406,0.11449860763549805,0.11482673568725586,0.11494184837341308,0.11503393852233887,"[0.1150569610595703, 0.11436332702636719, 0.1142347183227539, 0.11432816314697265, 0.11465875244140625, 0.1143918685913086, 0.11456227111816407, 0.11446249389648437, 0.11453472137451172, 0.11480115509033204]",tokens/s,2235.037929578747,kWh,3.4183840612885793e-06,3.7688180050823733e-07,2.2660515544417967e-06,6.061317416238614e-06,tokens/kWh,42235042.71763783,MB,3187.810304,4763.615232,0.0,4353.687552,4305.05984,s,10,25.420324462890623,2.5420324462890624,0.00740630854366198,2.5402193603515624,2.552800537109375,2.5546341552734373,2.5561010498046874,"[2.5564677734375, 2.55239306640625, 2.533532958984375, 2.5334658203125, 2.536947998046875, 2.540232666015625, 2.536437744140625, 2.546279052734375, 2.544361328125, 2.5402060546875]",tokens/s,24.78331859688469,kWh,7.489263759996022e-05,8.260680932844667e-06,3.806064737355796e-05,0.00012121396590636286,tokens/kWh,519742.0901867625,,s,630,25.41778401947023,0.04034568891979399,0.0005551954603061547,0.04022352027893067,0.04074762496948242,0.04107856597900391,0.04241985176086426,"[0.040673408508300785, 0.040661502838134765, 0.04061715316772461, 0.04047705459594726, 0.04062995147705078, 0.040651519775390624, 0.04033033752441406, 0.04043254470825195, 0.04060160064697266, 0.04055654525756836, 0.040473697662353515, 0.04043244934082031, 0.04102944183349609, 0.04362678527832031, 0.0407276496887207, 0.04527302551269531, 0.04102409744262695, 0.040957504272460935, 0.04071916961669922, 0.04134707260131836, 0.04053811264038086, 0.04056195068359375, 0.04037295913696289, 0.040683231353759765, 0.040735008239746094, 0.04055270385742187, 0.04081638336181641, 0.04091494369506836, 0.04069539260864258, 0.04033782577514648, 0.04096409606933594, 0.040339359283447264, 0.04035356903076172, 0.040245567321777344, 0.040204288482666016, 0.04024115371704102, 0.04128768157958984, 0.04006911849975586, 0.04072652816772461, 0.040045982360839845, 0.04247817611694336, 0.040529918670654294, 0.040114177703857425, 0.039972862243652346, 0.0405805778503418, 0.040454689025878905, 0.04016537475585937, 0.0399268798828125, 0.03990220642089844, 0.039812576293945315, 0.03988115310668945, 0.040103935241699216, 0.0401541748046875, 0.040274879455566404, 0.03986355209350586, 0.04002838516235351, 0.03991606521606445, 0.039932926177978514, 0.03998646545410156, 0.0398759994506836, 0.03993017578125, 0.04008550262451172, 0.04032716751098633, 0.04026208114624023, 0.04047257614135742, 0.04038383865356445, 0.0402786865234375, 0.04030054473876953, 0.04016332626342774, 0.0401162223815918, 0.04010720062255859, 0.03997779083251953, 0.04005068969726563, 0.04050124740600586, 0.04012236785888672, 0.04036608123779297, 0.04062617492675781, 0.04044800186157226, 0.040141918182373046, 0.04490742492675781, 0.04033052825927735, 0.04028387069702148, 0.040049568176269534, 0.04110345458984375, 0.040691585540771485, 0.040382591247558594, 0.04055199813842773, 0.04029894256591797, 0.04053104019165039, 0.04022544097900391, 0.04019356918334961, 0.04038524627685547, 0.04019830322265625, 
0.04029993438720703, 0.03998764801025391, 0.040088897705078126, 0.040336063385009766, 0.04074655914306641, 0.04026959991455078, 0.040269824981689455, 0.041433761596679684, 0.04206768035888672, 0.04018739318847656, 0.04014889526367187, 0.04120249557495117, 0.04270844650268555, 0.04035644912719726, 0.040323070526123043, 0.04175462341308594, 0.04074454498291016, 0.04064092636108398, 0.04037392044067383, 0.04061014556884766, 0.04024115371704102, 0.04011606216430664, 0.04053798294067383, 0.04017523193359375, 0.040433792114257815, 0.04063900756835938, 0.040089569091796874, 0.040124446868896484, 0.04019404983520508, 0.04001587295532227, 0.040153087615966795, 0.040188129425048826, 0.04022643280029297, 0.040514049530029295, 0.040224704742431644, 0.040048702239990235, 0.040003360748291014, 0.04034377670288086, 0.04004044723510742, 0.04036608123779297, 0.040224704742431644, 0.04014400100708008, 0.04009056091308594, 0.04003609466552734, 0.0401082878112793, 0.04018175888061523, 0.0402630729675293, 0.04031948852539063, 0.04014416122436523, 0.04046828842163086, 0.040330238342285156, 0.04040703964233398, 0.040417278289794925, 0.04022819137573242, 0.04018780899047852, 0.04050543975830078, 0.04001244735717773, 0.04021420669555664, 0.04015955352783203, 0.04038800048828125, 0.04018982315063477, 0.03995107269287109, 0.04071331024169922, 0.04025436782836914, 0.04013840103149414, 0.03995888137817383, 0.040017822265625, 0.040169567108154294, 0.04029439926147461, 0.04022886276245117, 0.04031094360351563, 0.040253536224365234, 0.04001152038574219, 0.040187904357910156, 0.040136703491210936, 0.039858177185058595, 0.039925121307373045, 0.040059040069580075, 0.04015292739868164, 0.04033561706542969, 0.04018739318847656, 0.04012940979003906, 0.0401162223815918, 0.04013075256347656, 0.04014470291137695, 0.04026761627197266, 0.040406944274902344, 0.04071209716796875, 0.04042736053466797, 0.04023689651489258, 0.040100032806396485, 0.04019247817993164, 0.04014080047607422, 0.04024934387207031, 0.04027801513671875, 0.040030174255371094, 0.04064604949951172, 0.04039535903930664, 0.04016332626342774, 0.04013382339477539, 0.04006732940673828, 0.040030017852783206, 0.039994110107421876, 0.04017356872558594, 0.04005068969726563, 0.040101886749267575, 0.03993798446655274, 0.03999068832397461, 0.04009408187866211, 0.04048076629638672, 0.040100128173828124, 0.039997089385986326, 0.03999574279785156, 0.03989503860473633, 0.04000678253173828, 0.03997148895263672, 0.039921440124511716, 0.04002041625976562, 0.04038246536254883, 0.040252735137939456, 0.04032083129882812, 0.040341793060302736, 0.040251998901367186, 0.04047052764892578, 0.040433345794677736, 0.04023689651489258, 0.04021295928955078, 0.040213665008544924, 0.0400362548828125, 0.040239776611328125, 0.04009571075439453, 0.04030905532836914, 0.04023910522460938, 0.0401627197265625, 0.04020080184936523, 0.03999948883056641, 0.03997625732421875, 0.04009235382080078, 0.04017897415161133, 0.040276702880859376, 0.04032921600341797, 0.03991548919677734, 0.040459999084472655, 0.04180204772949219, 0.04045366287231445, 0.04073107147216797, 0.04010601425170898, 0.040065120697021485, 0.04018166351318359, 0.040275966644287106, 0.040123775482177736, 0.04074467086791992, 0.040323135375976565, 0.039996257781982424, 0.04014604949951172, 0.04030271911621094, 0.04013747024536133, 0.039962623596191404, 0.04003430557250977, 0.0414048957824707, 0.040562305450439456, 0.040652702331542966, 0.0404376335144043, 0.040241569519042966, 0.04036012649536133, 0.04032921600341797, 0.04022224044799805, 
0.04085398483276367, 0.0407347183227539, 0.040869888305664064, 0.040525825500488284, 0.04035379028320313, 0.04042931365966797, 0.04032742309570313, 0.040325119018554685, 0.040277503967285154, 0.04033116912841797, 0.04029241561889649, 0.04001795196533203, 0.04015478515625, 0.04026556777954102, 0.04018617630004883, 0.03998585510253906, 0.039979007720947264, 0.03994131088256836, 0.04010067367553711, 0.040665088653564455, 0.04081459045410156, 0.04035583877563476, 0.04062825775146484, 0.040482017517089845, 0.04018662261962891, 0.040486751556396486, 0.04027612686157227, 0.04023910522460938, 0.04007526397705078, 0.04020633697509766, 0.04003635025024414, 0.04003859329223633, 0.04046623992919922, 0.040374271392822264, 0.04017295837402344, 0.04002588653564453, 0.04011004638671875, 0.0399532470703125, 0.04012236785888672, 0.039962623596191404, 0.03996876907348633, 0.03993360137939453, 0.03991587066650391, 0.04015919876098633, 0.040081409454345705, 0.04003036880493164, 0.0400010871887207, 0.040092063903808595, 0.04044524765014648, 0.03995868682861328, 0.0402334098815918, 0.040007038116455076, 0.04006361770629883, 0.04013993453979492, 0.03982342529296875, 0.04083977508544922, 0.04112319946289063, 0.04227705764770508, 0.04024911880493164, 0.04004556655883789, 0.040351486206054686, 0.04028416061401367, 0.040191295623779294, 0.04012716674804687, 0.042264575958251956, 0.040191070556640625, 0.03984476852416992, 0.03983769607543945, 0.04014284896850586, 0.03990729522705078, 0.039796768188476564, 0.04008550262451172, 0.0402083854675293, 0.03998720169067383, 0.03996041488647461, 0.03985833740234375, 0.039989246368408206, 0.040103935241699216, 0.04172009658813477, 0.040768863677978516, 0.040175071716308595, 0.04117504119873047, 0.040632320404052735, 0.04018220901489258, 0.04023664093017578, 0.04145379257202148, 0.04054697418212891, 0.04098252868652344, 0.04027391815185547, 0.040268863677978516, 0.04030915069580078, 0.04033795166015625, 0.040322399139404295, 0.04011484909057617, 0.04009369659423828, 0.04028006362915039, 0.04063641738891602, 0.03999334335327148, 0.03996006393432617, 0.04019395065307617, 0.04017203140258789, 0.04038016128540039, 0.03989923095703125, 0.04029695892333984, 0.040295711517333986, 0.03996105575561523, 0.0400992317199707, 0.04010863876342773, 0.039874561309814455, 0.0399458236694336, 0.04024566268920898, 0.040030208587646485, 0.03997081756591797, 0.04004006576538086, 0.040301982879638674, 0.04001020812988281, 0.04008729553222656, 0.039944480895996094, 0.04158246231079102, 0.04054243087768555, 0.04012713623046875, 0.04033536148071289, 0.040153087615966795, 0.04028416061401367, 0.04007526397705078, 0.0405129280090332, 0.04043836975097656, 0.04019993591308594, 0.040184062957763673, 0.04018175888061523, 0.039944000244140625, 0.04013657760620117, 0.04049132919311523, 0.04012403106689453, 0.04009817504882812, 0.0400357437133789, 0.04006768035888672, 0.03999334335327148, 0.0400175666809082, 0.040019519805908205, 0.0399815673828125, 0.03993423843383789, 0.040022014617919925, 0.040785919189453124, 0.03997238540649414, 0.039963104248046874, 0.040000545501708985, 0.040010719299316405, 0.04025360107421875, 0.04022233581542969, 0.04042160034179688, 0.04030054473876953, 0.04032921600341797, 0.04089846420288086, 0.04012656021118164, 0.04041523361206055, 0.040527393341064456, 0.04056889724731445, 0.040583873748779295, 0.04040265655517578, 0.04030575942993164, 0.040643489837646485, 0.04021193695068359, 0.04011859130859375, 0.04009334564208984, 0.04005849456787109, 0.040104511260986325, 
0.04017776107788086, 0.04076531219482422, 0.040089534759521483, 0.04030860900878906, 0.04021299362182617, 0.04000982284545898, 0.0401899528503418, 0.04003168106079102, 0.04055843353271484, 0.040143585205078124, 0.040304447174072264, 0.040398143768310545, 0.040182655334472656, 0.03999961471557617, 0.04027913665771484, 0.04006358337402344, 0.04000297546386719, 0.04006755065917969, 0.040134464263916016, 0.040011680603027344, 0.03995641708374023, 0.04002608108520508, 0.039887584686279294, 0.040217823028564456, 0.040065216064453124, 0.04009235382080078, 0.0401448974609375, 0.04006092834472656, 0.0400928955078125, 0.04001984024047851, 0.040369056701660154, 0.040535903930664065, 0.04041129684448242, 0.04048691177368164, 0.040304641723632816, 0.04017766571044922, 0.040304641723632816, 0.04008345413208008, 0.04021452713012695, 0.040286209106445314, 0.04020547103881836, 0.040547168731689454, 0.04023091125488281, 0.040089599609375, 0.040312416076660154, 0.041940673828125, 0.04206252670288086, 0.04040256118774414, 0.04070646286010742, 0.04046233749389649, 0.040343551635742186, 0.04044800186157226, 0.04010598373413086, 0.040052734375, 0.04001574325561524, 0.04039443206787109, 0.0401228141784668, 0.04057702255249023, 0.040804126739501956, 0.04062844848632813, 0.04014694213867188, 0.0416255989074707, 0.041981952667236325, 0.040809921264648434, 0.04065951919555664, 0.04046976089477539, 0.040628990173339846, 0.0404213752746582, 0.04047872161865235, 0.0403394546508789, 0.04017926406860352, 0.040411487579345706, 0.04009910583496094, 0.04095699310302734, 0.04088332748413086, 0.0407674560546875, 0.04040975952148437, 0.040565567016601564, 0.04009897613525391, 0.040147647857666016, 0.04036214447021484, 0.04004044723510742, 0.04002732849121094, 0.039995296478271485, 0.039973438262939455, 0.0400096321105957, 0.03998764801025391, 0.03985609436035156, 0.0398306884765625, 0.039826465606689454, 0.03992057418823242, 0.0401005744934082, 0.040175552368164065, 0.040382720947265624, 0.04038361740112305, 0.040868736267089846, 0.04086150360107422, 0.040605438232421874, 0.0405016975402832, 0.0405667839050293, 0.04046601486206055, 0.04063273620605469, 0.04089772796630859, 0.04026451110839844, 0.040357887268066404, 0.040353889465332034, 0.0401585922241211, 0.040210559844970704, 0.04062422561645508, 0.04020406341552735, 0.040305183410644534, 0.039923713684082034, 0.03987836837768555, 0.040296192169189456, 0.0401229133605957, 0.04019516754150391, 0.04019398498535156, 0.04060176086425781, 0.04104060745239258, 0.04039075088500976, 0.04023616027832031, 0.04041747283935547, 0.04044790267944336, 0.04032985687255859, 0.040341663360595706, 0.04013071823120117, 0.040381534576416016, 0.040385311126708984, 0.04006038284301758, 0.04011468887329102, 0.04356022262573242, 0.04087472152709961, 0.04102963256835938, 0.04092313766479492, 0.04057292938232422, 0.040581119537353515, 0.04032924652099609, 0.04075721740722656, 0.04040419387817383, 0.04004521560668945, 0.041010238647460937, 0.04047999954223633, 0.04008310317993164, 0.04002361679077148, 0.040239551544189456, 0.04022476959228516, 0.04053923034667969, 0.04000656127929687, 0.04101500701904297, 0.04006940841674805, 0.03994214248657227, 0.04186111831665039, 0.040570079803466795, 0.04043993759155273, 0.04119414520263672, 0.041095169067382815, 0.040664352416992185, 0.04058323287963867, 0.040376991271972654, 0.04065622329711914, 0.04014352035522461, 0.040013824462890625, 0.04005414581298828, 0.041353855133056644, 0.04001177597045898, 0.039916671752929685, 0.03987955093383789, 0.03977830505371094, 
0.040097118377685544, 0.04015785598754883, 0.040350784301757814, 0.039832511901855466, 0.03970816040039062, 0.040108097076416015, 0.03975936126708984, 0.03975980758666992, 0.03990425491333008, 0.040011585235595705, 0.0397305908203125, 0.04022665786743164, 0.03992243194580078, 0.03984918212890625, 0.03978931045532227, 0.03980310440063477, 0.04050281524658203, 0.039842273712158205, 0.039809024810791016, 0.04008755111694336, 0.04005068969726563, 0.04004355239868164, 0.04001276779174805, 0.0398699836730957, 0.03996902465820312, 0.045717727661132815, 0.04005068969726563, 0.040130561828613284, 0.03986636734008789, 0.03994214248657227, 0.04015625762939453, 0.03981609725952148, 0.04132044982910156, 0.04105827331542969, 0.04047248077392578]",tokens/s,24.785795627085953,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you 
passed . Fix one of those so they match! ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4372.512768,4566.482944,0.0,4188.012544,4187.049984,s,1,10.332669921875,10.332669921875,0.0,10.332669921875,10.332669921875,10.332669921875,10.332669921875,[10.332669921875],,kWh,0.00010267034029583305,1.1317863835509072e-05,3.084280245199992e-05,0.00014483100658334205,,MB,4080.443392,4962.844672,0.0,4555.014144,4514.269184,s,10,7.865827026367188,0.7865827026367188,0.002886280729514126,0.7850776062011718,0.7900420410156249,0.7905072570800781,0.7908794299316406,"[0.782828369140625, 0.7894422607421875, 0.7844773559570313, 0.7850126953125, 0.7840709228515625, 0.7897021484375, 0.7842396240234375, 0.7899386596679687, 0.7851425170898437, 0.7909724731445312]",tokens/s,325.4584662767914,kWh,2.2918281938461133e-05,2.5274666805281998e-06,1.5180375392153848e-05,4.062612401114318e-05,tokens/kWh,6301364.115606568,MB,4093.394944,4979.621888,0.0,4571.79136,4514.271744,s,10,467.798828125,46.7798828125,0.03317416613981883,46.7930546875,46.8095484375,46.81011796875,46.81057359375,"[46.7034296875, 46.74363671875, 46.7610078125, 46.77698046875, 46.78384375, 46.80287109375, 46.802265625, 46.809421875, 46.80468359375, 46.8106875]",tokens/s,1.3467327451954592,kWh,0.0013654926656507058,0.000150622730472546,0.0009080636687580463,0.002424179064881298,tokens/kWh,25988.179220203292,,s,630,467.7878660888669,0.7425204223632813,0.0006844989959605742,0.7426236572265625,0.7431574462890624,0.7433347930908203,0.7435878228759765,"[0.7409111938476562, 0.740742431640625, 0.7414298706054687, 0.74062060546875, 0.7404442138671875, 0.7408536987304688, 0.7409601440429687, 0.7411483764648438, 0.74078662109375, 0.7409418334960938, 0.7408561401367187, 0.7414207153320312, 0.7407407836914063, 0.7410364379882812, 0.7412715454101563, 0.7415802612304687, 0.7406494750976562, 0.7412059936523437, 0.7412817993164063, 0.741195068359375, 0.7406961669921875, 0.7416878662109375, 0.740632080078125, 0.7417431030273437, 0.7411015625, 0.741113525390625, 0.740952392578125, 0.7412178955078125, 0.7411934204101562, 0.7409158935546875, 0.741591064453125, 0.7417518920898437, 0.740760498046875, 0.7420047607421875, 0.7417855834960938, 0.741658203125, 0.7413723754882813, 0.7413206787109375, 0.7414326171875, 0.74105517578125, 0.7418880004882813, 0.7414899291992187, 0.7414234619140625, 0.7416295166015625, 0.7411597290039063, 0.7413309326171875, 0.741670166015625, 0.7417576904296875, 0.7414681396484375, 0.7412572021484375, 0.7414517822265625, 0.7421171264648437, 0.7413455200195312, 0.741306396484375, 0.74177490234375, 0.7413662109375, 0.7415738525390625, 0.7421973266601563, 0.741670654296875, 0.7415838623046875, 0.742012939453125, 0.7419064331054688, 0.7414863891601563, 0.7411098022460938, 0.7416702880859375, 0.7419783935546875, 0.7411612548828125, 
0.7413350219726562, 0.7418200073242187, 0.741941650390625, 0.7415685424804688, 0.7422476196289063, 0.7416410522460938, 0.7412612915039063, 0.7419465942382812, 0.7418907470703126, 0.7419454345703125, 0.7413104858398437, 0.7418941650390625, 0.741507080078125, 0.7417914428710938, 0.7418411865234374, 0.74210302734375, 0.7418203735351563, 0.7427645874023437, 0.7417232055664063, 0.7411324462890625, 0.7418697509765625, 0.7416817016601562, 0.742033203125, 0.7417816772460938, 0.7421992797851562, 0.7420682373046875, 0.7422382202148438, 0.7418982543945313, 0.742023193359375, 0.74232763671875, 0.7421692504882812, 0.7418585205078125, 0.7418699951171875, 0.74219482421875, 0.7417301025390625, 0.741923583984375, 0.7422916259765625, 0.74202734375, 0.7420128784179687, 0.7420436401367188, 0.74224853515625, 0.7418878173828125, 0.7422157592773437, 0.7422811889648437, 0.7418238525390625, 0.741875732421875, 0.7418477172851563, 0.7424307250976563, 0.7423569946289063, 0.7418634033203125, 0.7422015991210937, 0.7421802978515625, 0.741806396484375, 0.742371337890625, 0.742463623046875, 0.7422913208007812, 0.742451171875, 0.7423262329101562, 0.7423321533203125, 0.7417332763671876, 0.7420638427734375, 0.7420910034179687, 0.7422750854492187, 0.7414292602539062, 0.7421375732421875, 0.742060302734375, 0.7421511840820313, 0.7419559936523438, 0.7420770263671875, 0.7415540771484375, 0.741965087890625, 0.7424330444335937, 0.7415834350585937, 0.74231396484375, 0.741759033203125, 0.7421541748046875, 0.742, 0.7415894165039062, 0.7419058227539063, 0.7423639526367187, 0.7421171875, 0.7419309692382813, 0.7422015380859375, 0.742751953125, 0.7422569580078126, 0.7420476684570313, 0.7418409423828125, 0.742561767578125, 0.7424368896484375, 0.742372802734375, 0.7426729736328125, 0.7423775024414062, 0.7418040161132813, 0.742319580078125, 0.7422195434570312, 0.7423568725585937, 0.7422083740234375, 0.7421634521484375, 0.742289794921875, 0.7424149169921875, 0.7425884399414062, 0.7427276611328125, 0.74241845703125, 0.7423897705078125, 0.7428956298828125, 0.7419310302734375, 0.742454345703125, 0.7419319458007813, 0.7426245727539063, 0.7416572875976563, 0.742635498046875, 0.7424710693359375, 0.7423289184570312, 0.742068115234375, 0.7431066284179687, 0.7425264892578125, 0.7424691162109375, 0.7425054931640624, 0.7422279663085938, 0.7425269775390625, 0.7424140625, 0.7426787719726563, 0.7426966552734375, 0.7415352172851563, 0.7422392578125, 0.7424102172851562, 0.742135009765625, 0.7422636108398437, 0.7420353393554687, 0.74172021484375, 0.74231103515625, 0.7420568237304688, 0.7420620727539062, 0.7426764526367188, 0.742227783203125, 0.7420909423828125, 0.7422996215820312, 0.742192626953125, 0.7425576171875, 0.7425132446289062, 0.7418695678710937, 0.7424423217773437, 0.7422299194335937, 0.7424633178710938, 0.7417919311523438, 0.742465576171875, 0.7422327270507812, 0.7426171264648438, 0.742260009765625, 0.7421241455078125, 0.7425425415039062, 0.7420343017578125, 0.742509765625, 0.7425892944335938, 0.7424696044921875, 0.7419692993164062, 0.7424620971679687, 0.742371337890625, 0.7421785888671875, 0.7423941040039063, 0.7422993774414063, 0.7428602905273437, 0.7424945068359375, 0.7421895141601562, 0.7425863647460937, 0.7418142700195313, 0.7424737548828125, 0.7430901489257813, 0.7421828002929688, 0.7430094604492188, 0.7428085327148437, 0.7429366455078125, 0.7423128662109375, 0.7424125366210937, 0.7427161254882813, 0.7429427490234375, 0.7427215576171875, 0.7429300537109375, 0.7427828369140625, 0.7426687622070313, 0.7426682739257813, 0.7428546752929688, 
0.7472858276367188, 0.7425925903320313, 0.7426705932617188, 0.7421051025390625, 0.7418914184570312, 0.7425516967773438, 0.7425357055664062, 0.7420498046875, 0.7424694213867188, 0.7420654907226563, 0.7424951171875, 0.7422190551757812, 0.7421734008789063, 0.7426744384765624, 0.7417849731445313, 0.7421753540039062, 0.7425023803710937, 0.7420511474609375, 0.7422849731445312, 0.7427368774414063, 0.7424122924804688, 0.7424389038085938, 0.7426638793945313, 0.7428917846679688, 0.742703125, 0.7427176513671875, 0.7426803588867188, 0.7424163818359375, 0.742846435546875, 0.7427088623046875, 0.7428919067382812, 0.7428956298828125, 0.7427333984375, 0.7425970458984374, 0.7429732666015625, 0.7428092041015625, 0.7422715454101563, 0.7426232299804687, 0.7429365234375, 0.7422946166992187, 0.7425728759765625, 0.7430309448242187, 0.7424307250976563, 0.7419351196289062, 0.7428231201171875, 0.7425114135742188, 0.74288330078125, 0.742803466796875, 0.7427543334960938, 0.7422457885742187, 0.7433970947265625, 0.7424357299804687, 0.7428424072265625, 0.7427125244140625, 0.74261572265625, 0.7431414184570313, 0.7426329345703125, 0.7430205078125, 0.7426730346679687, 0.7428487548828125, 0.7423252563476562, 0.7427470703125, 0.7431675415039063, 0.742809814453125, 0.7430718383789062, 0.742619140625, 0.7426431274414063, 0.7423594970703125, 0.7428670654296875, 0.7424796752929688, 0.7430535278320313, 0.7420407104492187, 0.7421405639648437, 0.7426275634765624, 0.7426573486328125, 0.7423167114257813, 0.7425001831054687, 0.7434381713867187, 0.7422440185546875, 0.7427672119140625, 0.74290087890625, 0.742628173828125, 0.742381591796875, 0.742920166015625, 0.7431004028320313, 0.742475341796875, 0.7425416870117187, 0.7425712890625, 0.7420874633789063, 0.743067626953125, 0.7426334838867188, 0.7430707397460937, 0.7431751708984375, 0.7429895629882812, 0.7426439819335937, 0.74296728515625, 0.7430427856445313, 0.7428402099609375, 0.74272802734375, 0.7427555541992188, 0.7429782104492187, 0.7431640014648437, 0.7428445434570312, 0.7430631713867187, 0.7427684326171875, 0.7425909423828125, 0.742961181640625, 0.7423259887695313, 0.74312060546875, 0.7428482055664063, 0.7428569946289062, 0.7432578125, 0.7426240844726563, 0.743041015625, 0.742920166015625, 0.7431248779296875, 0.7429837646484375, 0.7431264038085937, 0.7429137573242187, 0.743111572265625, 0.7432763061523437, 0.7424658203125, 0.7430245971679688, 0.7433277587890625, 0.7429918823242188, 0.7425571899414063, 0.7433282470703125, 0.7428646850585937, 0.743037109375, 0.7425703125, 0.7429478759765625, 0.7422984619140625, 0.742576171875, 0.7425693359375, 0.7424683227539063, 0.7430922241210938, 0.742842529296875, 0.7428029174804688, 0.7423410034179687, 0.7428628540039063, 0.7429490356445313, 0.742640625, 0.7428678588867188, 0.7427801513671874, 0.7429978637695313, 0.7429356689453125, 0.7426107788085937, 0.7426436767578125, 0.742823974609375, 0.7427727661132812, 0.7442432250976563, 0.7429447631835937, 0.7430430908203125, 0.7428211059570312, 0.7427387084960938, 0.7429916381835937, 0.742849853515625, 0.7425873413085937, 0.742582275390625, 0.7429816284179688, 0.743125, 0.742664306640625, 0.7427764892578125, 0.7429058837890625, 0.743031005859375, 0.7429959716796875, 0.742522705078125, 0.7427412719726563, 0.7431339111328125, 0.7428846435546875, 0.7428405151367188, 0.7426342163085937, 0.7431577758789063, 0.742472900390625, 0.7430396728515625, 0.742861083984375, 0.7432180786132813, 0.7428720703125, 0.7430491943359375, 0.7427246704101562, 0.743373779296875, 0.7430021362304687, 0.7431200561523438, 
0.7430538330078125, 0.7428633422851563, 0.7426433715820312, 0.7431026611328125, 0.742940673828125, 0.7435852661132812, 0.7429833984375, 0.7430778198242187, 0.7431577758789063, 0.7431532592773438, 0.7425741577148437, 0.7428815307617187, 0.7427513427734375, 0.7430576171875, 0.7426541137695313, 0.7425327758789062, 0.7432507934570313, 0.7426170654296875, 0.7427522583007813, 0.7428915405273437, 0.7424327392578125, 0.7430390014648437, 0.7426314086914062, 0.7429222412109375, 0.7431489868164063, 0.7427958374023438, 0.7436157836914062, 0.7425665893554687, 0.7428231201171875, 0.742515380859375, 0.7431558227539062, 0.7427317504882812, 0.7427861938476562, 0.7427708740234376, 0.7428307495117188, 0.7424716796875, 0.7424383544921875, 0.7432681274414062, 0.7425994262695312, 0.7430263671875, 0.7423818359375, 0.7434302368164063, 0.7426638793945313, 0.7433649291992187, 0.7426434326171875, 0.7430082397460938, 0.7428876953125, 0.7429345092773437, 0.7427215576171875, 0.7425567016601563, 0.7435888671875, 0.742903564453125, 0.7430648193359375, 0.7429068603515625, 0.7430791015625, 0.7429815673828125, 0.7432242431640625, 0.743103759765625, 0.7426015014648437, 0.7428159790039063, 0.7433822631835938, 0.7434183959960937, 0.7428648681640625, 0.7430839233398437, 0.7431104736328125, 0.7426787719726563, 0.7426023559570313, 0.7431417846679688, 0.743210693359375, 0.7427914428710938, 0.7485798950195313, 0.7432222290039062, 0.7423714599609375, 0.7427328491210937, 0.7425728759765625, 0.742863037109375, 0.742371337890625, 0.742516845703125, 0.7428583984375, 0.7426624145507813, 0.7424384155273438, 0.7427845458984375, 0.7433097534179688, 0.74272412109375, 0.7423936157226563, 0.7428265380859375, 0.7431574096679687, 0.7425597534179688, 0.7424810180664062, 0.7430985107421875, 0.7432814331054688, 0.7429212646484376, 0.742916259765625, 0.7430966796875, 0.7423143310546875, 0.7430452270507812, 0.7427455444335938, 0.7430186157226563, 0.742625732421875, 0.7432007446289063, 0.743462890625, 0.742307861328125, 0.742781005859375, 0.7429815063476563, 0.7430794067382812, 0.7428836669921876, 0.74315185546875, 0.7430922241210938, 0.7428587646484375, 0.7430697021484375, 0.7426969604492187, 0.7434301147460938, 0.74296630859375, 0.743320556640625, 0.7429058837890625, 0.7432684326171874, 0.7429072265625, 0.7427815551757813, 0.7426837768554687, 0.7428739624023437, 0.7429467163085938, 0.7428485717773438, 0.7434170532226563, 0.7433401489257813, 0.7428226928710937, 0.7428097534179687, 0.7429998168945312, 0.743096435546875, 0.7431119384765625, 0.7429306640625, 0.7432291259765625, 0.7430718383789062, 0.7435374145507813, 0.7433564453125, 0.7432493896484375, 0.7426903686523437, 0.7428428344726562, 0.7428362426757813, 0.7431109008789063, 0.7427963256835938, 0.7429578247070312, 0.7426099243164063, 0.7430643920898438, 0.7425025634765625, 0.742628662109375, 0.7431317138671875, 0.74294287109375, 0.7432335205078126, 0.742649658203125, 0.7429142456054687, 0.7428546752929688, 0.7432407836914062, 0.7422105102539063, 0.7429706420898438, 0.7433489990234375, 0.7431597900390625, 0.742063720703125, 0.7429671020507812, 0.7425288696289063, 0.7424314575195312, 0.7430123291015625, 0.7431261596679688, 0.743260009765625, 0.743404541015625, 0.7429522705078125, 0.7430451049804687, 0.7432089233398438, 0.7428690795898437, 0.7429454956054687, 0.74342822265625, 0.74301123046875, 0.7425867919921875, 0.7429883422851562, 0.7435775756835937, 0.742518798828125, 0.7427189331054688, 0.7433569946289063, 0.7431270141601563, 0.7429991455078125, 0.7428924560546875, 0.7433113403320313, 
0.743530517578125, 0.7433584594726562, 0.7430082397460938, 0.743664794921875, 0.7432050170898438, 0.7432547607421875, 0.7431248779296875, 0.7426888427734375, 0.7428217163085937, 0.7434033203125, 0.7434325561523437, 0.7432396850585937, 0.743583740234375, 0.743044921875, 0.7427442626953125, 0.743669189453125, 0.7433466796875]",tokens/s,1.3467643042291242,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,2065.076224,2350.77632,0.0,1948.254208,1873.42336,s,1,9.1903642578125,9.1903642578125,0.0,9.1903642578125,9.1903642578125,9.1903642578125,9.1903642578125,[9.1903642578125],,kWh,5.793497292915314e-05,6.3833906957613754e-06,1.7316958298013718e-05,8.163532192292824e-05,,MB,2132.39808,2451.439616,0.0,2034.23744,2023.140352,s,10,0.8362190399169922,0.08362190399169922,0.00024227978400290535,0.08361990356445312,0.08388296356201172,0.08391213912963867,0.08393547958374023,"[0.08394131469726562, 0.08317810821533203, 0.08354441833496094, 0.08382364654541016, 0.08369538879394531, 0.0838493423461914, 0.08387648010253906, 0.08339225769042968, 0.08354147338867188, 0.0833766098022461]",tokens/s,3061.398841449628,kWh,2.494874931410368e-06,2.751385472405686e-07,1.648600559145373e-06,4.4186140377963096e-06,tokens/kWh,57936719.0277779,MB,2141.913088,2451.439616,0.0,2034.23744,2023.142912,s,10,17.080737304687503,1.7080737304687499,0.011909771557403947,1.704851623535156,1.7245524169921875,1.7294508911132813,1.7333696704101564,"[1.6904195556640624, 1.704584228515625, 1.734349365234375, 1.7234638671875, 1.7122548828125, 1.7051190185546874, 1.7008658447265625, 1.70244580078125, 1.7006376953125, 1.7065970458984374]",tokens/s,36.883653718338486,kWh,4.9802623684424013e-05,5.492926473663553e-06,2.5353282153453582e-05,8.064883231154116e-05,tokens/kWh,781164.4408766531,,s,630,17.078311103820795,0.027108430323525078,0.0004879160924318792,0.027146239280700684,0.027530195045471193,0.0277276141166687,0.028879893798828132,"[0.02787843132019043, 0.027298303604125978, 0.027271648406982422, 0.02713523292541504, 0.027384191513061523, 0.02758268737792969, 0.027381919860839845, 0.02723344039916992, 0.027145055770874022, 0.027047744750976564, 0.026955263137817383, 0.026979007720947266, 0.02707072067260742, 0.026703615188598633, 0.02695577621459961, 0.027114496231079102, 0.02653900718688965, 0.029575168609619142, 0.027695104598999022, 0.02655996894836426, 0.02644432067871094, 0.02637824058532715, 0.02650268745422363, 0.02648726463317871, 0.026686847686767576, 0.02664512062072754, 0.026614816665649414, 0.026493919372558593, 0.026898303985595704, 0.026713823318481444, 0.02751036834716797, 0.026515359878540038, 0.02673961639404297, 0.026456064224243164, 0.026433183670043946, 0.0268407039642334, 0.026737375259399412, 0.0265482234954834, 0.026814464569091798, 0.02666223907470703, 0.02691753578186035, 0.026462207794189452, 0.026449920654296875, 0.026302463531494142, 
0.02652569580078125, 0.026537984848022462, 0.02660966491699219, 0.02652364730834961, 0.026513408660888672, 0.026443584442138672, 0.02672591972351074, 0.026440191268920898, 0.02665897560119629, 0.026377344131469728, 0.02652150344848633, 0.027190176010131836, 0.026828863143920897, 0.026654720306396484, 0.026624000549316407, 0.026653984069824218, 0.026589920043945312, 0.026587135314941408, 0.026445823669433592, 0.02651683235168457, 0.026395296096801756, 0.02636185646057129, 0.02635775947570801, 0.026327039718627928, 0.026372095108032227, 0.026797760009765626, 0.02699910354614258, 0.026762975692749023, 0.026869951248168947, 0.02699068832397461, 0.027082752227783204, 0.027405567169189453, 0.027195840835571288, 0.02712812805175781, 0.02700611114501953, 0.027017248153686522, 0.02679792022705078, 0.026749631881713868, 0.026849279403686522, 0.02696940803527832, 0.027022111892700196, 0.02748640060424805, 0.027222015380859374, 0.027275264739990233, 0.027303680419921875, 0.02730828857421875, 0.027068416595458986, 0.02715443229675293, 0.027172864913940428, 0.027177055358886718, 0.027105184555053712, 0.02727731132507324, 0.02707865524291992, 0.027146015167236328, 0.027107648849487305, 0.02731612777709961, 0.02719539260864258, 0.027254783630371093, 0.026989599227905274, 0.027206623077392578, 0.02710527992248535, 0.02705580711364746, 0.027101503372192384, 0.02711142349243164, 0.027115520477294923, 0.027158527374267577, 0.027029504776000978, 0.027073631286621092, 0.027054880142211916, 0.027429183959960936, 0.027014944076538087, 0.027211679458618163, 0.027207807540893556, 0.027229440689086913, 0.027178815841674805, 0.027153343200683595, 0.027054079055786134, 0.027328447341918947, 0.027159679412841798, 0.02725984001159668, 0.027221376419067383, 0.027255136489868163, 0.02774415969848633, 0.027312128067016602, 0.027248640060424805, 0.027289119720458985, 0.02710576057434082, 0.026980287551879884, 0.027149728775024414, 0.027393695831298828, 0.02714931106567383, 0.026916608810424805, 0.02711577606201172, 0.02709702491760254, 0.027283519744873048, 0.027215871810913086, 0.02726092720031738, 0.027082752227783204, 0.027824224472045897, 0.02763065528869629, 0.027529855728149415, 0.02730620765686035, 0.027340799331665038, 0.027288576126098633, 0.027231231689453125, 0.027371616363525392, 0.0275250244140625, 0.027301536560058594, 0.027263391494750978, 0.027310304641723633, 0.030491647720336915, 0.02757484817504883, 0.028051904678344727, 0.027583744049072264, 0.027553567886352538, 0.02748076820373535, 0.027631616592407225, 0.027436256408691406, 0.027574304580688477, 0.027550464630126954, 0.027606719970703124, 0.027486368179321288, 0.028006559371948243, 0.027543071746826173, 0.027666912078857422, 0.027820032119750978, 0.029832895278930665, 0.027758975982666016, 0.027865024566650392, 0.027420095443725586, 0.027435583114624025, 0.027333984375, 0.02758108711242676, 0.027371519088745116, 0.028458240509033204, 0.02715110397338867, 0.027278720855712892, 0.02735686492919922, 0.02728646469116211, 0.0271779842376709, 0.027983104705810547, 0.027250431060791017, 0.027559295654296875, 0.027254560470581054, 0.027403104782104493, 0.028112895965576173, 0.027271167755126953, 0.02725267219543457, 0.027172447204589844, 0.02727884864807129, 0.027231199264526367, 0.027340032577514647, 0.0274337272644043, 0.02752672004699707, 0.02712825584411621, 0.027342559814453125, 0.02703385543823242, 0.027199520111083984, 0.027490304946899413, 0.02726911926269531, 0.027244543075561522, 0.027148288726806642, 0.02717081642150879, 0.027276575088500978, 
0.027568864822387695, 0.027160192489624025, 0.027386079788208006, 0.027346080780029296, 0.027403263092041014, 0.02750668716430664, 0.02765577507019043, 0.027316255569458006, 0.02718070411682129, 0.027271263122558592, 0.027988031387329103, 0.027392576217651367, 0.02727731132507324, 0.027320320129394532, 0.027244543075561522, 0.027264352798461913, 0.02718339157104492, 0.027575775146484374, 0.027390464782714844, 0.02720332717895508, 0.02724675178527832, 0.027128320693969726, 0.027236352920532225, 0.027182336807250976, 0.027089664459228516, 0.027432960510253908, 0.02735049629211426, 0.027275808334350587, 0.027437055587768554, 0.027326143264770508, 0.0271977596282959, 0.02731999969482422, 0.027206207275390627, 0.02740940856933594, 0.027251455307006837, 0.02736511993408203, 0.027384063720703126, 0.027493791580200197, 0.027515232086181642, 0.02750694465637207, 0.02757427215576172, 0.02753324890136719, 0.027463743209838867, 0.027707391738891602, 0.028137344360351563, 0.0274117431640625, 0.02739081573486328, 0.027793439865112304, 0.02758857536315918, 0.02730188751220703, 0.027321632385253907, 0.027529504776000975, 0.027363775253295898, 0.02727731132507324, 0.02718720054626465, 0.027236352920532225, 0.027309600830078124, 0.027271648406982422, 0.02731808090209961, 0.027695295333862304, 0.02740345573425293, 0.027765279769897462, 0.027347232818603514, 0.027230207443237304, 0.027289600372314454, 0.026968063354492186, 0.026756736755371095, 0.026833280563354493, 0.027026592254638673, 0.02677436828613281, 0.027129823684692383, 0.026791967391967774, 0.027060224533081056, 0.02680012893676758, 0.028059648513793944, 0.026985984802246094, 0.027227712631225587, 0.02701398468017578, 0.027227807998657226, 0.026997055053710937, 0.027832447052001955, 0.027221696853637695, 0.027285024642944335, 0.027139968872070312, 0.027202463150024413, 0.0273220157623291, 0.027408735275268555, 0.027494400024414063, 0.02747494316101074, 0.026884576797485352, 0.02705366325378418, 0.02706038475036621, 0.02721990394592285, 0.027161439895629882, 0.027019264221191407, 0.026974271774291993, 0.02731532859802246, 0.026808767318725585, 0.026986656188964845, 0.02678396797180176, 0.02694950485229492, 0.026502880096435547, 0.026546079635620116, 0.02643356704711914, 0.026661632537841796, 0.026549728393554687, 0.02689254379272461, 0.027892576217651368, 0.027045888900756834, 0.027074176788330077, 0.027033983230590822, 0.028399616241455077, 0.027108383178710938, 0.02719024085998535, 0.027851903915405273, 0.027200639724731444, 0.02700057601928711, 0.02699212837219238, 0.026907136917114258, 0.026938400268554687, 0.026725343704223633, 0.026849279403686522, 0.02686067199707031, 0.026673471450805664, 0.0265098876953125, 0.026560224533081055, 0.026387807846069335, 0.0263832950592041, 0.026413248062133788, 0.026595136642456055, 0.02672230339050293, 0.02642153549194336, 0.026521312713623048, 0.026505216598510743, 0.026382335662841795, 0.02694924736022949, 0.02694592094421387, 0.027047935485839843, 0.026771711349487304, 0.02669718360900879, 0.02643587112426758, 0.026564319610595702, 0.0263767032623291, 0.026889024734497072, 0.026626016616821287, 0.027079423904418944, 0.027166559219360353, 0.0271876163482666, 0.02716876792907715, 0.02725872039794922, 0.0270849609375, 0.027801408767700195, 0.02720787239074707, 0.027252288818359376, 0.027178783416748047, 0.027237024307250977, 0.0273756160736084, 0.02754150390625, 0.02725391960144043, 0.02779631996154785, 0.027379711151123046, 0.02768067169189453, 0.0299582405090332, 0.02754159927368164, 0.027199392318725587, 
0.027084800720214845, 0.026973312377929687, 0.027070688247680663, 0.026731136322021485, 0.027228191375732423, 0.027294431686401367, 0.02679827117919922, 0.026933055877685547, 0.026832895278930666, 0.026648096084594727, 0.026423391342163087, 0.026703775405883787, 0.0264586238861084, 0.026628063201904296, 0.026818527221679687, 0.02664860725402832, 0.026466304779052735, 0.02636185646057129, 0.026225696563720702, 0.026256351470947265, 0.02714646339416504, 0.02651705551147461, 0.026476768493652342, 0.026775039672851563, 0.026785696029663086, 0.026685760498046874, 0.026566656112670898, 0.02662006378173828, 0.026668447494506836, 0.02685411262512207, 0.026820608139038086, 0.02691059112548828, 0.027174911499023437, 0.027248767852783202, 0.027021312713623048, 0.02705948829650879, 0.027447071075439453, 0.027360191345214845, 0.027041664123535158, 0.027172000885009765, 0.027062944412231445, 0.027327903747558592, 0.027362207412719726, 0.027371519088745116, 0.027215871810913086, 0.02725273513793945, 0.027305984497070314, 0.027453439712524414, 0.02734192085266113, 0.027392032623291016, 0.02708358383178711, 0.02741004753112793, 0.027269567489624023, 0.027473920822143554, 0.027273216247558595, 0.027592063903808594, 0.027262880325317384, 0.027445791244506836, 0.027291391372680666, 0.02726118469238281, 0.027221792221069335, 0.027234720230102538, 0.02710643196105957, 0.027050880432128905, 0.02702511978149414, 0.027189151763916015, 0.026718591690063475, 0.02678169631958008, 0.026810367584228514, 0.026267263412475587, 0.026747264862060548, 0.026484832763671876, 0.026564512252807617, 0.026441728591918946, 0.02687932777404785, 0.026808992385864257, 0.026562559127807618, 0.02654207992553711, 0.026406112670898436, 0.026393407821655272, 0.02632700729370117, 0.026429088592529296, 0.026519519805908203, 0.0264051513671875, 0.026785888671875, 0.026867616653442384, 0.026906719207763673, 0.026627424240112305, 0.026677343368530275, 0.026555967330932618, 0.026745792388916015, 0.026540096282958985, 0.026477792739868163, 0.026531776428222655, 0.02661667251586914, 0.026492927551269533, 0.026894271850585936, 0.027158592224121095, 0.027055871963500976, 0.027264575958251953, 0.027056415557861327, 0.027105695724487306, 0.02710937690734863, 0.02716409683227539, 0.027235904693603517, 0.0272989444732666, 0.02721574401855469, 0.027053728103637695, 0.02750908851623535, 0.027035648345947266, 0.02728313636779785, 0.027191616058349608, 0.027586496353149415, 0.02710243225097656, 0.027198240280151366, 0.029405248641967772, 0.027677951812744142, 0.027235071182250978, 0.027395103454589845, 0.02722489547729492, 0.027322528839111328, 0.027279264450073244, 0.028936288833618165, 0.028153919219970704, 0.027344831466674803, 0.027099136352539063, 0.02716876792907715, 0.027234304428100587, 0.027323999404907227, 0.02739036750793457, 0.027080703735351562, 0.030386688232421875, 0.027488256454467775, 0.027126848220825197, 0.027138431549072264, 0.027169343948364257, 0.02719068717956543, 0.0272421760559082, 0.02736012840270996, 0.02709097671508789, 0.02710105514526367, 0.027152191162109374, 0.027135871887207032, 0.02740678405761719, 0.0273305606842041, 0.02717081642150879, 0.028616512298583984, 0.028741823196411134, 0.027451391220092772, 0.02733203125, 0.02706447982788086, 0.027132320404052734, 0.027045888900756834, 0.027016992568969726, 0.02689200019836426, 0.026800640106201173, 0.026576480865478515, 0.026613855361938478, 0.02639030456542969, 0.026544479370117186, 0.026726591110229493, 0.026705919265747072, 0.026703872680664063, 0.026760255813598633, 
0.02649388885498047, 0.026416799545288087, 0.026378559112548827, 0.026621984481811523, 0.026480447769165038, 0.026896575927734374, 0.026867712020874023, 0.026755071640014647, 0.027246591567993163, 0.02707196807861328, 0.02719968032836914, 0.026974559783935547, 0.02697420883178711, 0.026762847900390626, 0.02657731246948242, 0.02663350486755371, 0.02687049674987793, 0.0266279354095459, 0.02694153594970703, 0.02762451171875, 0.026545440673828125, 0.02658070373535156, 0.02650111961364746, 0.026632192611694337, 0.026423295974731444, 0.026645503997802734, 0.026424095153808592, 0.026652896881103515, 0.026423295974731444, 0.02657801628112793, 0.026617408752441406, 0.02655254364013672, 0.02647859191894531, 0.026402624130249023, 0.026460416793823244, 0.026410240173339844, 0.026481216430664062, 0.02651558494567871, 0.026632192611694337, 0.026998783111572267, 0.026804256439208984, 0.02652560043334961, 0.02710639953613281, 0.026727392196655275, 0.02665987205505371, 0.02670252799987793, 0.02684534454345703, 0.026726528167724608, 0.027131904602050783, 0.026841087341308592, 0.02692095947265625, 0.026865472793579103, 0.026847423553466795, 0.026889888763427735, 0.02688604736328125, 0.026745279312133788, 0.027150335311889647, 0.027336223602294922, 0.027470304489135743, 0.027448415756225586, 0.027456447601318358, 0.027367391586303712, 0.02732646369934082, 0.027602943420410156, 0.02743654441833496, 0.027412992477416992, 0.027648000717163085, 0.02749001693725586, 0.027498655319213867, 0.027226240158081054, 0.027142143249511717, 0.027312128067016602, 0.02737766456604004, 0.027317823410034178, 0.027373119354248045, 0.027314271926879883, 0.027406272888183595, 0.027394912719726563, 0.0270599365234375, 0.026933536529541016, 0.02698854446411133, 0.02711881637573242, 0.027253536224365233, 0.02725856018066406, 0.027279680252075195, 0.027356479644775392, 0.027388608932495118, 0.027416576385498048, 0.02743052864074707, 0.02742038345336914, 0.027406496047973634, 0.027322463989257813, 0.027402271270751954]",tokens/s,36.88889353110888,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,7186.915328,7954.366464,0.0,7551.844352,7485.12768,s,1,13.1071796875,13.1071796875,0.0,13.1071796875,13.1071796875,13.1071796875,13.1071796875,[13.1071796875],,kWh,0.00016715702960837006,1.8431183839013684e-05,5.210976390999933e-05,0.00023769797735738308,,MB,2837.659648,8254.2592,0.0,7837.057024,7735.356416,s,10,3.591346343994141,0.35913463439941407,0.0008169131912510408,0.3591637573242188,0.36015645751953124,0.3602684906005859,0.3603581170654297,"[0.3586661682128906, 0.3581451721191406, 0.3577377624511719, 0.3592274169921875, 0.3586064758300781, 0.36013156127929685, 0.35910009765625, 0.35944796752929686, 0.36038052368164064, 
0.3599031982421875]",tokens/s,712.8245941194517,kWh,1.0519604659375167e-05,1.1601177320764338e-06,6.9666127161428185e-06,1.8646335107594418e-05,tokens/kWh,13729239.473752374,MB,2846.101504,8275.23072,0.0,7858.028544,7759.281152,s,10,26.238495361328123,2.6238495361328122,0.006674512428565961,2.6234400634765622,2.6332176513671874,2.6334500122070312,2.633635900878906,"[2.612402587890625, 2.614532958984375, 2.62148388671875, 2.622002197265625, 2.62136767578125, 2.6283408203125, 2.6248779296875, 2.626638916015625, 2.633682373046875, 2.633166015625]",tokens/s,24.010523138782265,kWh,7.70417478877065e-05,8.497814796872888e-06,5.1163600454656934e-05,0.00013670316313923634,tokens/kWh,460852.54030173813,,s,630,26.235626274108885,0.04164385122874427,0.00039486079044677083,0.041620832443237304,0.042129242324829105,0.042256020736694336,0.04260664470672607,"[0.042026016235351564, 0.04112892913818359, 0.04129167938232422, 0.04110960006713867, 0.04092428970336914, 0.04094668960571289, 0.040906593322753905, 0.04088220977783203, 0.04089753723144531, 0.04098937606811524, 0.04094771194458008, 0.040980800628662106, 0.04109721755981445, 0.04123353576660156, 0.04084566497802734, 0.0407782096862793, 0.04085971069335938, 0.04112118530273438, 0.04126166534423828, 0.04152060699462891, 0.04156662368774414, 0.041785472869873046, 0.04158054351806641, 0.04152115249633789, 0.04150697708129883, 0.0417380485534668, 0.04156150436401367, 0.04147004699707031, 0.04163836669921875, 0.0414474868774414, 0.04119571304321289, 0.04111532974243164, 0.04115596771240235, 0.041237377166748045, 0.04122003173828125, 0.041310176849365235, 0.04138979339599609, 0.04132211303710937, 0.04185971069335938, 0.04165814590454102, 0.04154185485839844, 0.0421723518371582, 0.041972927093505856, 0.04188227081298828, 0.04171798324584961, 0.04161049652099609, 0.041681663513183594, 0.041617568969726564, 0.041629344940185546, 0.041457855224609375, 0.041436992645263675, 0.04149881744384765, 0.04145356750488281, 0.04142454528808594, 0.04190857696533203, 0.042076160430908206, 0.0421396484375, 0.042033153533935545, 0.04203033447265625, 0.04198681640625, 0.0417259521484375, 0.0421638069152832, 0.041955169677734376, 0.042024478912353516, 0.0410912971496582, 0.04132275390625, 0.041332736968994144, 0.04106607818603516, 0.040939712524414064, 0.040947425842285154, 0.040976318359375, 0.04114694213867188, 0.04117299270629883, 0.04084940719604492, 0.040812545776367185, 0.040822654724121096, 0.0411280632019043, 0.04127129745483398, 0.041129150390625, 0.04108502578735351, 0.04109782409667969, 0.0414117431640625, 0.04151609420776367, 0.04155177688598633, 0.04157171249389648, 0.04179417419433594, 0.0415984001159668, 0.04155859375, 0.041653663635253906, 0.04156681442260742, 0.041438526153564456, 0.041398880004882815, 0.041226337432861325, 0.04140192031860351, 0.04140911865234375, 0.04132182312011719, 0.04123289489746094, 0.041217662811279296, 0.04131651306152344, 0.04168262481689453, 0.04275868988037109, 0.04182342529296875, 0.041705440521240235, 0.041644863128662106, 0.04185910415649414, 0.041770847320556644, 0.04165155029296875, 0.04138022232055664, 0.04154617691040039, 0.04147382354736328, 0.04152751922607422, 0.04156729507446289, 0.041433727264404294, 0.04136991882324219, 0.041592159271240235, 0.04225606536865235, 0.04219535827636719, 0.042095008850097655, 0.041946464538574216, 0.04190700912475586, 0.041984001159667966, 0.04174169540405273, 0.04164672088623047, 0.041940990447998046, 0.041809921264648435, 0.04156825637817383, 0.04210678482055664, 0.04101980972290039, 
0.041234432220458986, 0.04114227294921875, 0.04115456008911133, 0.041224193572998044, 0.04103987121582031, 0.04097395324707031, 0.041232769012451174, 0.04152630233764648, 0.041470207214355466, 0.041586559295654295, 0.04162380981445313, 0.04145827102661133, 0.04179257583618164, 0.041651294708251956, 0.04161478424072266, 0.041412033081054685, 0.04154627227783203, 0.04144377517700195, 0.04115251159667969, 0.0411602897644043, 0.041494945526123046, 0.04107059097290039, 0.041262432098388674, 0.04141926574707031, 0.04138598251342773, 0.041387680053710935, 0.04173577499389648, 0.041802654266357424, 0.04198559951782226, 0.04176736068725586, 0.04180377578735352, 0.041984001159667966, 0.041777153015136716, 0.04160217666625977, 0.04134966278076172, 0.0416525764465332, 0.041570079803466796, 0.04141632080078125, 0.04167689514160156, 0.041691646575927735, 0.041583839416503905, 0.04161395263671875, 0.04151516723632812, 0.04190617752075195, 0.04191231918334961, 0.04172390365600586, 0.04214169692993164, 0.04201465606689453, 0.04191648101806641, 0.04192256164550781, 0.041637889862060545, 0.04213350296020508, 0.0419936637878418, 0.041663040161132814, 0.04150207901000977, 0.04164467239379883, 0.04163382339477539, 0.04196883010864258, 0.042267425537109375, 0.04205977630615235, 0.04204515075683594, 0.04206182479858398, 0.04179702377319336, 0.041570911407470705, 0.041463489532470706, 0.041430561065673825, 0.041564960479736325, 0.041560001373291015, 0.04138396835327148, 0.04140035247802734, 0.041332736968994144, 0.04118259048461914, 0.041007614135742186, 0.04108639907836914, 0.04134064102172851, 0.04118422317504883, 0.040839134216308595, 0.04086345672607422, 0.04089478302001953, 0.040852481842041016, 0.0410747184753418, 0.041414878845214845, 0.04163670349121094, 0.04155174255371094, 0.04154531097412109, 0.04159123229980469, 0.04167679977416992, 0.04162761688232422, 0.04170163345336914, 0.04170121765136719, 0.04186447906494141, 0.04171763229370117, 0.04159363174438477, 0.041465919494628904, 0.041699359893798825, 0.041387935638427735, 0.04140764617919922, 0.04179824066162109, 0.04179935836791992, 0.041693088531494144, 0.04192524719238281, 0.04181615829467773, 0.04180985641479492, 0.04179983901977539, 0.041636798858642576, 0.04150934219360351, 0.041686912536621094, 0.04171219253540039, 0.04146176147460937, 0.04153548812866211, 0.04181942367553711, 0.04178607940673828, 0.04181129455566406, 0.041860958099365235, 0.04178531265258789, 0.04171395111083984, 0.041966049194335935, 0.042264766693115234, 0.04212876892089844, 0.042324512481689454, 0.04229654312133789, 0.04212201690673828, 0.04220662307739258, 0.041960094451904295, 0.04217795181274414, 0.04144809722900391, 0.04153580856323242, 0.041562110900878906, 0.041232513427734374, 0.04119948959350586, 0.040984447479248044, 0.04095558547973633, 0.0409719352722168, 0.041003807067871094, 0.04105011367797851, 0.0415494384765625, 0.041271774291992185, 0.04154137420654297, 0.04185068893432617, 0.041671009063720704, 0.04150009536743164, 0.04168352127075195, 0.04151417541503906, 0.04141734313964844, 0.04151302337646484, 0.04176406478881836, 0.041159584045410154, 0.04117913436889648, 0.041596927642822266, 0.04163792037963867, 0.04143036651611328, 0.04139276885986328, 0.04121916961669922, 0.04142979049682617, 0.04135260772705078, 0.041223136901855466, 0.04160812759399414, 0.04210515213012695, 0.04188838577270508, 0.04192646408081055, 0.041807456970214846, 0.041990238189697264, 0.0417011833190918, 0.04172556686401367, 0.04172041702270508, 0.041896129608154295, 0.04176911926269531, 
0.041574111938476564, 0.04138764953613281, 0.04140816116333008, 0.041423423767089844, 0.04137004852294922, 0.04163516616821289, 0.042283679962158205, 0.04229523086547852, 0.04217433547973633, 0.0419263687133789, 0.041951713562011717, 0.041899585723876955, 0.04175302505493164, 0.04157235336303711, 0.041916286468505856, 0.04179776000976562, 0.04188563156127929, 0.041807937622070315, 0.041852928161621096, 0.041996448516845704, 0.04199222564697266, 0.0416910400390625, 0.041600574493408204, 0.04161990356445312, 0.04147792053222656, 0.04165382385253906, 0.04177552032470703, 0.041568286895751955, 0.04152239990234375, 0.041675777435302735, 0.04148019027709961, 0.04127084732055664, 0.04152556610107422, 0.04172198486328125, 0.04167459106445313, 0.041522430419921874, 0.04123740768432617, 0.04107263946533203, 0.04107468795776367, 0.04130815887451172, 0.041506526947021484, 0.04188931274414062, 0.0416343994140625, 0.04159708786010742, 0.04158793640136719, 0.041616031646728516, 0.04136767959594727, 0.042016769409179686, 0.041629695892333986, 0.041744384765625, 0.04171494293212891, 0.04150502395629883, 0.041627296447753905, 0.041583454132080075, 0.04151295852661133, 0.04157628631591797, 0.04169510269165039, 0.041998241424560545, 0.04224761581420899, 0.04216019058227539, 0.04197465515136719, 0.04184179306030274, 0.04193369674682617, 0.04182425689697265, 0.0416130256652832, 0.041396289825439456, 0.041576480865478514, 0.04204563140869141, 0.04145977783203125, 0.041353153228759765, 0.041320289611816406, 0.04170563125610351, 0.04228915023803711, 0.04219030380249023, 0.042207775115966795, 0.04202310562133789, 0.04217782211303711, 0.042207775115966795, 0.042188800811767575, 0.0421739501953125, 0.041949695587158206, 0.042008575439453126, 0.04191027069091797, 0.04205548858642578, 0.04115584182739258, 0.04101433563232422, 0.040955486297607424, 0.040989086151123046, 0.04103359985351562, 0.0411069450378418, 0.04144998550415039, 0.04145900726318359, 0.04181488037109375, 0.042255966186523435, 0.04200473785400391, 0.04170310211181641, 0.04170713424682617, 0.041687744140625, 0.041621505737304686, 0.041455486297607425, 0.041398494720458985, 0.041478046417236326, 0.0413675537109375, 0.041285823822021485, 0.041123615264892575, 0.041113632202148434, 0.04112947082519531, 0.04111206436157227, 0.04122793579101563, 0.04127593612670898, 0.04159616088867187, 0.041666366577148437, 0.04176300811767578, 0.04162412643432617, 0.041918464660644535, 0.041889694213867186, 0.04194441604614258, 0.04184086227416992, 0.041896480560302735, 0.04190412902832031, 0.041885345458984376, 0.041809886932373044, 0.04177756881713867, 0.04161532974243164, 0.0415346565246582, 0.04161347198486328, 0.04215465545654297, 0.04205350494384766, 0.04213977432250977, 0.041850879669189454, 0.041772991180419924, 0.04178700637817383, 0.04169574356079102, 0.041612575531005856, 0.041466175079345705, 0.04160054397583008, 0.041890625, 0.04165849685668945, 0.041533313751220706, 0.04153548812866211, 0.041704830169677735, 0.04199692916870117, 0.042336254119873046, 0.04243865585327149, 0.042692607879638675, 0.04242803192138672, 0.042060543060302734, 0.041338302612304687, 0.04152156829833984, 0.04135708618164063, 0.04134143829345703, 0.041496318817138673, 0.0416995849609375, 0.040982688903808594, 0.041236320495605466, 0.041316352844238284, 0.041240577697753904, 0.0413139533996582, 0.041374046325683596, 0.04152115249633789, 0.04152115249633789, 0.041780479431152345, 0.04166883087158203, 0.041707489013671876, 0.041663040161132814, 0.0417179183959961, 0.041465694427490235, 
0.04157843017578125, 0.04151424026489258, 0.04129465484619141, 0.041529342651367186, 0.04148179244995117, 0.04143750381469727, 0.04164006423950195, 0.041474014282226565, 0.041453025817871095, 0.04135692977905273, 0.041392704010009766, 0.04143942260742187, 0.04210214233398438, 0.04203168106079101, 0.04192076873779297, 0.04271923065185547, 0.04182950210571289, 0.04196441650390625, 0.04192361450195312, 0.04173923110961914, 0.04190412902832031, 0.04188979339599609, 0.041664737701416016, 0.041496353149414064, 0.04145471954345703, 0.04164492797851563, 0.04215193557739258, 0.042076160430908206, 0.04210620880126953, 0.042133953094482424, 0.04210665512084961, 0.04194144058227539, 0.042040958404541015, 0.04196515274047852, 0.041748767852783204, 0.0415810546875, 0.04169424057006836, 0.041724895477294924, 0.041627647399902344, 0.04152467346191406, 0.04210131072998047, 0.042614208221435544, 0.042161857604980466, 0.04196380615234375, 0.04172188949584961, 0.041777153015136716, 0.04145971298217774, 0.041452896118164065, 0.0415401611328125, 0.04136969757080078, 0.041527294158935545, 0.04141260910034179, 0.041191390991210934, 0.04105587387084961, 0.041046432495117184, 0.041299968719482424, 0.041588737487792966, 0.0414961929321289, 0.041404800415039064, 0.04258812713623047, 0.041758750915527346, 0.04192393493652344, 0.041591457366943356, 0.041637889862060545, 0.04155347061157227, 0.041533119201660154, 0.04156902313232422, 0.04181411361694336, 0.041635425567626956, 0.041643489837646486, 0.04170857620239258, 0.041594688415527346, 0.04144063949584961, 0.04145011138916015, 0.04162345504760742, 0.041893985748291014, 0.04221305465698242, 0.04195155334472656, 0.04209868621826172, 0.04194918441772461, 0.04175795364379883, 0.041863582611083985, 0.04173379135131836, 0.04162015914916992, 0.04166451263427735, 0.04157440185546875, 0.04147727966308594, 0.04155068969726562, 0.04140851211547852, 0.041596927642822266, 0.042153312683105466, 0.04228518295288086, 0.0422446403503418, 0.04233216094970703, 0.045592575073242186, 0.04141875076293945, 0.041768959045410156, 0.041752574920654296, 0.04182198333740234, 0.04184688186645508, 0.04196160125732422, 0.04236697769165039, 0.04240531158447266, 0.042267200469970706, 0.042288734436035154, 0.04295395278930664, 0.04134755325317383, 0.041925952911376956, 0.041726879119873043, 0.041490432739257815, 0.04134668731689453, 0.041404705047607425, 0.0412360954284668, 0.04120419311523438, 0.04119270324707031, 0.04101763153076172, 0.041113983154296874, 0.041240833282470704, 0.041600353240966795, 0.041517566680908204, 0.04174748611450195, 0.04166726303100586, 0.041674079895019533, 0.04168153762817383, 0.042317279815673826, 0.04142166519165039, 0.04156111907958984, 0.04138700866699219, 0.04154364776611328, 0.04135935974121094, 0.04168499374389648, 0.04174028778076172, 0.041488384246826174, 0.04146780776977539, 0.0414944953918457, 0.041513088226318356, 0.04161321640014649, 0.04202905654907227, 0.0419857292175293, 0.041898399353027346, 0.04211711883544922, 0.041835807800292966, 0.042154975891113285, 0.04179942321777344, 0.04187955093383789, 0.04176601409912109, 0.04190092849731445, 0.041535743713378905, 0.04151679992675781, 0.04152441787719727, 0.04174726486206055, 0.042040897369384767, 0.04226911926269531, 0.04255052947998047, 0.04216105651855469, 0.04219068908691406, 0.042223617553710936, 0.042147838592529296, 0.04205567932128906, 0.041852191925048826, 0.0418741455078125, 0.04177331161499023, 0.04196940612792969, 0.04248508834838867, 0.04269321441650391, 0.04233660888671875, 0.04242607879638672, 
0.042392929077148436]",tokens/s,24.013148892188905,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2050.514944,2155.741184,0.0,1753.219072,1633.407488,s,1,9.422015625,9.422015625,0.0,9.422015625,9.422015625,9.422015625,9.422015625,[9.422015625],,kWh,5.378010735415727e-05,5.925084828566777e-06,1.5450290137997147e-05,7.51554823207212e-05,,MB,1946.54208,2241.724416,0.0,1824.52224,1762.836992,s,10,0.8206128005981446,0.08206128005981446,0.000406875530544768,0.08227532958984375,0.08245298385620117,0.08247095680236817,0.08248533515930176,"[0.08142982482910156, 0.08195062255859376, 0.08238175964355468, 0.08130518341064454, 0.08228915405273438, 0.0822955551147461, 0.08244898986816407, 0.08226150512695313, 0.08176127624511718, 0.08248892974853515]",tokens/s,3119.6198720444236,kWh,2.4314441579889094e-06,2.681156259530379e-07,1.6134103816363855e-06,4.312970165578333e-06,tokens/kWh,59355847.6344509,MB,1953.910784,2430.468096,0.0,2013.26592,1853.210112,s,10,13.407829223632813,1.3407829223632812,0.015119034435602554,1.3336165161132811,1.3607123657226563,1.3631648986816407,1.3651269250488283,"[1.3356656494140624, 1.3309630126953125, 1.3246400146484374, 1.32935205078125, 1.3315673828125, 1.32207275390625, 1.3532647705078125, 1.365617431640625, 1.3601673583984375, 1.354518798828125]",tokens/s,46.987471983127136,kWh,3.919810745076143e-05,4.323188169288976e-06,2.1141996711563042e-05,6.466329233161345e-05,tokens/kWh,974277.6423587656,,s,630,13.404014467239405,0.021276213440062506,0.0004500678012159599,0.021182144165039062,0.021725183486938478,0.02193448667526245,0.023002092094421386,"[0.021499135971069335, 0.021433151245117188, 0.021253631591796874, 0.02114009666442871, 0.021184192657470704, 0.021190975189208986, 0.021065664291381837, 0.02102070426940918, 0.020989984512329102, 0.020955135345458984, 0.02101862335205078, 0.020785152435302736, 0.020824064254760744, 0.020858879089355468, 0.02087321662902832, 0.02089132881164551, 0.020846912384033203, 0.02083839988708496, 0.020955135345458984, 0.020971168518066408, 0.02087356758117676, 0.020952415466308594, 0.02094761657714844, 0.021409887313842774, 0.021294656753540038, 0.021606752395629883, 0.021501951217651367, 0.021466495513916016, 0.02143631935119629, 0.02140847969055176, 0.021337343215942384, 0.02128767967224121, 0.021153791427612305, 0.02125391960144043, 0.021323999404907226, 0.021206464767456055, 0.021282655715942383, 0.021295520782470705, 0.02114796829223633, 0.021120416641235353, 0.021092960357666016, 0.020954912185668945, 0.02151759910583496, 0.021121183395385743, 0.02130534362792969, 0.021105567932128907, 0.02112006378173828, 0.021010847091674806, 0.020999807357788086, 0.02105833625793457, 0.020935712814331056, 0.02097587203979492, 0.021252832412719726, 0.02154275131225586, 0.021239200592041017, 0.021506175994873047, 0.023293983459472655, 0.021336671829223632, 0.021161983489990235, 0.021147232055664062, 
0.020911808013916015, 0.021686784744262694, 0.021154016494750977, 0.021424064636230467, 0.021057600021362304, 0.020703231811523438, 0.020911808013916015, 0.020796831130981446, 0.020908960342407225, 0.020855968475341796, 0.020888256072998046, 0.02079759979248047, 0.02110873603820801, 0.02089779281616211, 0.020916223526000977, 0.02096691131591797, 0.021058048248291016, 0.02099932861328125, 0.02096553611755371, 0.020988607406616212, 0.020976736068725587, 0.021089183807373048, 0.021327072143554688, 0.02123036766052246, 0.021391359329223633, 0.021216863632202147, 0.021277088165283203, 0.021747711181640626, 0.0240064640045166, 0.021260480880737304, 0.021126623153686522, 0.021137247085571288, 0.021026687622070314, 0.020892255783081053, 0.021024896621704103, 0.020883295059204103, 0.02100067138671875, 0.021082944869995117, 0.020831199645996095, 0.021493696212768556, 0.021263744354248045, 0.021872831344604493, 0.021048992156982423, 0.021070304870605468, 0.021036479949951174, 0.021166656494140627, 0.02098806381225586, 0.02083468818664551, 0.02096726417541504, 0.020920320510864256, 0.020998144149780275, 0.020964448928833007, 0.02106051254272461, 0.020954336166381836, 0.020982559204101563, 0.02103091239929199, 0.020989952087402345, 0.020944896697998046, 0.02109414482116699, 0.021055551528930665, 0.022040800094604494, 0.021241823196411134, 0.021028863906860353, 0.020983327865600587, 0.02101193618774414, 0.020871936798095705, 0.021119552612304686, 0.021024511337280272, 0.020919872283935548, 0.021043487548828125, 0.0209800968170166, 0.021169792175292967, 0.020895679473876952, 0.02093894386291504, 0.020846879959106446, 0.020981760025024415, 0.02096895980834961, 0.020916000366210937, 0.02092310333251953, 0.021015615463256837, 0.021003040313720703, 0.02097577667236328, 0.020963327407836914, 0.020924415588378906, 0.02086502456665039, 0.020856416702270508, 0.02091868782043457, 0.020899839401245117, 0.020829599380493165, 0.020679040908813475, 0.02066966438293457, 0.020706304550170897, 0.020758527755737305, 0.020739328384399413, 0.020918495178222658, 0.02096348762512207, 0.021097856521606444, 0.02125721549987793, 0.021078016281127928, 0.020955135345458984, 0.020926464080810548, 0.020968448638916014, 0.021107648849487303, 0.02126422309875488, 0.021944543838500977, 0.021279808044433593, 0.021413888931274414, 0.02119980812072754, 0.02114905548095703, 0.02112575912475586, 0.021714912414550782, 0.02158095932006836, 0.02115862464904785, 0.02096463966369629, 0.021152320861816405, 0.021000543594360353, 0.020903839111328124, 0.020869375228881836, 0.02094060707092285, 0.021274240493774414, 0.021012800216674805, 0.0209715518951416, 0.020840192794799806, 0.020836639404296874, 0.020770816802978515, 0.020824064254760744, 0.020746240615844725, 0.020918272018432618, 0.021097600936889647, 0.021372928619384765, 0.02120460891723633, 0.02105792045593262, 0.021124095916748048, 0.02105855941772461, 0.021157119750976564, 0.021021440505981447, 0.02096291160583496, 0.020930976867675782, 0.021050432205200195, 0.021089439392089845, 0.020934431076049805, 0.021198848724365234, 0.021038240432739257, 0.021123647689819336, 0.020967039108276367, 0.02114633560180664, 0.02105881690979004, 0.021872768402099608, 0.021211711883544922, 0.021223424911499023, 0.02127187156677246, 0.021391263961791994, 0.021151615142822267, 0.021113759994506837, 0.022236223220825194, 0.02276211166381836, 0.02127027130126953, 0.02128656005859375, 0.021195871353149414, 0.021156671524047852, 0.021275648117065428, 0.02102617645263672, 0.02115836715698242, 
0.021016735076904297, 0.02088755226135254, 0.020817920684814452, 0.020878463745117187, 0.02076915168762207, 0.02102684783935547, 0.020961759567260742, 0.0210402889251709, 0.021160032272338865, 0.021388032913208007, 0.0212541446685791, 0.02118182373046875, 0.021086816787719728, 0.020944927215576174, 0.020846559524536134, 0.021043231964111328, 0.020798912048339845, 0.020823808670043947, 0.020935264587402344, 0.020793344497680662, 0.020799711227416993, 0.02082371139526367, 0.020719263076782228, 0.02074608039855957, 0.02077187156677246, 0.02080748748779297, 0.02083020782470703, 0.020996095657348633, 0.02080073547363281, 0.02128700828552246, 0.021192287445068358, 0.020924928665161133, 0.020938655853271485, 0.020969568252563478, 0.02084828758239746, 0.02086128044128418, 0.02083020782470703, 0.020682752609252928, 0.020791296005249024, 0.0208035831451416, 0.020801279067993166, 0.02089971160888672, 0.02118646430969238, 0.021532447814941406, 0.02120159912109375, 0.021098495483398438, 0.02100239944458008, 0.02096931266784668, 0.02089583969116211, 0.020852447509765625, 0.020799072265625, 0.020785760879516602, 0.02077916717529297, 0.0212739200592041, 0.02172979164123535, 0.020927999496459963, 0.020885440826416017, 0.020935264587402344, 0.020789087295532225, 0.02100649642944336, 0.02299875259399414, 0.0211376953125, 0.021073919296264648, 0.02112291145324707, 0.020948831558227538, 0.02099964714050293, 0.021013343811035156, 0.021012479782104493, 0.021399551391601563, 0.02143027114868164, 0.021618688583374023, 0.021391679763793945, 0.021129247665405273, 0.020958879470825195, 0.02149990463256836, 0.02113859176635742, 0.021010400772094727, 0.021053760528564454, 0.022131071090698243, 0.021042816162109373, 0.020988479614257812, 0.020983808517456053, 0.021137407302856445, 0.021094400405883788, 0.021192703247070312, 0.02115497589111328, 0.021275487899780274, 0.021381120681762695, 0.021290143966674804, 0.021242719650268554, 0.021656991958618164, 0.02124777603149414, 0.02121343994140625, 0.021260671615600586, 0.020983808517456053, 0.02104230308532715, 0.02103500747680664, 0.02114758491516113, 0.021045631408691406, 0.020935232162475587, 0.0209300479888916, 0.021082304000854493, 0.021493631362915037, 0.02088751983642578, 0.020867551803588867, 0.02095238494873047, 0.020900543212890626, 0.020981088638305664, 0.020961664199829103, 0.021046848297119142, 0.02090671920776367, 0.020914176940917968, 0.020707328796386718, 0.020971519470214844, 0.02091334342956543, 0.02111296081542969, 0.0211231689453125, 0.020961088180541994, 0.021403648376464843, 0.020980159759521486, 0.021090175628662108, 0.020957664489746095, 0.020797279357910155, 0.02078940773010254, 0.02073401641845703, 0.02078745651245117, 0.020713151931762694, 0.02083785629272461, 0.020802080154418947, 0.020813823699951172, 0.020674016952514647, 0.021391584396362306, 0.020990272521972657, 0.020924287796020506, 0.02123081588745117, 0.02102774429321289, 0.020979040145874022, 0.02093244743347168, 0.02099078369140625, 0.02083184051513672, 0.020803552627563476, 0.02085523223876953, 0.020754432678222655, 0.020770816802978515, 0.02115590476989746, 0.021733312606811522, 0.021147232055664062, 0.02098627281188965, 0.02087936019897461, 0.020959232330322267, 0.02102272033691406, 0.020907039642333983, 0.02089241600036621, 0.02103273582458496, 0.020836767196655274, 0.02126473617553711, 0.02163462448120117, 0.02091004753112793, 0.020986656188964843, 0.020987903594970703, 0.02099404716491699, 0.021000192642211913, 0.021437728881835937, 0.020944639205932616, 0.021012832641601562, 
0.020991743087768553, 0.020866016387939453, 0.02085660743713379, 0.020987424850463867, 0.020803455352783204, 0.020877248764038087, 0.021259040832519532, 0.021428159713745117, 0.0227574405670166, 0.021765344619750975, 0.021213024139404298, 0.021436416625976562, 0.02135545539855957, 0.022165504455566407, 0.021725183486938478, 0.021646976470947266, 0.021406080245971678, 0.021450143814086914, 0.021445119857788086, 0.021297119140625, 0.02131315231323242, 0.021198720932006837, 0.021645952224731445, 0.02147327995300293, 0.02152448081970215, 0.021379072189331053, 0.021368959426879882, 0.021626752853393556, 0.02146099281311035, 0.021559488296508788, 0.021491519927978514, 0.021559295654296876, 0.02176527976989746, 0.021635936737060546, 0.021746784210205077, 0.02161347198486328, 0.021667072296142578, 0.021606975555419922, 0.021653696060180663, 0.021725183486938478, 0.021534015655517578, 0.02160095977783203, 0.021559295654296876, 0.021778432846069336, 0.022031583786010743, 0.021988128662109373, 0.021716543197631836, 0.021720640182495116, 0.021922687530517578, 0.021558399200439452, 0.021879680633544923, 0.021843967437744142, 0.021831680297851562, 0.021932479858398437, 0.02301919937133789, 0.022435712814331054, 0.02196268844604492, 0.021699071884155274, 0.0216944637298584, 0.021632896423339844, 0.021528703689575195, 0.021512128829956054, 0.021635135650634765, 0.02164531135559082, 0.021936128616333008, 0.021793983459472657, 0.02185651206970215, 0.02173561668395996, 0.023003456115722656, 0.021611583709716796, 0.021529600143432616, 0.02147123146057129, 0.02152448081970215, 0.02147327995300293, 0.0216760311126709, 0.021623104095458985, 0.02142790412902832, 0.021547008514404296, 0.02154287910461426, 0.021401632308959962, 0.021456895828247072, 0.021489664077758788, 0.021560447692871094, 0.021587135314941407, 0.02123129653930664, 0.021346303939819337, 0.02146713638305664, 0.02195039939880371, 0.022417472839355468, 0.021579616546630858, 0.021717151641845702, 0.021960704803466798, 0.021495391845703125, 0.021285120010375976, 0.021399391174316405, 0.021411903381347658, 0.02128102493286133, 0.021236831665039063, 0.021400447845458984, 0.02141801643371582, 0.021820608139038085, 0.021482303619384767, 0.021425952911376955, 0.021280511856079102, 0.021449184417724608, 0.02145280075073242, 0.02141798400878906, 0.021356544494628905, 0.02141798400878906, 0.02135024070739746, 0.021399168014526366, 0.02154550361633301, 0.02296339225769043, 0.022694623947143555, 0.021915327072143553, 0.02174198341369629, 0.021668415069580078, 0.021722944259643554, 0.02154719924926758, 0.021624607086181642, 0.021571807861328125, 0.021671199798583986, 0.021797599792480468, 0.021581216812133788, 0.021498464584350587, 0.021552160263061525, 0.02159715270996094, 0.025251007080078124, 0.022124832153320312, 0.021666336059570312, 0.022163455963134765, 0.021661792755126953, 0.021749664306640625, 0.021708351135253906, 0.021504383087158203, 0.02156662368774414, 0.02167491149902344, 0.02166169548034668, 0.021595455169677733, 0.021506752014160156, 0.021604352951049805, 0.02161177635192871, 0.021422367095947265, 0.021569887161254884, 0.023205535888671875, 0.024051328659057618, 0.021455968856811523, 0.02147635269165039, 0.021354591369628906, 0.02135862350463867, 0.021444896697998046, 0.021383743286132812, 0.02134182357788086, 0.021298688888549806, 0.021344928741455077, 0.02118560028076172, 0.021019071578979493, 0.02098601531982422, 0.021227872848510743, 0.021182464599609374, 0.021093536376953125, 0.021027679443359374, 0.0210882568359375, 0.021065376281738282, 
0.02111712074279785, 0.02113961601257324, 0.020969472885131835, 0.021130912780761717, 0.02146544075012207, 0.02145894432067871, 0.02142416000366211, 0.021493728637695313, 0.021257375717163084, 0.021294015884399414, 0.02149295997619629, 0.021309759140014647, 0.021386720657348632, 0.022012832641601563, 0.021407487869262696, 0.021578367233276368, 0.02151318359375, 0.02142438316345215, 0.02145916748046875, 0.021399871826171875, 0.021302879333496092, 0.021698976516723634, 0.02139664077758789, 0.021431264877319337, 0.02170662307739258, 0.021024768829345702, 0.020841728210449217, 0.020938783645629882, 0.02116067123413086, 0.020863231658935548, 0.02101740837097168, 0.020843456268310547, 0.020733951568603515, 0.020826175689697267, 0.021208736419677736, 0.021358879089355468, 0.02149692726135254, 0.021494688034057616, 0.02149737548828125, 0.021520864486694335, 0.02176736068725586, 0.02144748878479004, 0.021381120681762695, 0.021436416625976562, 0.021403648376464843, 0.021360639572143555, 0.021381120681762695, 0.02145280075073242, 0.02177145576477051, 0.021725439071655275, 0.021659679412841797, 0.02170319938659668, 0.021876735687255858, 0.021942272186279296, 0.021538816452026367, 0.021585920333862304, 0.021972768783569335, 0.021806432723999025, 0.02173632049560547, 0.021661184310913087, 0.02157119941711426, 0.02160220718383789, 0.021584863662719726, 0.021595327377319336, 0.021477888107299805, 0.02136000061035156, 0.021515199661254883, 0.021590015411376954, 0.021595359802246094, 0.022023040771484374, 0.02171446418762207, 0.02212451171875, 0.02175347137451172, 0.02158883285522461, 0.02156332778930664, 0.02167807960510254, 0.02186444854736328, 0.021609760284423827]",tokens/s,47.000844526076634,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1389.228032,1490.944,0.0,1088.421888,1083.532288,s,1,8.676767578125,8.676767578125,0.0,8.676767578125,8.676767578125,8.676767578125,8.676767578125,[8.676767578125],,kWh,4.08753520875166e-05,4.499365037285521e-06,1.2165843066000437e-05,5.754056019080256e-05,,MB,1533.063168,1608.384512,0.0,1191.182336,1170.870784,s,10,0.5576947212219239,0.05576947212219239,0.00012627627599519708,0.05575038528442383,0.05583950157165527,0.055970741844177246,0.05607573406219483,"[0.05610198211669922, 0.05571430587768555, 0.055684799194335936, 0.055790847778320315, 0.05558473587036133, 0.05581033706665039, 0.05575062561035156, 0.05572911834716797, 0.05577782440185547, 0.055750144958496096]",tokens/s,4590.324961998874,kWh,1.660689161150532e-06,1.831446087870339e-07,1.0987398310113607e-06,2.9425736009489263e-06,tokens/kWh,86998673.51404385,MB,1576.697856,1650.327552,0.0,1233.125376,1170.873344,s,10,14.597031250000002,1.459703125,0.006588272875754845,1.4567598876953125,1.4694689819335938,1.4694914489746094,1.469509422607422,"[1.4600758056640626, 1.455873779296875, 1.454390380859375, 1.4522021484375, 1.456242431640625, 1.4531844482421874, 1.45727734375, 
1.4694639892578125, 1.4688070068359376, 1.469513916015625]",tokens/s,43.159460934907564,kWh,4.2395758664682403e-05,4.675883574886989e-06,1.8683998217388946e-05,6.575564045695834e-05,tokens/kWh,958092.7136013206,,s,630,14.594776052474975,0.023166311194404723,0.0004486240632147607,0.0230644006729126,0.023596992111206052,0.023773387145996094,0.025257497730255138,"[0.024798944473266603, 0.023200031280517577, 0.023218559265136718, 0.02323801612854004, 0.022800703048706055, 0.02286591911315918, 0.022877248764038086, 0.02290892791748047, 0.022789056777954102, 0.022980607986450196, 0.023439359664916993, 0.023587039947509766, 0.023303327560424806, 0.023042688369750975, 0.02375257682800293, 0.025863807678222658, 0.023387807846069336, 0.023097856521606445, 0.02284556770324707, 0.023005407333374025, 0.023310335159301757, 0.02337295913696289, 0.023077728271484375, 0.023199167251586914, 0.022959936141967775, 0.023095359802246095, 0.02305433654785156, 0.022805183410644532, 0.023105056762695312, 0.02300156784057617, 0.023195648193359376, 0.023039487838745116, 0.023093759536743166, 0.02319561576843262, 0.02283318328857422, 0.02275494384765625, 0.022929567337036133, 0.023230688095092773, 0.02320902442932129, 0.023206560134887696, 0.023161312103271485, 0.02304185676574707, 0.02323027229309082, 0.023034048080444337, 0.022851072311401367, 0.02297907257080078, 0.02303385543823242, 0.022949535369873045, 0.022974815368652344, 0.02327459144592285, 0.023208864212036134, 0.02305843162536621, 0.023144447326660156, 0.023074399948120116, 0.02318582344055176, 0.023177215576171875, 0.02303385543823242, 0.02325503921508789, 0.023254304885864257, 0.023103872299194337, 0.023177568435668944, 0.022931455612182617, 0.023040031433105467, 0.02403727912902832, 0.02325129508972168, 0.02649776077270508, 0.025060991287231445, 0.023103456497192382, 0.023025920867919922, 0.023205888748168944, 0.02326527976989746, 0.023371776580810546, 0.02305558395385742, 0.0232589111328125, 0.023566816329956056, 0.022858015060424806, 0.023216384887695313, 0.02312396812438965, 0.02306281661987305, 0.023065984725952147, 0.023189855575561524, 0.02345699119567871, 0.023013919830322267, 0.02289244842529297, 0.022913055419921877, 0.023138175964355467, 0.022979007720947266, 0.022968320846557616, 0.022841344833374022, 0.022734848022460938, 0.0229101448059082, 0.023350080490112304, 0.02329599952697754, 0.023094751358032226, 0.02318569564819336, 0.02331820869445801, 0.02298019218444824, 0.023067615509033204, 0.022898687362670898, 0.022924287796020508, 0.022876319885253907, 0.022807392120361328, 0.023021568298339845, 0.022822912216186524, 0.02275868797302246, 0.02267136001586914, 0.022827072143554686, 0.022780160903930664, 0.022721120834350586, 0.022738752365112306, 0.022746368408203124, 0.022704160690307618, 0.022835519790649413, 0.02287379264831543, 0.022923744201660157, 0.022806751251220704, 0.022927391052246095, 0.022850624084472658, 0.022883199691772462, 0.02292937660217285, 0.022904928207397462, 0.022902015686035157, 0.022757856369018555, 0.023013504028320312, 0.02315484809875488, 0.023224319458007812, 0.023949312210083007, 0.02332057571411133, 0.02326665687561035, 0.02303580856323242, 0.02297494316101074, 0.02292355155944824, 0.02286524772644043, 0.023222944259643555, 0.02316214370727539, 0.023235424041748047, 0.02357200050354004, 0.02335548782348633, 0.023277824401855468, 0.023607295989990236, 0.023127840042114257, 0.023119808197021485, 0.0231693115234375, 0.023232511520385742, 0.02329523277282715, 0.02305510330200195, 0.022982656478881838, 
0.022945791244506835, 0.022916351318359375, 0.022769567489624023, 0.022748287200927735, 0.022701791763305664, 0.022733951568603517, 0.02356220817565918, 0.022673791885375976, 0.022805023193359374, 0.022726463317871093, 0.022700223922729492, 0.022800384521484376, 0.02305843162536621, 0.02308243179321289, 0.022849184036254883, 0.022854272842407226, 0.02305833625793457, 0.022996896743774413, 0.022960607528686523, 0.023422975540161133, 0.023244352340698243, 0.02370195198059082, 0.023459711074829102, 0.02329408073425293, 0.023015424728393553, 0.02355200004577637, 0.022878175735473634, 0.022865888595581054, 0.022724704742431642, 0.022791423797607423, 0.02292153549194336, 0.02287846374511719, 0.023073951721191407, 0.022967296600341795, 0.022986751556396484, 0.022840576171875, 0.024173311233520508, 0.022787328720092773, 0.022941568374633788, 0.022801279067993164, 0.02291708755493164, 0.023165056228637695, 0.023818784713745118, 0.02355423927307129, 0.02317068862915039, 0.023087263107299805, 0.023004640579223634, 0.023135040283203127, 0.023938976287841796, 0.023067935943603516, 0.023378047943115234, 0.02305094337463379, 0.022947744369506837, 0.023026880264282228, 0.022919519424438477, 0.022997568130493164, 0.02328780746459961, 0.022970367431640625, 0.022918527603149413, 0.0227774715423584, 0.023055007934570312, 0.022819168090820314, 0.02331203269958496, 0.023703008651733398, 0.023331903457641603, 0.023170879364013672, 0.023020959854125975, 0.022810911178588866, 0.022835519790649413, 0.02288787269592285, 0.02270060729980469, 0.022805599212646483, 0.023124895095825194, 0.02290073585510254, 0.022853055953979493, 0.023159360885620116, 0.023551551818847657, 0.02306662368774414, 0.02291551971435547, 0.02297216033935547, 0.022789920806884766, 0.022855648040771483, 0.02285753631591797, 0.02328646469116211, 0.023700607299804687, 0.023456640243530273, 0.023414400100708006, 0.023075199127197264, 0.023130111694335938, 0.02295427131652832, 0.02297417640686035, 0.02296531105041504, 0.022700992584228516, 0.022808576583862306, 0.022891872406005858, 0.022750879287719728, 0.022920192718505858, 0.022685344696044923, 0.022835552215576174, 0.02269968032836914, 0.022810943603515627, 0.022850976943969727, 0.022620800018310547, 0.02291004753112793, 0.023003072738647462, 0.026212352752685547, 0.02323980712890625, 0.023169919967651366, 0.02319955253601074, 0.02336387252807617, 0.02321379280090332, 0.023244863510131837, 0.02321011161804199, 0.022998464584350585, 0.023028480529785157, 0.022931039810180662, 0.0228351993560791, 0.02350102424621582, 0.022991039276123046, 0.023052160263061523, 0.022904767990112304, 0.022947359085083007, 0.022998847961425782, 0.02319148826599121, 0.02319228744506836, 0.023307775497436522, 0.023104000091552734, 0.023188575744628907, 0.023089727401733397, 0.023316991806030272, 0.023234176635742187, 0.02314201545715332, 0.0234051513671875, 0.02294169616699219, 0.02304800033569336, 0.022984895706176758, 0.022999040603637694, 0.022890592575073244, 0.02299839973449707, 0.023017599105834962, 0.02321558380126953, 0.023383136749267577, 0.023033695220947267, 0.023136255264282226, 0.023635040283203124, 0.023540639877319337, 0.02332467269897461, 0.023098880767822266, 0.02321254348754883, 0.02323583984375, 0.023126016616821288, 0.022991615295410155, 0.023035871505737306, 0.022982175827026368, 0.022788383483886718, 0.022786272048950194, 0.022789695739746093, 0.02289084815979004, 0.022933088302612304, 0.02278860855102539, 0.022810527801513672, 0.022835039138793947, 0.022782432556152345, 0.022687519073486328, 
0.022730752944946288, 0.022708223342895507, 0.02265830421447754, 0.022801151275634767, 0.02349260711669922, 0.02305843162536621, 0.02309119987487793, 0.02332262420654297, 0.023369951248168944, 0.023377695083618165, 0.023027040481567382, 0.022972192764282227, 0.02321731185913086, 0.02340780830383301, 0.023274015426635743, 0.023039743423461913, 0.023001407623291014, 0.02308470344543457, 0.02325695991516113, 0.023056255340576173, 0.02295043182373047, 0.023039871215820313, 0.023171199798583984, 0.022838432312011717, 0.023316640853881836, 0.023239360809326173, 0.023549951553344727, 0.023131296157836913, 0.022909791946411132, 0.02302720069885254, 0.022854143142700196, 0.023034944534301757, 0.023131071090698244, 0.02310348892211914, 0.02297420883178711, 0.02297238349914551, 0.023142688751220702, 0.02286796760559082, 0.022829055786132812, 0.022874111175537108, 0.02285977554321289, 0.02315056037902832, 0.023242368698120117, 0.02346611213684082, 0.023148031234741212, 0.02308483123779297, 0.02316703987121582, 0.022870975494384764, 0.023109632492065428, 0.02338787269592285, 0.023197568893432618, 0.02298931121826172, 0.023106752395629884, 0.023035903930664063, 0.022839456558227538, 0.02292153549194336, 0.022832799911499023, 0.023140960693359375, 0.02288844871520996, 0.022790143966674805, 0.022951263427734375, 0.02281532859802246, 0.0227840633392334, 0.022796287536621093, 0.022756607055664062, 0.022964736938476563, 0.022661376953125, 0.023238624572753906, 0.022939136505126953, 0.022799072265625, 0.022846944808959962, 0.022899263381958006, 0.022764671325683595, 0.022659936904907228, 0.02315673637390137, 0.022937280654907227, 0.022836992263793945, 0.022807104110717773, 0.02302742385864258, 0.022896543502807617, 0.022911136627197265, 0.022868064880371092, 0.022978687286376955, 0.022833087921142577, 0.023385984420776367, 0.023349407196044922, 0.023208000183105468, 0.023427040100097656, 0.023029760360717775, 0.023301408767700194, 0.025337760925292968, 0.023984928131103516, 0.02330415916442871, 0.02362099266052246, 0.023160831451416015, 0.02312816047668457, 0.023067232131958007, 0.022785791397094725, 0.022822399139404297, 0.02270876884460449, 0.022752735137939455, 0.022838016510009766, 0.022742431640625, 0.023177791595458984, 0.023116832733154298, 0.02335628890991211, 0.023250431060791017, 0.023245439529418946, 0.023118080139160155, 0.022847232818603517, 0.022966432571411132, 0.023196544647216797, 0.022975168228149413, 0.022853408813476563, 0.0230118408203125, 0.02277356719970703, 0.022838623046875, 0.023011199951171873, 0.023362527847290038, 0.023022783279418944, 0.023317472457885742, 0.022873151779174806, 0.025615135192871095, 0.024774208068847656, 0.023026111602783204, 0.022923263549804687, 0.02276937675476074, 0.02273308753967285, 0.02273606491088867, 0.02282579231262207, 0.02344361686706543, 0.023089632034301758, 0.023006847381591797, 0.023157472610473632, 0.0230533447265625, 0.022970687866210936, 0.022905471801757813, 0.02284956741333008, 0.022822912216186524, 0.022962047576904298, 0.02281484794616699, 0.02304819107055664, 0.023228416442871092, 0.023017471313476562, 0.022947744369506837, 0.023009376525878908, 0.02368873596191406, 0.023402080535888672, 0.02342585563659668, 0.023456928253173828, 0.023540224075317383, 0.023607551574707033, 0.02385513687133789, 0.02383785629272461, 0.023731168746948243, 0.023526655197143555, 0.023572992324829102, 0.02369152069091797, 0.023909759521484376, 0.023646175384521486, 0.023531904220581056, 0.023576160430908204, 0.024877567291259766, 0.023711936950683594, 
0.023749792098999023, 0.023489376068115235, 0.023461631774902344, 0.023767295837402343, 0.023526559829711913, 0.023419328689575195, 0.023325088500976563, 0.023508224487304687, 0.023257856369018556, 0.025912864685058594, 0.024379007339477538, 0.023165695190429686, 0.02294361686706543, 0.022929439544677733, 0.02280188751220703, 0.022864608764648436, 0.022880287170410157, 0.022753248214721678, 0.02267465591430664, 0.023076736450195312, 0.023185855865478517, 0.022919647216796873, 0.022742271423339844, 0.022699968338012695, 0.02278873634338379, 0.022942079544067382, 0.023043840408325196, 0.022687807083129882, 0.02344550323486328, 0.02345779228210449, 0.022980224609375, 0.02307315254211426, 0.02324025535583496, 0.022946239471435547, 0.022863359451293946, 0.022673919677734376, 0.022791423797607423, 0.023156768798828126, 0.023463743209838867, 0.023296928405761717, 0.023197504043579103, 0.02301136016845703, 0.02352924728393555, 0.022868192672729493, 0.022951135635375975, 0.023147455215454103, 0.023099136352539063, 0.023179519653320314, 0.023076608657836915, 0.023103263854980467, 0.02315839958190918, 0.02327199935913086, 0.023523807525634766, 0.023365312576293946, 0.023975711822509765, 0.023585119247436524, 0.02370969581604004, 0.023818304061889648, 0.023706687927246093, 0.023772031784057616, 0.02353971290588379, 0.023693279266357423, 0.023660703659057616, 0.023605119705200194, 0.023584640502929688, 0.02352060890197754, 0.024007455825805664, 0.023793024063110353, 0.023736959457397462, 0.02345350456237793, 0.02348217582702637, 0.023613088607788085, 0.02366111946105957, 0.02373436737060547, 0.023717952728271485, 0.02367692756652832, 0.02333286476135254, 0.023256895065307617, 0.023087295532226562, 0.022961759567260744, 0.023042688369750975, 0.02329782485961914, 0.023162879943847657, 0.023007232666015624, 0.023036928176879884, 0.022928384780883788, 0.02308710479736328, 0.023187456130981447, 0.022937599182128905, 0.0228351993560791, 0.023068672180175782, 0.02289779281616211, 0.023306432723999022, 0.023335168838500977, 0.02337798309326172, 0.022905120849609373, 0.02294988822937012, 0.022802335739135742, 0.02288163185119629, 0.023223039627075195, 0.023195648193359376, 0.02318035125732422, 0.02299180793762207, 0.023014591217041015, 0.02290265655517578, 0.022840255737304686, 0.02344960021972656, 0.022716415405273437, 0.022892000198364258, 0.022788639068603515, 0.022949151992797852, 0.022786239624023437, 0.02275382423400879, 0.022760799407958984, 0.023392927169799804, 0.022937599182128905, 0.022863552093505858, 0.02291878318786621, 0.023042240142822266, 0.023104000091552734, 0.023314016342163086, 0.02410691261291504, 0.023147008895874024, 0.024465408325195313, 0.022943744659423827, 0.02288435173034668, 0.022887584686279296, 0.022993696212768554, 0.02336892890930176, 0.023450464248657227, 0.023168159484863282, 0.023100255966186523, 0.023282688140869142, 0.023194623947143556, 0.023564287185668945, 0.02345369529724121, 0.023631040573120116, 0.02387353515625, 0.026370143890380858, 0.023728864669799805, 0.023537664413452147, 0.023797760009765623, 0.02355596733093262, 0.02355830383300781, 0.023678943634033202, 0.023584768295288085, 0.023774496078491213, 0.023596960067749022, 0.023575359344482422, 0.024271968841552735, 0.023790271759033203, 0.023597280502319337, 0.02323983955383301, 0.023341760635375977, 0.02314371109008789]",tokens/s,43.166129972454414,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1862.438912,2872.967168,0.0,2470.445056,2443.388928,s,1,9.111041015625,9.111041015625,0.0,9.111041015625,9.111041015625,9.111041015625,9.111041015625,[9.111041015625],,kWh,5.6699869887495424e-05,6.247125367991756e-06,1.748945843599997e-05,8.043645369148716e-05,,MB,1983.102976,3885.891584,0.0,3466.592256,3034.868224,s,10,3.7734732971191405,0.37734732971191415,0.003110385812695085,0.37657823181152345,0.3817689453125,0.3830650299072266,0.38410189758300783,"[0.38436111450195315, 0.3814809265136719, 0.3775335388183594, 0.37390692138671877, 0.37773953247070313, 0.37625946044921876, 0.3744965515136719, 0.37674993896484377, 0.3764065246582031, 0.3745387878417969]",tokens/s,678.4200651305608,kWh,1.1119264119444756e-05,1.226266084037464e-06,5.043183046889217e-06,1.7388713250371437e-05,tokens/kWh,14722193.431680841,MB,1994.784768,3887.988736,0.0,3468.689408,3034.870784,s,10,23.95289892578125,2.395289892578125,0.00949001471887643,2.393815673828125,2.4101381591796875,2.4111927368164063,2.4120363989257814,"[2.412247314453125, 2.39185595703125, 2.38252392578125, 2.3971259765625, 2.395227294921875, 2.38754541015625, 2.3839140625, 2.40990380859375, 2.400151123046875, 2.392404052734375]",tokens/s,26.30161810276381,kWh,6.959059728055282e-05,7.67590390887673e-06,3.5554571653510036e-05,0.00011282107284293956,tokens/kWh,558406.3190721784,,s,630,23.95116535949707,0.038017722792852496,0.0004639133224404029,0.03790336036682129,0.038494405364990235,0.03871037425994873,0.039763014221191405,"[0.03909836959838867, 0.03877283096313477, 0.03867023849487305, 0.03872153472900391, 0.03889766311645508, 0.03853107070922852, 0.03847516632080078, 0.038292064666748046, 0.03859571075439453, 0.0384436149597168, 0.0386952018737793, 0.03853311920166016, 0.03857340621948242, 0.03849683380126953, 0.03851663970947266, 0.03847967910766602, 0.038383808135986325, 0.03835718536376953, 0.038400001525878906, 0.03839148712158203, 0.03845347213745117, 0.03838556671142578, 0.038534400939941406, 0.03880454254150391, 0.038266399383544925, 0.03817001724243164, 0.03795449447631836, 0.037926910400390625, 0.038176769256591796, 0.03811532974243164, 0.03808870315551758, 0.037935134887695315, 0.03796169662475586, 0.038356990814208985, 0.03798233413696289, 0.03781564712524414, 0.0380175666809082, 0.03842374420166016, 0.03860268783569336, 0.037989246368408204, 0.03802262496948242, 0.037798431396484374, 0.03775241470336914, 0.0376646728515625, 0.03775539016723633, 0.03792486572265625, 0.0377262077331543, 0.03769548797607422, 0.03769071960449219, 0.03778819274902344, 0.03775910568237305, 0.03762716674804688, 0.03748259353637695, 0.037646976470947266, 0.03789004898071289, 0.03811072158813476, 0.04267264175415039, 0.0384059829711914, 0.038192768096923825, 0.0382176628112793, 0.03829542541503906, 0.03824303817749024, 0.03841024017333984, 
0.03899795150756836, 0.03827312088012695, 0.03849417495727539, 0.03856588745117188, 0.038190975189208984, 0.03807356643676758, 0.03831027221679688, 0.03819164657592773, 0.03812351989746094, 0.03829721450805664, 0.03824678421020508, 0.03833760070800781, 0.03942086410522461, 0.03862851333618164, 0.03807331085205078, 0.037770465850830076, 0.037720191955566404, 0.038255233764648434, 0.03775382232666016, 0.037843265533447266, 0.037777919769287106, 0.03770521545410156, 0.037671390533447265, 0.037626014709472654, 0.03801824188232422, 0.03766672134399414, 0.037644256591796876, 0.03759814453125, 0.03755129623413086, 0.03774540710449219, 0.037625919342041014, 0.037719871520996096, 0.03821136093139649, 0.038058399200439456, 0.03793859100341797, 0.037669502258300784, 0.037766273498535154, 0.03807648086547852, 0.03782326507568359, 0.037672256469726564, 0.037757568359375, 0.03754332733154297, 0.03764905548095703, 0.038193153381347655, 0.038384769439697264, 0.03892444610595703, 0.038347488403320314, 0.037904384613037106, 0.03771129608154297, 0.03764486312866211, 0.03767500686645508, 0.03795548629760742, 0.037652576446533206, 0.03768870544433594, 0.037668895721435544, 0.037859935760498044, 0.03779993438720703, 0.03776038360595703, 0.037672737121582034, 0.0376833610534668, 0.03764828872680664, 0.03769222259521485, 0.03773865509033203, 0.03823977661132812, 0.03785065460205078, 0.0378106575012207, 0.037652286529541015, 0.03771836853027344, 0.037765121459960936, 0.03773062515258789, 0.03775078582763672, 0.037623809814453124, 0.03761971282958984, 0.03765657424926758, 0.03761094284057617, 0.03752374267578125, 0.03750940704345703, 0.03776710510253906, 0.037865345001220706, 0.03757894515991211, 0.037645408630371094, 0.037905216217041016, 0.03781846237182617, 0.0377446403503418, 0.03767705535888672, 0.03758886337280273, 0.037674560546875, 0.03769187164306641, 0.037756801605224606, 0.03773443222045898, 0.03764620971679687, 0.03767123031616211, 0.037684703826904295, 0.037836704254150394, 0.037800575256347654, 0.039008255004882815, 0.039015838623046875, 0.03797622299194336, 0.037730751037597654, 0.0377077751159668, 0.037829696655273436, 0.03766774368286133, 0.03757590484619141, 0.03770771026611328, 0.037994400024414066, 0.037671424865722655, 0.03766320037841797, 0.0376995849609375, 0.03808051300048828, 0.03810617446899414, 0.0376800308227539, 0.037752864837646484, 0.03811459350585938, 0.037806526184082034, 0.03802755355834961, 0.037978111267089845, 0.03778355026245117, 0.03790028762817383, 0.03774784088134766, 0.03768384170532227, 0.03789593505859375, 0.03799296188354492, 0.03790166473388672, 0.037860000610351566, 0.037730304718017575, 0.03792281723022461, 0.03857401657104492, 0.03810342407226563, 0.03778905487060547, 0.03788422393798828, 0.03787990570068359, 0.038496479034423825, 0.03776716613769531, 0.04038387298583984, 0.037988990783691404, 0.03784470367431641, 0.039015872955322266, 0.03796806335449219, 0.03796035385131836, 0.03784223937988281, 0.037685920715332034, 0.03760294342041016, 0.037814014434814455, 0.03806447982788086, 0.04025145721435547, 0.038093055725097656, 0.03807027053833008, 0.03781785583496094, 0.038107040405273435, 0.037787647247314454, 0.03792867279052734, 0.03816742324829102, 0.037804031372070314, 0.03778083038330078, 0.037956256866455075, 0.03791452789306641, 0.03753993606567383, 0.038029312133789066, 0.03824425506591797, 0.037754657745361325, 0.04090502548217773, 0.03791667175292969, 0.03799619293212891, 0.03810543823242187, 0.03791430282592773, 0.03776953506469727, 0.03763558578491211, 
0.037669376373291014, 0.037580799102783204, 0.03746428680419922, 0.03783603286743164, 0.037699199676513674, 0.03856841659545898, 0.03762015914916992, 0.03776095962524414, 0.037727710723876956, 0.03771011352539062, 0.0379128303527832, 0.037856990814208985, 0.03773628616333008, 0.03780185699462891, 0.0381091194152832, 0.037857952117919924, 0.03785871887207031, 0.03821324920654297, 0.03799692916870117, 0.037917312622070314, 0.037948894500732425, 0.03796022415161133, 0.038575393676757816, 0.03815292739868164, 0.038055519104003906, 0.03791030502319336, 0.038333057403564456, 0.03808636856079101, 0.03798659133911133, 0.038566078186035156, 0.03782841491699219, 0.037937152862548826, 0.037811904907226565, 0.03776953506469727, 0.03815423965454102, 0.03790777587890625, 0.03779654312133789, 0.03787366485595703, 0.03813580703735352, 0.038110240936279294, 0.03795414352416992, 0.03788735961914062, 0.03789270401000976, 0.037865886688232424, 0.038034847259521484, 0.03779235076904297, 0.03782374572753906, 0.038572864532470705, 0.03808975982666016, 0.037956512451171875, 0.040005630493164065, 0.037969345092773436, 0.037890625, 0.03785302352905273, 0.03784716796875, 0.03771804809570312, 0.037720062255859374, 0.037787647247314454, 0.03804569625854492, 0.03836108779907227, 0.03830988693237305, 0.038027263641357424, 0.03799859237670898, 0.03801702499389648, 0.038100990295410156, 0.03832646560668945, 0.03798748779296875, 0.03799516677856445, 0.038508544921875, 0.037869438171386716, 0.038221759796142576, 0.038029502868652344, 0.037881919860839844, 0.037797279357910156, 0.0377943344116211, 0.037985790252685545, 0.03780863952636719, 0.037910526275634765, 0.03769331359863281, 0.038279296875, 0.03786924743652344, 0.037757152557373046, 0.037605152130126954, 0.037668479919433596, 0.03756307220458984, 0.03835670471191406, 0.037900577545166014, 0.03794124984741211, 0.03773785781860352, 0.037595775604248045, 0.03781836700439453, 0.03840201568603516, 0.03761673736572266, 0.037671871185302734, 0.03764633560180664, 0.03765043258666992, 0.03765593719482422, 0.03778351974487305, 0.03777542495727539, 0.037687904357910154, 0.037622974395751956, 0.037665599822998046, 0.039532001495361326, 0.038607040405273435, 0.0381115837097168, 0.037748737335205076, 0.037945343017578126, 0.03845529556274414, 0.03807436752319336, 0.03783996963500977, 0.03767737579345703, 0.037899040222167966, 0.03761663818359375, 0.03763820648193359, 0.03773926544189453, 0.037613121032714844, 0.03776761627197266, 0.037959678649902344, 0.03797148895263672, 0.038175201416015624, 0.03780819320678711, 0.03769286346435547, 0.038441184997558595, 0.03788803100585938, 0.03792095947265625, 0.03789625549316406, 0.03767881774902344, 0.03749203109741211, 0.0376363525390625, 0.03793094253540039, 0.03813824081420898, 0.03792323303222656, 0.037883712768554685, 0.03773193740844726, 0.03780873489379883, 0.03820544052124023, 0.0376627197265625, 0.037718017578125, 0.03803862380981445, 0.037689952850341796, 0.03804947280883789, 0.037819007873535156, 0.03779315185546875, 0.03839049530029297, 0.0380968017578125, 0.03768441772460938, 0.03775283050537109, 0.03771065521240234, 0.038619743347167966, 0.037773311614990236, 0.03786083221435547, 0.03783737564086914, 0.03924700927734375, 0.0391030387878418, 0.03810665512084961, 0.037830753326416014, 0.03778214263916015, 0.037604446411132815, 0.03758172988891602, 0.03772415924072266, 0.03785103988647461, 0.03765871810913086, 0.037591041564941405, 0.03760697555541992, 0.03764476776123047, 0.037953502655029295, 0.03768867111206055, 
0.03777552032470703, 0.03780425643920898, 0.03763843154907227, 0.037623809814453124, 0.03768729782104492, 0.037689567565917965, 0.0377968635559082, 0.03766115188598633, 0.037910430908203126, 0.037871231079101564, 0.03788819122314453, 0.03790703964233398, 0.03791836929321289, 0.03778201675415039, 0.0379279670715332, 0.03771884918212891, 0.037634048461914066, 0.03770982360839844, 0.03756777572631836, 0.037700126647949216, 0.037789886474609374, 0.03772115325927734, 0.0377784309387207, 0.03764985656738281, 0.03788236618041992, 0.03776921463012695, 0.037631999969482424, 0.037719905853271486, 0.03775094223022461, 0.0376146240234375, 0.03844156646728516, 0.03765856170654297, 0.03767862319946289, 0.03764726257324219, 0.037720062255859374, 0.03797747039794922, 0.03791689682006836, 0.037709632873535154, 0.037726974487304686, 0.03780508804321289, 0.03777414321899414, 0.03784089660644531, 0.037641502380371096, 0.03860867309570312, 0.038316513061523436, 0.03805174255371094, 0.03791596984863281, 0.03789849472045898, 0.037926559448242185, 0.03792371368408203, 0.03974524688720703, 0.038123809814453125, 0.038711296081542966, 0.03849356842041016, 0.03840473556518555, 0.03816185760498047, 0.03806233596801758, 0.03840201568603516, 0.03817916870117188, 0.03815628814697265, 0.038229343414306644, 0.03815900802612305, 0.03820544052124023, 0.0383631362915039, 0.0381003532409668, 0.03801561737060547, 0.038307838439941407, 0.0381952018737793, 0.038141952514648435, 0.03799244689941406, 0.038012928009033206, 0.038029312133789066, 0.03816633605957031, 0.03816825485229492, 0.03826726531982422, 0.03842438507080078, 0.038182239532470706, 0.03810812759399414, 0.038346752166748044, 0.03839740753173828, 0.03879276657104492, 0.03816751861572266, 0.037932735443115234, 0.0384126091003418, 0.037951488494873044, 0.03787571334838867, 0.03829081726074219, 0.038093441009521486, 0.0378263053894043, 0.038252609252929684, 0.03782880020141602, 0.03823820877075195, 0.03822796630859375, 0.038938304901123044, 0.038254913330078126, 0.03829715347290039, 0.03831033706665039, 0.03825068664550781, 0.03820291137695313, 0.03822419357299805, 0.038356895446777346, 0.03811280059814453, 0.03824079895019531, 0.03861094284057617, 0.03895113754272461, 0.03837724685668945, 0.0383917121887207, 0.04127699279785156, 0.03900073623657226, 0.0382050895690918, 0.03848409652709961, 0.037996543884277346, 0.03781568145751953, 0.03762239837646485, 0.03756032180786133, 0.03763132858276367, 0.037660640716552736, 0.03748044967651367, 0.03757932662963867, 0.0378504638671875, 0.03779587173461914, 0.037961471557617185, 0.03782099151611328, 0.03760543823242188, 0.038209632873535154, 0.037720352172851565, 0.03788595199584961, 0.03776102447509765, 0.037644287109375, 0.037746688842773435, 0.03767097473144531, 0.037636032104492186, 0.037733409881591795, 0.037645278930664064, 0.03749023818969727, 0.0375239028930664, 0.03805990219116211, 0.03757459259033203, 0.03776435089111328, 0.03764688110351563, 0.038031776428222655, 0.037883838653564456, 0.038157855987548825, 0.038994110107421875, 0.038813056945800783, 0.03819375991821289, 0.03801919937133789, 0.03846783828735351, 0.038399391174316407, 0.038379295349121094, 0.03870803070068359, 0.03843881607055664, 0.03862895965576172, 0.03835456085205078, 0.03852102279663086, 0.03857068634033203, 0.038442367553710936, 0.0383166732788086, 0.03851830291748047, 0.038351295471191406, 0.03837545776367188, 0.03820729446411133, 0.03803564834594726, 0.037842529296875, 0.037671329498291016, 0.03787273788452149, 0.03782867050170898, 
0.03765526580810547, 0.03829056167602539, 0.03893289566040039, 0.038504318237304686, 0.03795529556274414, 0.03804748916625977, 0.03792758560180664, 0.03808665466308594, 0.03792281723022461, 0.03870924758911133, 0.03777241516113281, 0.038203582763671876, 0.03788256072998047, 0.03800678253173828, 0.03797401428222656, 0.03842982482910156, 0.03791347122192383, 0.03775283050537109, 0.03816447830200195, 0.03788937759399414, 0.03797395324707031, 0.03837820816040039, 0.03837542343139649, 0.03875132751464844, 0.038244544982910154, 0.03799443054199219, 0.03783555221557617, 0.038456863403320315, 0.037881664276123043, 0.03812828826904297, 0.03764374542236328, 0.037808673858642575, 0.03750707244873047, 0.03790233612060547, 0.03856793594360351, 0.03826892852783203, 0.03747840118408203, 0.037533695220947266, 0.03779388809204102, 0.03765852737426758, 0.037493793487548825, 0.03752767944335938, 0.03769225692749024, 0.037562305450439454, 0.03778886413574219, 0.03754070281982422, 0.037539871215820315, 0.03762364959716797, 0.03775529479980469, 0.037969825744628906, 0.037967041015625, 0.03896985626220703, 0.03977027130126953, 0.03813792037963867, 0.038012863159179684, 0.038069889068603514, 0.03787737655639648, 0.037958400726318356, 0.03765043258666992, 0.03760947036743164, 0.0377059211730957, 0.03764771270751953, 0.037695423126220706, 0.03767350387573242, 0.03794704055786133, 0.0377245101928711]",tokens/s,26.303521792946643,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3728.146432,4375.642112,0.0,3997.171712,3878.257152,s,1,10.0629853515625,10.0629853515625,0.0,10.0629853515625,10.0629853515625,10.0629853515625,10.0629853515625,[10.0629853515625],,kWh,9.067295107500967e-05,9.994629891378651e-06,2.7987522389999464e-05,0.00012865510335638777,,MB,1964.519424,4862.181376,0.0,4454.350848,4371.844096,s,10,6.671113952636719,0.6671113952636719,0.0009954266792010686,0.6670467224121094,0.6679490539550781,0.6684646697998047,0.6688771624755859,"[0.66518603515625, 0.6659949951171875, 0.66783447265625, 0.6668709716796875, 0.6672155151367187, 0.6689802856445313, 0.6677422485351563, 0.6668779296875, 0.666758056640625, 0.6676534423828125]",tokens/s,383.7440070991704,kWh,1.9423147584895633e-05,2.1412516791791457e-06,1.2930131871874938e-05,3.4494531135949714e-05,tokens/kWh,7421466.2895707665,MB,1959.440384,5063.507968,0.0,4655.67744,4530.328576,s,10,392.1630078125,39.21630078125,0.020344922816255443,39.211791015624996,39.23774375,39.25023125,39.26022125,"[39.19016015625, 39.1965625, 39.20012890625, 39.20765234375, 39.26271875, 39.23496875, 39.22264453125, 39.2159296875, 39.2257734375, 
39.20646875]",tokens/s,1.606474826664972,kWh,0.0011421220202313548,0.00012598509379877328,0.0007595156805287254,0.0020276227945588534,tokens/kWh,31070.86789962174,,s,630,392.15904455566374,0.6224746738978795,0.0008655115987336269,0.6223991088867188,0.6236659790039062,0.6239581451416015,0.6245922937011719,"[0.6200103759765625, 0.6197701416015625, 0.6201177368164063, 0.6205819091796875, 0.6210646362304687, 0.6203760986328125, 0.6204375610351562, 0.6206837158203125, 0.6204436645507813, 0.6202265014648437, 0.6222623901367188, 0.621433837890625, 0.6215280151367187, 0.62188134765625, 0.62137060546875, 0.6221484985351563, 0.62211083984375, 0.621301513671875, 0.621649658203125, 0.6213512573242187, 0.62057861328125, 0.6210986328125, 0.6208408813476562, 0.6229445190429688, 0.6221722412109375, 0.6218251953125, 0.6227607421875, 0.623111572265625, 0.6230137939453125, 0.6239930419921875, 0.6230023193359375, 0.6225838012695313, 0.622202880859375, 0.6215720825195312, 0.6214387817382813, 0.6213839111328125, 0.6214364013671875, 0.6218626708984375, 0.622228271484375, 0.62367333984375, 0.6235811767578125, 0.6236504516601562, 0.6236121215820313, 0.6237943115234375, 0.6235645141601562, 0.623310791015625, 0.6226824340820313, 0.6235648193359375, 0.6227107543945313, 0.6225997924804687, 0.6235303955078125, 0.622992919921875, 0.622903564453125, 0.6229484252929688, 0.6223258056640625, 0.623763427734375, 0.6220636596679687, 0.6224998168945313, 0.6232493896484375, 0.6211295776367187, 0.6223375244140625, 0.621364990234375, 0.6211470336914062, 0.6223689575195313, 0.622508056640625, 0.6228316040039062, 0.6213529663085937, 0.6221004638671875, 0.6214983520507813, 0.6214381713867188, 0.6226903686523437, 0.6209523315429688, 0.6207808227539062, 0.6214124145507812, 0.620971923828125, 0.6218842163085937, 0.6220738525390626, 0.621828125, 0.621791259765625, 0.6213467407226563, 0.6219163818359374, 0.6220556030273438, 0.6224869384765624, 0.6239113159179688, 0.6213734130859375, 0.6217617797851562, 0.6224534912109375, 0.6221947021484375, 0.6215208740234375, 0.6229728393554688, 0.622939697265625, 0.6218276977539062, 0.6214949340820313, 0.6219102783203125, 0.6232936401367187, 0.6222221069335937, 0.6208645629882813, 0.6215435791015625, 0.6210117797851562, 0.6210989379882812, 0.6213345336914062, 0.6213837890625, 0.6225961303710937, 0.62291552734375, 0.6242017211914063, 0.6238248901367187, 0.6230337524414062, 0.6229817504882813, 0.6226165771484375, 0.62198291015625, 0.6223634643554687, 0.6218589477539063, 0.6218522338867187, 0.621098876953125, 0.622270263671875, 0.6222772216796875, 0.6219161376953125, 0.621995849609375, 0.621876953125, 0.6222908935546875, 0.6215070190429688, 0.623447265625, 0.6230023803710938, 0.6239747314453125, 0.6237037353515625, 0.6231859130859375, 0.6228176879882813, 0.6215496826171875, 0.6214759521484375, 0.6214410400390625, 0.6216576538085937, 0.6214496459960938, 0.6215905151367187, 0.6229995727539063, 0.6222479248046875, 0.621897705078125, 0.6223482666015625, 0.62142578125, 0.6226810302734375, 0.6223095092773437, 0.6218217163085937, 0.6223750610351563, 0.6216907958984375, 0.62226220703125, 0.6223396606445313, 0.6222217407226562, 0.6226906127929688, 0.6216229858398438, 0.6219797973632812, 0.622447998046875, 0.6220122680664063, 0.6219999389648437, 0.6224759521484375, 0.6215211181640625, 0.6214738159179688, 0.6216309204101562, 0.6225701293945313, 0.6222386474609375, 0.6228568115234375, 0.6218812866210938, 0.6217998657226562, 0.6232882080078125, 0.6228809204101563, 0.6232711181640626, 0.6232659301757812, 
0.6227479248046875, 0.6222993774414063, 0.6229536743164062, 0.622607666015625, 0.6231890869140625, 0.623139404296875, 0.6220667114257813, 0.6217567749023437, 0.6218615112304687, 0.6213767700195313, 0.6219066162109375, 0.621654052734375, 0.6217686767578126, 0.6228472900390625, 0.62200439453125, 0.6217765502929687, 0.6235244750976563, 0.622743896484375, 0.621717529296875, 0.62225, 0.6226636962890625, 0.6220015258789062, 0.6222670288085937, 0.62209228515625, 0.62174755859375, 0.621290283203125, 0.6210610961914063, 0.6216672973632813, 0.6211151123046875, 0.62069580078125, 0.6213017578125, 0.6216365966796875, 0.6210303955078125, 0.6210828247070312, 0.6210349731445313, 0.6217708129882813, 0.6215928955078125, 0.62160693359375, 0.6215065307617188, 0.621294921875, 0.6211319580078125, 0.6208429565429687, 0.6212675170898437, 0.62142626953125, 0.6217486572265625, 0.6214430541992187, 0.6210413818359375, 0.6219266967773438, 0.622487548828125, 0.6214102783203125, 0.6214717407226562, 0.622182373046875, 0.6214000854492188, 0.6219857788085937, 0.6214390258789062, 0.6237880249023438, 0.6231531372070312, 0.6232815551757812, 0.6228436279296875, 0.62311083984375, 0.6232412719726562, 0.6218281860351562, 0.62165576171875, 0.62274609375, 0.6230274658203125, 0.6233749389648438, 0.6227387084960937, 0.62357373046875, 0.6229483642578125, 0.6236651611328125, 0.6231387939453125, 0.6232576293945312, 0.6238064575195312, 0.6240092163085937, 0.623517822265625, 0.6245886840820313, 0.6245232543945313, 0.6244086303710937, 0.6249613647460938, 0.623146484375, 0.6227442626953125, 0.6224465942382813, 0.6225262451171875, 0.6226920776367187, 0.6234403686523438, 0.6223196411132812, 0.62211279296875, 0.621302978515625, 0.6227747802734375, 0.6231701049804688, 0.6235216064453125, 0.6242981567382813, 0.6236590576171875, 0.6236607055664063, 0.624884033203125, 0.6239848022460938, 0.62408642578125, 0.6232537841796875, 0.6225061645507812, 0.6219195556640625, 0.6220023803710938, 0.622193115234375, 0.6218832397460937, 0.622002197265625, 0.6220484619140625, 0.621796630859375, 0.6219608154296875, 0.6227579345703125, 0.6230382690429688, 0.6230971069335938, 0.6237848510742188, 0.6233681640625, 0.6225735473632813, 0.6229154052734375, 0.6248265380859375, 0.6229381103515625, 0.6229155883789063, 0.622581787109375, 0.6224097290039062, 0.6226590576171875, 0.6229775390625, 0.6237630004882813, 0.6239482421875, 0.6238617553710938, 0.6235765991210938, 0.6235120849609375, 0.623795654296875, 0.6234913330078125, 0.622713134765625, 0.6221495971679688, 0.6220308227539062, 0.6224120483398438, 0.6234224853515625, 0.6228380126953125, 0.6244541625976563, 0.6237429809570313, 0.6242688598632813, 0.6238717651367187, 0.623831787109375, 0.6240173950195312, 0.6237614135742188, 0.6245928955078125, 0.6245908203125, 0.62287255859375, 0.6223831176757812, 0.6224219970703125, 0.6238592529296875, 0.623743408203125, 0.624645263671875, 0.6239662475585938, 0.622827392578125, 0.6229811401367188, 0.6220188598632812, 0.6238758544921875, 0.6241565551757813, 0.6236318969726562, 0.6231119995117187, 0.6219984741210938, 0.622020751953125, 0.6231390991210938, 0.6225299682617188, 0.6230388793945313, 0.6222376708984375, 0.6226309204101562, 0.6222622680664063, 0.6220226440429687, 0.6226022338867188, 0.6219960327148437, 0.6217298583984375, 0.6219807739257812, 0.6221729736328125, 0.6236580200195313, 0.6226025390625, 0.6238562622070313, 0.6229373779296875, 0.6232030639648437, 0.6226739501953125, 0.6230437622070313, 0.6224904174804687, 0.621959228515625, 0.6215430908203124, 0.6218031005859375, 
0.62206005859375, 0.6217870483398438, 0.6218916015625, 0.6230121459960938, 0.6222722778320312, 0.6226165161132813, 0.6225267944335937, 0.6229401245117188, 0.6235381469726563, 0.623805908203125, 0.6235285034179687, 0.6229237060546875, 0.6232515258789062, 0.623276123046875, 0.6229988403320312, 0.6240543212890625, 0.6237988891601562, 0.6229699096679687, 0.622371826171875, 0.62280908203125, 0.6232158203125, 0.6228017578125, 0.6229827270507813, 0.62249560546875, 0.6228301391601563, 0.6217666625976562, 0.6224403076171875, 0.6227353515625, 0.6230809326171876, 0.6235040893554687, 0.623515625, 0.623261962890625, 0.6221266479492188, 0.6223284912109375, 0.6214202880859375, 0.6217576904296875, 0.6218575439453125, 0.6223890991210937, 0.6238516235351562, 0.623969482421875, 0.6214234008789062, 0.6214876098632812, 0.621227783203125, 0.6215134887695313, 0.6225276489257813, 0.6228345336914063, 0.6229075317382813, 0.6235851440429687, 0.6217216186523438, 0.6213507080078124, 0.6221498413085937, 0.6238248901367187, 0.623427734375, 0.6234868774414063, 0.6225546264648437, 0.6222705688476563, 0.6223138427734375, 0.6221107177734375, 0.623091796875, 0.62264111328125, 0.6221414184570313, 0.6228643798828125, 0.6230693969726563, 0.6235441284179688, 0.622635009765625, 0.6219710083007812, 0.62380078125, 0.62267626953125, 0.6227508544921875, 0.6230059814453125, 0.621990234375, 0.624089111328125, 0.6222274780273438, 0.6224034423828125, 0.6229784545898438, 0.6222505493164062, 0.622751953125, 0.62209228515625, 0.6220062866210937, 0.6222725219726563, 0.6221803588867187, 0.6223031005859375, 0.6222578125, 0.6225883178710937, 0.6229033813476562, 0.6239375610351563, 0.6232567138671875, 0.6232687377929688, 0.6230304565429687, 0.62258154296875, 0.6221129760742188, 0.6221259765625, 0.6225110473632812, 0.6222695922851562, 0.6221068115234375, 0.6221582641601563, 0.6215231323242187, 0.6219796752929687, 0.62226025390625, 0.62123828125, 0.6218137817382813, 0.6220238037109375, 0.622521240234375, 0.623515625, 0.6222418212890625, 0.6231427001953125, 0.622386474609375, 0.6224884643554688, 0.6225634765625, 0.6221185913085937, 0.6217791137695312, 0.621676513671875, 0.6228825073242188, 0.6224960327148438, 0.6222042846679687, 0.6219224853515625, 0.622136962890625, 0.6224452514648438, 0.6224302368164063, 0.622475341796875, 0.6222970581054688, 0.6220874633789063, 0.6237088623046875, 0.6230947875976562, 0.62299462890625, 0.622970703125, 0.6222347412109375, 0.6225335083007812, 0.62200830078125, 0.6229933471679687, 0.6223155517578125, 0.6216693725585938, 0.6226721801757813, 0.6223756713867188, 0.622839599609375, 0.6227332763671874, 0.6231636962890625, 0.623339111328125, 0.6221480102539062, 0.6229912719726562, 0.6224384765625, 0.6230159301757813, 0.622149658203125, 0.6223973999023438, 0.6233170776367187, 0.6224008178710938, 0.6225232543945313, 0.623578857421875, 0.6218077392578125, 0.6247218627929687, 0.6235191650390625, 0.6220989990234375, 0.6221145629882813, 0.6218303833007812, 0.6222553100585938, 0.622135498046875, 0.6215460815429688, 0.6213087158203126, 0.622620849609375, 0.6242772827148437, 0.6232037963867187, 0.6247526245117188, 0.6224877319335937, 0.6222564697265625, 0.6226760864257812, 0.6223031616210938, 0.6220322265625, 0.6223919067382813, 0.6222521362304687, 0.6217685546875, 0.6215347900390625, 0.6217870483398438, 0.6225313720703125, 0.621974853515625, 0.6225928344726562, 0.6223268432617187, 0.6222604370117187, 0.6231044921875, 0.6236356201171875, 0.6232913208007812, 0.6224424438476562, 0.622376953125, 0.6224302368164063, 
0.6221639404296875, 0.622761962890625, 0.6221732788085937, 0.6236161499023437, 0.6223853759765625, 0.6218573608398438, 0.62199169921875, 0.6220431518554688, 0.621332763671875, 0.622243896484375, 0.62230322265625, 0.6227742919921875, 0.6223658447265625, 0.6220337524414062, 0.6244816284179687, 0.6226843872070312, 0.6229847412109375, 0.6227628784179687, 0.62247900390625, 0.6219923095703125, 0.6221475830078125, 0.6218950805664063, 0.622811279296875, 0.6216702880859375, 0.6225107421875, 0.6226431274414063, 0.6224752807617188, 0.6241111450195312, 0.6234976806640625, 0.6229844970703124, 0.6231921997070312, 0.6222235107421875, 0.6225997924804687, 0.623913818359375, 0.62384912109375, 0.6238252563476563, 0.6229421997070312, 0.621756591796875, 0.6219512939453125, 0.6219854736328125, 0.6218219604492188, 0.622545166015625, 0.621743896484375, 0.6217643432617187, 0.6226516723632812, 0.622095947265625, 0.622987060546875, 0.6225020141601563, 0.622263916015625, 0.6228427734375, 0.6223524169921875, 0.6226841430664063, 0.623431884765625, 0.6231898193359375, 0.622972900390625, 0.62239111328125, 0.621619384765625, 0.6229232788085938, 0.6225082397460937, 0.6230407104492187, 0.6244061889648438, 0.622408447265625, 0.6222244873046875, 0.6240897216796875, 0.6229627075195312, 0.6230916748046875, 0.6225797119140625, 0.6231428833007813, 0.6217769165039062, 0.6236446533203125, 0.6240706787109375, 0.6222821044921875, 0.6225885009765625, 0.6225654296875, 0.6230643920898438, 0.62136767578125, 0.62153369140625, 0.6229584350585937, 0.6218322143554688, 0.6215249633789063, 0.6218395385742187, 0.6217871704101563, 0.6217344970703125, 0.6213716430664062, 0.621669677734375, 0.6217459716796875, 0.621306640625, 0.6215516357421875, 0.6213387451171875, 0.6225891723632813, 0.6224488525390625, 0.6220723876953125, 0.6219570922851563, 0.6220472412109375, 0.6218035278320313, 0.6217536010742187, 0.6216425170898437, 0.6220791625976563, 0.6216027221679687, 0.6216016845703125]",tokens/s,1.6064910620991075,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1412.01408,1946.025984,0.0,1543.503872,1510.249472,s,1,9.0036572265625,9.0036572265625,0.0,9.0036572265625,9.0036572265625,9.0036572265625,9.0036572265625,[9.0036572265625],,kWh,4.326230976666163e-05,4.764806672887686e-06,1.3024177086012889e-05,6.105129352556221e-05,,MB,1477.320704,2042.494976,0.0,1625.2928,1592.853504,s,10,0.4814502105712891,0.0481450210571289,0.00023174004680795476,0.04808217620849609,0.04828624954223633,0.04853051681518555,0.04872593063354492,"[0.048774784088134765, 0.048056640625, 0.04793180847167969, 0.04804092788696289, 0.04797884750366211, 0.048107711791992185, 0.04820601654052734, 0.04823196792602539, 0.04816524887084961, 
0.04795625686645508]",tokens/s,5317.268419017416,kWh,1.4544131943198987e-06,1.6013898274312275e-07,9.583410099005049e-07,2.5728931869635267e-06,tokens/kWh,99498883.70691584,MB,1514.872832,2084.438016,0.0,1667.23584,1592.856064,s,10,13.999325927734374,1.3999325927734376,0.01272510510382027,1.4026399536132814,1.4149755126953123,1.4163786865234373,1.4175012255859374,"[1.39648583984375, 1.4146636962890624, 1.4035003662109375, 1.370700439453125, 1.40515234375, 1.4177818603515624, 1.401779541015625, 1.39571484375, 1.3885228271484376, 1.405024169921875]",tokens/s,45.00216676517924,kWh,4.020642566109496e-05,4.434620474895099e-06,1.8538730143298372e-05,6.317977627928845e-05,tokens/kWh,997154.5280804138,,s,630,13.996947875976558,0.02221737758091518,0.00045163319632669053,0.0222150239944458,0.022599770164489746,0.022842889022827148,0.023871333141326907,"[0.02237808036804199, 0.022045055389404298, 0.022355903625488283, 0.022413408279418946, 0.02285366439819336, 0.022314592361450194, 0.022372095108032227, 0.022546560287475585, 0.02240287971496582, 0.022315711975097657, 0.022413087844848634, 0.02395363235473633, 0.022451839447021484, 0.022483327865600585, 0.0223191032409668, 0.022843551635742188, 0.022476640701293946, 0.022464736938476563, 0.022258880615234376, 0.022152000427246094, 0.022216480255126955, 0.02219798469543457, 0.022245664596557618, 0.022155263900756835, 0.02224332809448242, 0.022236608505249025, 0.022274623870849608, 0.02227132797241211, 0.022022815704345704, 0.02211974334716797, 0.022168479919433593, 0.022171072006225586, 0.022071647644042968, 0.021958656311035156, 0.021975040435791016, 0.021896223068237304, 0.021893535614013672, 0.021981760025024413, 0.021911231994628907, 0.0216331844329834, 0.021685983657836912, 0.02194272041320801, 0.021481472015380858, 0.02150543975830078, 0.02156368064880371, 0.021604671478271484, 0.02170675277709961, 0.02174515151977539, 0.021833984375, 0.022231199264526366, 0.022208255767822267, 0.022048927307128905, 0.022177984237670898, 0.022178016662597656, 0.022177568435668947, 0.022026239395141603, 0.02221468734741211, 0.02231603240966797, 0.022035104751586914, 0.022066591262817382, 0.022024383544921877, 0.022050975799560547, 0.021977567672729494, 0.023126367568969727, 0.022450239181518554, 0.022329151153564455, 0.022292608261108397, 0.02233344078063965, 0.022366207122802736, 0.02236579132080078, 0.022454687118530273, 0.02230428886413574, 0.02226838493347168, 0.022396255493164062, 0.022501407623291017, 0.02306425666809082, 0.023112415313720703, 0.023006624221801757, 0.022170431137084962, 0.02208070373535156, 0.023136064529418944, 0.022356544494628906, 0.02217414474487305, 0.022116064071655273, 0.022552032470703125, 0.022219583511352538, 0.022335487365722655, 0.022115360260009764, 0.021877567291259767, 0.022521568298339845, 0.021709247589111327, 0.02185206413269043, 0.02206278419494629, 0.02202047920227051, 0.02228540802001953, 0.02561734390258789, 0.022822912216186524, 0.022580671310424804, 0.02248966407775879, 0.022347776412963868, 0.02237424087524414, 0.02236432075500488, 0.02233344078063965, 0.02253209686279297, 0.022534143447875975, 0.022382591247558595, 0.022392160415649415, 0.02270275115966797, 0.02229596710205078, 0.02384102439880371, 0.022782304763793945, 0.022212608337402344, 0.02229248046875, 0.02240438461303711, 0.02238332748413086, 0.0223089599609375, 0.022282400131225587, 0.02244166374206543, 0.022349248886108397, 0.022257856369018555, 0.023219776153564454, 0.021975936889648436, 0.022038528442382813, 0.02209382438659668, 0.0219400634765625, 
0.0219005126953125, 0.021960927963256837, 0.02195734405517578, 0.02178767967224121, 0.02173027229309082, 0.021784576416015625, 0.022013952255249023, 0.02232729530334473, 0.022320671081542967, 0.02224380874633789, 0.02223459243774414, 0.022229536056518555, 0.022171648025512695, 0.022287616729736327, 0.022377216339111328, 0.02225766372680664, 0.022312320709228516, 0.022153823852539063, 0.022298015594482423, 0.02228223991394043, 0.022264448165893555, 0.02495692825317383, 0.022192127227783204, 0.022136831283569337, 0.02209347152709961, 0.02212803268432617, 0.02209049606323242, 0.021983423233032227, 0.021950464248657226, 0.022039871215820312, 0.021990079879760743, 0.022040063858032227, 0.02333951950073242, 0.021982336044311525, 0.021795360565185547, 0.02190140724182129, 0.021966175079345705, 0.022243839263916015, 0.02278432083129883, 0.02212384033203125, 0.022241504669189453, 0.02228201675415039, 0.02217990493774414, 0.022354656219482422, 0.02210211181640625, 0.02221878433227539, 0.022195743560791015, 0.022124895095825194, 0.022784000396728517, 0.022421503067016603, 0.022379999160766603, 0.022425920486450195, 0.022404447555541992, 0.022387584686279296, 0.023432512283325196, 0.022213247299194334, 0.022276287078857423, 0.022567935943603516, 0.022688064575195312, 0.0223089599609375, 0.022195903778076172, 0.02217385673522949, 0.022100448608398438, 0.022079647064208983, 0.021833087921142576, 0.021682815551757814, 0.02144060707092285, 0.02149100875854492, 0.02147532844543457, 0.021531232833862303, 0.021546304702758787, 0.021433023452758788, 0.021465087890625, 0.021536575317382813, 0.021442752838134765, 0.02143600082397461, 0.021524192810058594, 0.02174006462097168, 0.021700159072875976, 0.021529184341430665, 0.0219289608001709, 0.02228326416015625, 0.021658975601196288, 0.021600927352905273, 0.02143436813354492, 0.021784576416015625, 0.02191574478149414, 0.02160630416870117, 0.021514240264892577, 0.02151535987854004, 0.022139808654785157, 0.02157734489440918, 0.021846336364746095, 0.021472736358642577, 0.021506111145019532, 0.021824031829833983, 0.022023263931274413, 0.021887903213500978, 0.021743616104125976, 0.021585920333862304, 0.021542400360107423, 0.02169203186035156, 0.021561695098876954, 0.021494304656982422, 0.021501951217651367, 0.0216144962310791, 0.021824607849121092, 0.02203913688659668, 0.02182943916320801, 0.022022464752197265, 0.02199990463256836, 0.021868671417236328, 0.021773984909057617, 0.021821664810180663, 0.02188502311706543, 0.022706079483032226, 0.021917695999145507, 0.021700607299804688, 0.021613824844360353, 0.021990144729614258, 0.021989183425903322, 0.022003904342651367, 0.021977088928222657, 0.021859743118286132, 0.02219887924194336, 0.022273088455200197, 0.022107072830200195, 0.02211724853515625, 0.021989376068115234, 0.021977024078369142, 0.02223468780517578, 0.0221844482421875, 0.022310016632080078, 0.021953407287597655, 0.021970943450927736, 0.02220582389831543, 0.02275596809387207, 0.022183328628540038, 0.021792383193969728, 0.021672927856445312, 0.021555200576782226, 0.022005216598510742, 0.02191798400878906, 0.02241097640991211, 0.022049312591552735, 0.022347776412963868, 0.02230886459350586, 0.02228223991394043, 0.022437887191772463, 0.022200319290161134, 0.022277664184570313, 0.022407072067260742, 0.022266368865966796, 0.02238287925720215, 0.02231068801879883, 0.022413440704345703, 0.0226014404296875, 0.022368064880371095, 0.022351232528686524, 0.02233033561706543, 0.022441471099853515, 0.02237487983703613, 0.022312992095947264, 0.022421503067016603, 
0.022392192840576173, 0.022272064208984376, 0.02226233673095703, 0.02265292739868164, 0.0225086727142334, 0.022610815048217773, 0.022595136642456055, 0.022442432403564454, 0.022242559432983398, 0.023255807876586914, 0.022334495544433595, 0.023317472457885742, 0.022499263763427733, 0.022519487380981446, 0.02215283203125, 0.022246143341064454, 0.02229257583618164, 0.0226712646484375, 0.02224127960205078, 0.022080959320068358, 0.022112831115722657, 0.02216307258605957, 0.022534528732299806, 0.02220809555053711, 0.022044160842895507, 0.02213475227355957, 0.022812671661376953, 0.02220044708251953, 0.02212873649597168, 0.023742368698120117, 0.02230259132385254, 0.02269196891784668, 0.02260806465148926, 0.022363231658935546, 0.022163200378417968, 0.022159839630126955, 0.023181695938110352, 0.02213916778564453, 0.02217763137817383, 0.022147071838378905, 0.022101951599121095, 0.022255680084228516, 0.022239231109619142, 0.02230665588378906, 0.022362272262573243, 0.022419456481933595, 0.022185983657836913, 0.022335487365722655, 0.0223191032409668, 0.022312223434448244, 0.022455007553100585, 0.022452224731445314, 0.02247270393371582, 0.0224516487121582, 0.02232137680053711, 0.0223884162902832, 0.02245439910888672, 0.022636415481567383, 0.02249385643005371, 0.022341535568237304, 0.022841440200805665, 0.0224768009185791, 0.022599584579467775, 0.02247279930114746, 0.02270947265625, 0.02249180793762207, 0.02288038444519043, 0.022390783309936522, 0.024888864517211916, 0.0230732479095459, 0.02253785514831543, 0.02242585563659668, 0.022296768188476562, 0.022277503967285155, 0.022385215759277342, 0.0223656005859375, 0.022560991287231446, 0.022358400344848633, 0.022607551574707032, 0.02227436828613281, 0.02242131233215332, 0.022821056365966798, 0.022394784927368162, 0.022394975662231444, 0.023476224899291992, 0.022181888580322266, 0.02222489547729492, 0.022316383361816405, 0.0222706241607666, 0.02285990333557129, 0.02250943946838379, 0.0223024959564209, 0.023588768005371095, 0.022144895553588867, 0.022263999938964843, 0.022007776260375977, 0.021999103546142578, 0.021814048767089842, 0.021929983139038087, 0.021858272552490236, 0.02194000053405762, 0.02224483108520508, 0.02212944030761719, 0.022462400436401367, 0.02293916893005371, 0.022321311950683594, 0.02219171142578125, 0.022245376586914063, 0.02194700813293457, 0.021983392715454103, 0.021788543701171875, 0.021631103515625, 0.021596160888671875, 0.021901248931884765, 0.021642656326293946, 0.022063968658447265, 0.022075199127197267, 0.022189184188842772, 0.022823392868041994, 0.022131103515625, 0.02211027145385742, 0.02230860710144043, 0.022210752487182617, 0.02234163284301758, 0.022335712432861327, 0.022183071136474608, 0.02218009567260742, 0.022089088439941406, 0.02229145622253418, 0.022355487823486328, 0.0226964168548584, 0.022232160568237305, 0.022113216400146483, 0.022292448043823243, 0.022175743103027345, 0.022433792114257813, 0.02224127960205078, 0.02241904067993164, 0.02220047950744629, 0.02235843276977539, 0.02237628746032715, 0.022390911102294922, 0.02223628807067871, 0.02222729682922363, 0.022391199111938476, 0.02224947166442871, 0.02205286407470703, 0.02226335906982422, 0.022651264190673828, 0.02253139114379883, 0.022273920059204103, 0.022768512725830078, 0.023026079177856446, 0.02243984031677246, 0.02226576042175293, 0.022335487365722655, 0.022388927459716795, 0.022360063552856444, 0.02230019187927246, 0.022333215713500977, 0.022504127502441407, 0.0225296630859375, 0.022366207122802736, 0.022466495513916017, 0.022415807723999023, 
0.02234979248046875, 0.022366239547729493, 0.022323135375976563, 0.02235603141784668, 0.02214499282836914, 0.021952415466308595, 0.022132863998413087, 0.02208358383178711, 0.022190080642700196, 0.02209587287902832, 0.022153215408325197, 0.022176895141601562, 0.022752128601074218, 0.022443935394287108, 0.022208608627319337, 0.021916831970214844, 0.02180796813964844, 0.021748928070068358, 0.02181385612487793, 0.021790943145751952, 0.02165555191040039, 0.021874080657958983, 0.02177257537841797, 0.021801279067993163, 0.02174991989135742, 0.02175369644165039, 0.02177449607849121, 0.02235171127319336, 0.02202828788757324, 0.021983232498168945, 0.022289472579956053, 0.02196985626220703, 0.021733375549316408, 0.021794815063476563, 0.021777759552001952, 0.02202828788757324, 0.021996192932128907, 0.021939552307128907, 0.021973503112792968, 0.022330751419067384, 0.02232310485839844, 0.0222806396484375, 0.02207993507385254, 0.022286336898803712, 0.02224742317199707, 0.02229862403869629, 0.02230067253112793, 0.02230067253112793, 0.02210406494140625, 0.022121759414672853, 0.022731359481811524, 0.022347776412963868, 0.02222870445251465, 0.022112543106079102, 0.02183705520629883, 0.022801151275634767, 0.025374719619750977, 0.02340447998046875, 0.022286399841308594, 0.0220446720123291, 0.022071296691894532, 0.0218787841796875, 0.02166783905029297, 0.021612319946289062, 0.021585887908935546, 0.021609952926635742, 0.021582080841064454, 0.02168681526184082, 0.022022144317626953, 0.022034143447875975, 0.022331680297851562, 0.0218787841796875, 0.022048511505126954, 0.02227561569213867, 0.022842079162597655, 0.021708799362182618, 0.021630975723266603, 0.02157935905456543, 0.02161408042907715, 0.021732032775878905, 0.02187606430053711, 0.021620800018310547, 0.021662527084350586, 0.021740991592407225, 0.021676607131958008, 0.021591487884521483, 0.02167251205444336, 0.02198316764831543, 0.02177030372619629, 0.02164121627807617, 0.02201215934753418, 0.022738687515258788, 0.021792383193969728, 0.021662080764770508, 0.021546207427978515, 0.021527360916137696, 0.02173744010925293, 0.021841920852661133, 0.02198134422302246, 0.021910655975341798, 0.02177484893798828, 0.02296169662475586, 0.021713600158691407, 0.022083295822143554, 0.02177043151855469, 0.02161465644836426, 0.021919776916503906, 0.021828960418701172, 0.021844512939453126, 0.02218608093261719, 0.02203856086730957, 0.023883712768554687, 0.022138847351074218, 0.02207529640197754, 0.022435840606689454, 0.02222265625, 0.022173280715942382, 0.02221536064147949, 0.022163263320922853, 0.022150783538818358, 0.02214297676086426, 0.022234943389892577, 0.022422016143798826, 0.02216771125793457, 0.02215116882324219, 0.02225071907043457, 0.022237503051757812, 0.022267551422119142, 0.02411510467529297, 0.0234751033782959, 0.022291807174682616, 0.022317888259887696, 0.02236809539794922, 0.022245311737060548, 0.02235603141784668, 0.022349472045898437, 0.02224985694885254, 0.02224905586242676, 0.02230665588378906, 0.02211881637573242, 0.021995647430419922, 0.022259679794311524, 0.02216873550415039, 0.022300607681274415, 0.02219513511657715, 0.022138303756713867, 0.022044416427612304, 0.02216428756713867, 0.022015647888183595, 0.02199996757507324, 0.02206515121459961, 0.021996768951416015, 0.022044544219970704, 0.02199203109741211, 0.021909824371337892, 0.021978944778442384, 0.022116512298583985, 0.022167583465576172, 0.02219148826599121, 0.022389280319213868, 0.02253424072265625, 0.022226943969726562, 0.022347232818603516, 0.022284320831298828, 0.022454784393310546, 
0.02232524871826172, 0.022576608657836915, 0.02249577522277832, 0.022331392288208008, 0.02226790428161621, 0.022403072357177735, 0.02268880081176758, 0.022655967712402345, 0.022484672546386718, 0.022418975830078125, 0.022377248764038085]",tokens/s,45.00981253786694,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4347.445248,6253.576192,0.0,5851.05408,5850.293248,s,1,11.8014775390625,11.8014775390625,0.0,11.8014775390625,11.8014775390625,11.8014775390625,11.8014775390625,[11.8014775390625],,kWh,0.00012193073990417435,1.3438751194304425e-05,3.5457250587997424e-05,0.00017082674168647619,,MB,4115.410944,6368.919552,0.0,5951.717376,5923.050496,s,10,2.066099105834961,0.20660991058349612,0.00046705291284472264,0.20671092987060546,0.2072027374267578,0.20720696716308593,0.20721035095214843,"[0.20575343322753906, 0.2062879638671875, 0.20689785766601562, 0.2066741485595703, 0.20674771118164062, 0.20590780639648437, 0.20679180908203126, 0.20721119689941406, 0.20662538146972656, 0.20720179748535156]",tokens/s,1239.049953010575,kWh,6.064800430357037e-06,6.688371367730134e-07,4.022242446816234e-06,1.0755880013946284e-05,tokens/kWh,23800934.89961448,MB,4119.605248,6383.599616,0.0,5966.39744,5923.053056,s,10,22.602645996093756,2.2602645996093758,0.007576640121493429,2.260805419921875,2.2702843749999997,2.270421484375,2.270531171875,"[2.27055859375, 2.262692626953125, 2.254117919921875, 2.264029052734375, 2.25368994140625, 2.27025390625, 2.258918212890625, 2.258134765625, 2.244926025390625, 2.265324951171875]",tokens/s,27.87284285693268,kWh,6.611808822047604e-05,7.292747218992762e-06,4.380696248318444e-05,0.00011721779792265327,tokens/kWh,537461.0435999732,,s,630,22.599567424774154,0.03587232924567329,0.0005094795680022706,0.03577148818969726,0.03618861541748047,0.03646152267456055,0.03780295642852784,"[0.036918529510498045, 0.036020896911621095, 0.03618406295776367, 0.035999073028564456, 0.0362523193359375, 0.03641062545776367, 0.03620735931396484, 0.03605299377441406, 0.036129886627197266, 0.03604982376098633, 0.035888511657714846, 0.035971710205078125, 0.03606937789916992, 0.03597926330566406, 0.03598438262939453, 0.03589804840087891, 0.035941951751708986, 0.035900161743164065, 0.03601408004760742, 0.03603875350952149, 0.03602422332763672, 0.03616915130615234, 0.03615187072753906, 0.03603033447265625, 0.03604288101196289, 0.03596505737304687, 0.03607948684692383, 0.03620249557495117, 0.03619635009765625, 0.036173824310302735, 0.03597443389892578, 0.03584195327758789, 0.035948799133300784, 0.03600032043457031, 0.036016128540039063, 0.036422782897949216, 0.03638691329956055, 0.03590633773803711, 0.03599769592285156, 0.03582886505126953, 0.03585753631591797, 0.0358397445678711, 0.035886207580566404, 0.03582035064697266, 0.03598041534423828, 0.03591292953491211, 0.03611414337158203, 0.03587065505981445, 0.03591363143920898, 0.03580124664306641, 0.035883041381835935, 0.0357498893737793, 0.03581913757324219, 0.03582191848754883, 0.03595705413818359, 0.03596870422363281, 0.03573964691162109, 0.0358394889831543, 0.03594015884399414, 0.03605574417114258, 0.03572256088256836, 0.035918209075927736, 0.0375483512878418, 0.03662438583374023, 0.03589120101928711, 0.0360447998046875, 0.03647875213623047, 0.03572086334228516, 0.03580780792236328, 0.03581542587280274, 0.03576627349853516, 0.035727359771728515, 0.03543606567382813, 0.03580259323120117, 0.03598438262939453, 0.035794944763183595, 0.03562451171875, 0.03568288040161133, 0.03568947219848633, 0.03575423812866211, 0.03553958511352539, 0.035737598419189456, 0.03573350524902344, 0.0357212142944336, 0.03599555206298828, 0.03580732727050781, 0.035767616271972655, 0.03632796859741211, 0.036333984375, 0.035528446197509764, 0.03563296127319336, 0.03561286544799805, 0.035665664672851566, 0.03592217636108398, 
0.03573328018188476, 0.035921279907226565, 0.03549679946899414, 0.035712638854980466, 0.03577056121826172, 0.035821151733398435, 0.03537686538696289, 0.03560688018798828, 0.0362749137878418, 0.03559385681152344, 0.03612303924560547, 0.03559961700439453, 0.037822593688964845, 0.03611395263671875, 0.03598220825195313, 0.035925792694091796, 0.03605321502685547, 0.036040702819824216, 0.03621446228027344, 0.035934112548828126, 0.03612636947631836, 0.035833759307861326, 0.036287422180175784, 0.0360052490234375, 0.03570537567138672, 0.0361082878112793, 0.036034561157226565, 0.03671449661254883, 0.035565536499023435, 0.03577443313598633, 0.035999744415283204, 0.0361409912109375, 0.036517791748046875, 0.03569635009765625, 0.03563763046264649, 0.035710975646972655, 0.035901439666748046, 0.03572566223144531, 0.03572723388671875, 0.03566486358642578, 0.035686752319335935, 0.03556393432617187, 0.03591788864135742, 0.03552870559692383, 0.03560780715942383, 0.03555609512329101, 0.035520671844482425, 0.03570057678222656, 0.03535257720947266, 0.035571712493896485, 0.0354969596862793, 0.035484672546386715, 0.03570278549194336, 0.03555516815185547, 0.035526336669921874, 0.03569097518920898, 0.03547679901123047, 0.03583798217773437, 0.03548806381225586, 0.035549537658691406, 0.03568147277832031, 0.035535968780517575, 0.03558371353149414, 0.0356495361328125, 0.03556556701660156, 0.035857761383056644, 0.036052928924560544, 0.03604681777954102, 0.035568382263183596, 0.03568003082275391, 0.03559241485595703, 0.03571916961669922, 0.035579902648925785, 0.03561395263671875, 0.03543695831298828, 0.035891551971435544, 0.03562905502319336, 0.035850238800048825, 0.03570483016967774, 0.03569615936279297, 0.03582953643798828, 0.035619518280029294, 0.03574169540405273, 0.03595212936401367, 0.035840511322021484, 0.035995231628417966, 0.03575849533081055, 0.037754878997802735, 0.03721984100341797, 0.035940864562988284, 0.035671070098876954, 0.03613148880004883, 0.035649856567382815, 0.03589734268188476, 0.03647235107421875, 0.037333118438720704, 0.035654945373535155, 0.03596156692504883, 0.03561881637573242, 0.03643203353881836, 0.04213948822021484, 0.03597644805908203, 0.03560729598999023, 0.0355491828918457, 0.03585436630249023, 0.0358370246887207, 0.035568702697753904, 0.0355912971496582, 0.03547615814208984, 0.03577241516113281, 0.03555728149414063, 0.03561264038085937, 0.03562303924560547, 0.03568220901489258, 0.03557795333862305, 0.03543142318725586, 0.03567504119873047, 0.03758822250366211, 0.03586771011352539, 0.03566307067871094, 0.03597084808349609, 0.03706524658203125, 0.03577471923828125, 0.03559833526611328, 0.035504127502441404, 0.03588710403442383, 0.0358658561706543, 0.03564006423950195, 0.035710975646972655, 0.03581520080566406, 0.03557747268676758, 0.03577705764770508, 0.03578201675415039, 0.03579951858520508, 0.03559446334838867, 0.035571712493896485, 0.0355052490234375, 0.03550505447387695, 0.03559036636352539, 0.035558528900146484, 0.03560665512084961, 0.03563164901733398, 0.035692543029785154, 0.03591494369506836, 0.035916160583496094, 0.03582563018798828, 0.03592230224609375, 0.03597024154663086, 0.036479904174804685, 0.036117889404296874, 0.035865215301513674, 0.035786239624023435, 0.03562473678588867, 0.035932895660400394, 0.03590864181518555, 0.0359567985534668, 0.03579792022705078, 0.03603571319580078, 0.03647929763793945, 0.035869632720947266, 0.03582979202270508, 0.036343711853027344, 0.03597715377807617, 0.03588595199584961, 0.036009761810302736, 0.03587833786010742, 0.036372798919677735, 
0.03604323196411133, 0.035885055541992186, 0.0362918701171875, 0.03600048065185547, 0.035859745025634764, 0.03575062561035156, 0.035745792388916016, 0.03571916961669922, 0.03607475280761719, 0.035746177673339846, 0.03566982269287109, 0.03582966232299805, 0.035523231506347654, 0.035751327514648434, 0.03553299331665039, 0.03564380645751953, 0.03554508972167969, 0.03567411041259766, 0.03574483108520508, 0.035746753692626955, 0.035816673278808595, 0.03573126220703125, 0.035629631042480465, 0.03561308670043945, 0.03557580947875977, 0.03571916961669922, 0.03565158462524414, 0.03594649505615234, 0.03572278213500977, 0.035778526306152345, 0.03557782363891602, 0.036399646759033205, 0.03562700653076172, 0.03569247817993164, 0.0354898567199707, 0.03622943878173828, 0.03560006332397461, 0.035487743377685545, 0.03547955322265625, 0.035514366149902346, 0.03563711929321289, 0.03555955123901367, 0.03554099273681641, 0.03562905502319336, 0.03570073699951172, 0.0356514892578125, 0.0356844482421875, 0.03563699340820312, 0.03593967819213867, 0.035644065856933596, 0.03553305435180664, 0.03544678497314453, 0.03547097778320313, 0.03559977722167969, 0.036522911071777346, 0.03590310287475586, 0.035901054382324216, 0.03606780624389649, 0.03580547332763672, 0.03616582489013672, 0.035690303802490234, 0.036001792907714845, 0.03585638427734375, 0.03684966278076172, 0.03598361587524414, 0.03591756820678711, 0.03583321762084961, 0.03593004989624023, 0.03589190292358398, 0.03602022552490235, 0.03613199996948242, 0.036197216033935546, 0.036171775817871094, 0.035745792388916016, 0.035934207916259765, 0.03585171127319336, 0.03599827194213867, 0.036187808990478514, 0.036146591186523434, 0.035703742980957034, 0.03558579254150391, 0.036052352905273435, 0.036080513000488285, 0.03583737564086914, 0.035836479187011716, 0.0357891845703125, 0.03629561614990234, 0.036000446319580076, 0.03589324951171875, 0.03608908843994141, 0.03613363265991211, 0.035896671295166015, 0.03570127868652344, 0.035917953491210936, 0.035913406372070314, 0.035925407409667966, 0.035981952667236326, 0.03590787124633789, 0.03644211196899414, 0.03582755279541015, 0.03594870376586914, 0.04005068969726563, 0.03590102386474609, 0.03579715347290039, 0.03565094375610352, 0.03573632049560547, 0.0357151985168457, 0.035760128021240234, 0.03575193786621094, 0.0364288330078125, 0.0359793586730957, 0.03590233612060547, 0.036560897827148435, 0.03567411041259766, 0.0359436149597168, 0.03570963287353516, 0.035915904998779294, 0.03652223968505859, 0.03569321441650391, 0.03692544174194336, 0.03591372680664062, 0.03601107025146484, 0.03597612762451172, 0.03588425445556641, 0.03563948822021484, 0.03583855819702148, 0.03590329742431641, 0.036073600769042966, 0.036048801422119144, 0.03577667236328125, 0.03599734497070312, 0.03587926483154297, 0.03575356674194336, 0.03566223907470703, 0.03563030242919922, 0.03558639907836914, 0.03570323181152344, 0.035647361755371094, 0.03564556884765625, 0.036139007568359374, 0.035632896423339847, 0.035662078857421876, 0.035672065734863284, 0.03575711822509765, 0.03576108932495117, 0.03577446365356445, 0.035550334930419925, 0.035672958374023435, 0.036113761901855466, 0.03590943908691406, 0.03568726348876953, 0.03570012664794922, 0.03546790313720703, 0.035565536499023435, 0.03549174499511719, 0.03571516799926758, 0.03572252655029297, 0.035711711883544925, 0.035588096618652344, 0.03567523193359375, 0.035707809448242187, 0.03557708740234375, 0.03565235137939453, 0.03553449630737305, 0.03561507034301758, 0.03564262390136719, 0.0359139518737793, 
0.035645439147949216, 0.03566384124755859, 0.035950241088867185, 0.03583260726928711, 0.03598771286010742, 0.035860481262207033, 0.03568422317504883, 0.03570278549194336, 0.03578879928588867, 0.035770206451416015, 0.03891788864135742, 0.03607388687133789, 0.036359745025634764, 0.036560897827148435, 0.03566115188598633, 0.036090206146240235, 0.03551264190673828, 0.035786270141601566, 0.035588577270507814, 0.035545024871826175, 0.035722400665283205, 0.037454753875732424, 0.036146591186523434, 0.03581958389282226, 0.03568080139160156, 0.035563072204589846, 0.03578515243530273, 0.03571830368041992, 0.035619712829589846, 0.03574761581420898, 0.0360511360168457, 0.03644828796386719, 0.03575555038452148, 0.035526496887207035, 0.035584415435791016, 0.03570502471923828, 0.035611873626708986, 0.035599040985107425, 0.035489887237548826, 0.03589839935302734, 0.03576726531982422, 0.035846240997314455, 0.03944998550415039, 0.03600028610229492, 0.03598745727539063, 0.03591987228393555, 0.0360447998046875, 0.03583119964599609, 0.035738208770751956, 0.03571494293212891, 0.03557798385620117, 0.03589529418945313, 0.03581542587280274, 0.03561001586914062, 0.03592425537109375, 0.03595705413818359, 0.03579904174804688, 0.03586892700195313, 0.035644287109375, 0.035527553558349606, 0.03562083053588867, 0.035815231323242186, 0.035735774993896484, 0.035606529235839846, 0.03571507263183594, 0.03592396926879883, 0.035743743896484374, 0.0356426887512207, 0.03551475143432617, 0.035608287811279296, 0.03546777725219727, 0.03545087814331055, 0.03538502502441406, 0.035487552642822266, 0.03575187301635742, 0.03578096008300781, 0.03662643051147461, 0.03568595123291016, 0.03557011032104492, 0.03545484924316406, 0.03552188873291016, 0.03580188751220703, 0.035569438934326174, 0.035598560333251955, 0.03542559814453125, 0.03543724822998047, 0.0352911376953125, 0.03536624145507813, 0.035549854278564455, 0.03549798583984375, 0.035784481048583984, 0.0355425910949707, 0.035461982727050784, 0.035485504150390625, 0.03601238250732422, 0.035358367919921876, 0.035437664031982424, 0.03552950286865234, 0.03563942337036133, 0.03572905731201172, 0.0355945930480957, 0.03546112060546875, 0.03541987228393555, 0.03547574234008789, 0.03542591857910156, 0.03562499237060547, 0.03551212692260742, 0.035617313385009765, 0.03561062240600586, 0.035620864868164064, 0.035710399627685546, 0.035547710418701174, 0.03553676986694336, 0.03569219207763672, 0.03563977432250977, 0.035522560119628906, 0.035774177551269534, 0.03572966384887695, 0.03580931091308594, 0.03559219360351563, 0.03592601776123047, 0.0356104621887207, 0.03559849548339844, 0.035631103515625, 0.03563315200805664, 0.03580672073364258, 0.03562543869018555, 0.0357724494934082, 0.035682590484619144, 0.035790561676025394, 0.03575398254394531, 0.03564467239379883, 0.035625728607177734, 0.035622047424316405, 0.035644256591796875, 0.03548780822753906, 0.03559977722167969, 0.03619587326049805, 0.035698974609375, 0.03663187026977539, 0.037456703186035153, 0.03578879928588867, 0.03568956756591797, 0.03582249450683594, 0.03584118270874023, 0.035707809448242187, 0.03580060958862305, 0.03684963226318359, 0.038663646697998044, 0.035805854797363285, 0.035699008941650394, 0.03571712112426758, 0.03569164657592774, 0.03567472076416016, 0.035604480743408204, 0.035587520599365235, 0.03580147171020508, 0.03560047912597656, 0.03623465728759766, 0.035726303100585936, 0.035583103179931644, 0.035694721221923825, 0.03560889434814453, 0.03570323181152344, 0.035579902648925785, 0.0355491828918457, 0.036041759490966795, 
0.036335742950439454, 0.0358737907409668, 0.03583321762084961, 0.03595743942260742, 0.03565750503540039, 0.03557366561889649, 0.03553699111938476, 0.035710880279541016, 0.035705982208251955, 0.036240734100341794, 0.035896961212158206, 0.03568025588989258, 0.035695903778076174, 0.03635817718505859, 0.035694625854492186, 0.035754592895507815, 0.03575766372680664, 0.03577468872070313, 0.03580649566650391, 0.035713504791259766, 0.03597894287109375, 0.03553900909423828, 0.03603327941894531, 0.03917961502075195, 0.0357400016784668, 0.03577609634399414, 0.03565641784667969, 0.03583795166015625, 0.03576361465454102, 0.035861087799072267, 0.03588876724243164, 0.03627660751342773, 0.03624345779418945, 0.035933246612548826, 0.0356033935546875]",tokens/s,27.876639767422244,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8216.236032,11251.089408,0.0,10848.567296,10616.027648,s,1,14.9294140625,14.9294140625,0.0,14.9294140625,14.9294140625,14.9294140625,14.9294140625,[14.9294140625],,kWh,0.00021945647610833514,2.420022217123049e-05,6.470477398599558e-05,0.00030836147226556124,,MB,3917.08672,11683.10272,0.0,11265.900544,11070.470656,s,10,3.799405975341797,0.3799405975341797,0.0016273646893260915,0.38001599121093754,0.38233244323730464,0.3824092788696289,0.3824707473754883,"[0.3776495361328125, 0.37773464965820314, 0.3810429992675781, 0.3798990173339844, 0.3788070983886719, 0.3805530090332031, 0.37878521728515624, 0.38013296508789063, 0.38248611450195313, 0.3823153686523437]",tokens/s,673.7895388422398,kWh,1.11395795583333e-05,1.2280768663164228e-06,7.375643760592596e-06,1.974330018524232e-05,tokens/kWh,12966423.931058615,MB,3922.272256,11685.199872,0.0,11267.997696,11070.473216,s,10,28.983058593750002,2.8983058593749993,0.005214278858235374,2.900692138671875,2.903626416015625,2.9037448486328126,2.9038395947265627,"[2.89153125, 2.88903857421875, 2.89418017578125, 2.899248046875, 2.894547607421875, 2.90258251953125, 2.90213623046875, 2.90386328125, 2.902330810546875, 2.90360009765625]",tokens/s,21.736836295664645,kWh,8.494653748583237e-05,9.370225894567959e-06,5.6564768502807394e-05,0.00015088153188320773,tokens/kWh,417546.13181397284,,s,630,28.979988956451415,0.0459999824705578,0.00048814968224423415,0.04596897506713867,0.04642744064331055,0.046601284599304196,0.048061772651672365,"[0.04785343933105469, 0.04586918258666992, 0.04530790328979492, 0.04526662445068359, 0.04538195037841797, 0.04546559906005859, 0.0453201904296875, 0.04543078231811523, 0.045445438385009765, 0.04564140701293945, 0.045364479064941406, 0.04577151870727539, 0.04592761611938476, 0.045605056762695315, 0.04528601455688477, 0.04538729476928711, 0.04601084899902344, 0.04572979354858398, 0.04542780685424805, 0.04585481643676758, 0.04599481582641601, 0.0457891845703125, 0.045770782470703125, 0.04586288070678711, 0.04560281753540039, 0.04576812744140625, 0.04571603012084961, 0.04566790390014648, 
0.04555001449584961, 0.045672351837158204, 0.04593878555297851, 0.04576425552368164, 0.04566806411743164, 0.045875839233398434, 0.04655513763427734, 0.04591820907592774, 0.04551628875732422, 0.04580387115478515, 0.04604275131225586, 0.04608371353149414, 0.045916641235351566, 0.046072254180908205, 0.04649574279785156, 0.04579328155517578, 0.045897823333740234, 0.046043041229248044, 0.045973121643066404, 0.04561139297485352, 0.045943904876708984, 0.04607068634033203, 0.045860000610351566, 0.047997791290283205, 0.04561539077758789, 0.045931934356689456, 0.04591238403320313, 0.04625151824951172, 0.04632160186767578, 0.04627449417114258, 0.04609478378295898, 0.04664748764038086, 0.04637843322753906, 0.04612768173217773, 0.046102527618408204, 0.04823849487304688, 0.0458614387512207, 0.045415809631347656, 0.04548668670654297, 0.04539177703857422, 0.04557017517089844, 0.04532368087768555, 0.045206111907958986, 0.045800960540771485, 0.04556380844116211, 0.045502559661865234, 0.04571529769897461, 0.045712032318115235, 0.04552035140991211, 0.04561155319213867, 0.04552083206176758, 0.046023937225341795, 0.04572038269042969, 0.04549203109741211, 0.045605022430419924, 0.045809505462646484, 0.04592019271850586, 0.04609868621826172, 0.04592025756835937, 0.04561305618286133, 0.04532368087768555, 0.04575907135009766, 0.045741470336914065, 0.04596796798706055, 0.04558607864379883, 0.04577423858642578, 0.04587411117553711, 0.045817119598388675, 0.04567859268188477, 0.04597760009765625, 0.045927135467529294, 0.04583833694458008, 0.045838016510009766, 0.045744449615478515, 0.04606911849975586, 0.04567308807373047, 0.0456440315246582, 0.04596854400634766, 0.046165790557861325, 0.04592246246337891, 0.04608060836791992, 0.046198848724365235, 0.045764606475830076, 0.04585203170776367, 0.046142078399658205, 0.04597350311279297, 0.0456616325378418, 0.04642816162109375, 0.045972030639648435, 0.046034942626953124, 0.045876609802246095, 0.0461759033203125, 0.0461300163269043, 0.046069889068603515, 0.046043071746826175, 0.04615993499755859, 0.04614044952392578, 0.04610351943969727, 0.048285694122314454, 0.04602265548706055, 0.04548540878295899, 0.04540892791748047, 0.04548764801025391, 0.04538735961914062, 0.04524531173706055, 0.04523206329345703, 0.04566227340698242, 0.04586195373535156, 0.045408382415771484, 0.045408897399902344, 0.045381824493408204, 0.04564112091064453, 0.04545516967773437, 0.045778942108154294, 0.04602140808105469, 0.045881343841552735, 0.04551270294189453, 0.04559667205810547, 0.046061569213867185, 0.04599587249755859, 0.04586073684692383, 0.04574031829833984, 0.04593657684326172, 0.04566636657714844, 0.045649921417236325, 0.04569283294677735, 0.04589372634887695, 0.0458342399597168, 0.046025856018066406, 0.04591296005249024, 0.04560281753540039, 0.0455491828918457, 0.04597971343994141, 0.04588972854614258, 0.045887615203857424, 0.04604844665527344, 0.046285633087158204, 0.045991294860839846, 0.045787776947021484, 0.04643635177612305, 0.046353759765625, 0.04625388717651367, 0.04611772918701172, 0.04623155212402344, 0.04600419235229492, 0.04569196701049805, 0.045980640411376957, 0.04604108810424805, 0.04599603271484375, 0.046072864532470705, 0.04645782470703125, 0.04621311950683594, 0.04590959930419922, 0.046085918426513675, 0.04609471893310547, 0.04634854507446289, 0.04627865600585938, 0.046655487060546875, 0.0465428466796875, 0.04619468688964844, 0.04646060943603516, 0.0476767692565918, 0.04568153762817383, 0.04547516632080078, 0.04524099349975586, 0.0453072624206543, 0.045304065704345704, 
0.04543280029296875, 0.04537180709838867, 0.045664031982421874, 0.04534908676147461, 0.045581630706787106, 0.04587795257568359, 0.045608959197998046, 0.04568668746948242, 0.04577494430541992, 0.047642623901367184, 0.045568000793457034, 0.04557619094848633, 0.04613119888305664, 0.04613324737548828, 0.04584185409545898, 0.04587168121337891, 0.04617420959472656, 0.04571136093139649, 0.04538777542114258, 0.04552499389648437, 0.04577689743041992, 0.04558028793334961, 0.045451168060302735, 0.04591215896606445, 0.04614144134521484, 0.04566825485229492, 0.0457872314453125, 0.045699073791503904, 0.04627817535400391, 0.04604156875610352, 0.04606771087646484, 0.046331489562988284, 0.046133663177490236, 0.045791233062744144, 0.04615318298339844, 0.046172702789306644, 0.04607590484619141, 0.046166015625, 0.04624310302734375, 0.04594278335571289, 0.04573052978515625, 0.046080001831054686, 0.045930496215820314, 0.04579446411132813, 0.04612956619262695, 0.04642214584350586, 0.046055744171142575, 0.045881343841552735, 0.04616191864013672, 0.046129150390625, 0.051031551361083984, 0.04583065414428711, 0.046454113006591795, 0.046027263641357424, 0.04628086471557617, 0.046679584503173825, 0.04631804656982422, 0.04760985565185547, 0.045843711853027345, 0.04542473602294922, 0.04535337448120117, 0.04533478546142578, 0.045641727447509765, 0.045528190612792965, 0.04550662231445313, 0.045345600128173826, 0.045553665161132816, 0.04584444808959961, 0.04569705581665039, 0.0453570556640625, 0.04576825714111328, 0.04570771026611328, 0.04542771148681641, 0.04547052764892578, 0.045793472290039064, 0.04560892868041992, 0.04575030517578125, 0.04624588775634766, 0.046274559020996094, 0.04583446502685547, 0.045905696868896485, 0.046075393676757816, 0.046027263641357424, 0.045586143493652344, 0.045739551544189454, 0.045781761169433596, 0.045758464813232425, 0.04587833786010742, 0.04599494552612305, 0.04619260787963867, 0.045953056335449216, 0.045632961273193356, 0.04582457733154297, 0.04581689453125, 0.04571139144897461, 0.04582633590698242, 0.046322303771972655, 0.04639744186401367, 0.04642406463623047, 0.04619462585449219, 0.04623308944702149, 0.0462457275390625, 0.046215713500976564, 0.045973377227783205, 0.046451007843017575, 0.04592636871337891, 0.04613488006591797, 0.04595260620117188, 0.04587187194824219, 0.04607171249389649, 0.04655737686157226, 0.046243137359619144, 0.04572639846801758, 0.046104480743408206, 0.04639139175415039, 0.04607078552246094, 0.04604969787597656, 0.046559009552001956, 0.04642636871337891, 0.046088768005371095, 0.04826726531982422, 0.046018558502197264, 0.04553097534179688, 0.04558454513549805, 0.04548774337768555, 0.045502368927001956, 0.04547836685180664, 0.04535849761962891, 0.045361759185791016, 0.0458403205871582, 0.046053375244140625, 0.04550867080688477, 0.0454076156616211, 0.04561967849731445, 0.045613216400146483, 0.045620384216308596, 0.04608041763305664, 0.04619308853149414, 0.04588544082641602, 0.04571468734741211, 0.04590464019775391, 0.04595507049560547, 0.045795135498046875, 0.04605152130126953, 0.04608319854736328, 0.04577507019042969, 0.045974174499511716, 0.04591820907592774, 0.04609638214111328, 0.0458994255065918, 0.04618668746948242, 0.045959327697753904, 0.046635009765625, 0.04586905670166016, 0.046172000885009765, 0.046100639343261716, 0.045991966247558594, 0.04642736053466797, 0.046398208618164065, 0.04608201599121094, 0.04621315383911133, 0.046737407684326174, 0.04645657730102539, 0.04623715209960937, 0.046813983917236325, 0.04640937423706055, 0.046031200408935546, 
0.04600822448730469, 0.04632175827026367, 0.045948928833007815, 0.04604240036010742, 0.0462322883605957, 0.04636604690551758, 0.04597407913208008, 0.04606777572631836, 0.04650601577758789, 0.04634764862060547, 0.04635033416748047, 0.046305919647216795, 0.04639334487915039, 0.04592575836181641, 0.04651264190673828, 0.04666518402099609, 0.04754022216796875, 0.04563455963134765, 0.04548505783081055, 0.0454917106628418, 0.04547945785522461, 0.04577788925170898, 0.04575616073608398, 0.045547775268554684, 0.04549222564697265, 0.0455211181640625, 0.04570627212524414, 0.045818622589111326, 0.045467647552490234, 0.045854400634765625, 0.046047809600830075, 0.04601625442504883, 0.04553318405151367, 0.045608959197998046, 0.04586700820922852, 0.04674969482421875, 0.045879295349121094, 0.046386688232421876, 0.04607436752319336, 0.04580352020263672, 0.045804862976074216, 0.04604108810424805, 0.04584310531616211, 0.04568476867675781, 0.0458520622253418, 0.045994239807128905, 0.04581411361694336, 0.04586038589477539, 0.04588592147827148, 0.04591971206665039, 0.046271041870117186, 0.04609334564208984, 0.045996288299560546, 0.04597155380249023, 0.04593241500854492, 0.046379104614257816, 0.046174846649169925, 0.04615292739868164, 0.04612982559204101, 0.046399616241455076, 0.04600384140014648, 0.0462790412902832, 0.04637251281738281, 0.04617574310302734, 0.04578335952758789, 0.046208831787109376, 0.046868480682373044, 0.04607049560546875, 0.0462213134765625, 0.04656550216674805, 0.04621094512939453, 0.04628070449829102, 0.046630912780761716, 0.046491649627685545, 0.04604108810424805, 0.046402721405029296, 0.04808790588378906, 0.0461677131652832, 0.04619094467163086, 0.048326560974121094, 0.04615350341796875, 0.04540700912475586, 0.045502464294433595, 0.04554956817626953, 0.04542364883422852, 0.04556284713745117, 0.045674495697021485, 0.04561507034301758, 0.045639713287353514, 0.04572732925415039, 0.045782913208007814, 0.04564022445678711, 0.04587724685668945, 0.0458158073425293, 0.04605715179443359, 0.04581148910522461, 0.04557206344604492, 0.04582457733154297, 0.046219264984130856, 0.0462432975769043, 0.046273056030273436, 0.04621516799926758, 0.045995903015136716, 0.045609088897705076, 0.045758464813232425, 0.04605731201171875, 0.046086273193359374, 0.04591823959350586, 0.046102527618408204, 0.04602848052978516, 0.04578700637817383, 0.04577030563354492, 0.046050174713134766, 0.046186496734619144, 0.04598723220825195, 0.04610518264770508, 0.04619468688964844, 0.04607385635375977, 0.0457704963684082, 0.046429569244384766, 0.04631961441040039, 0.04608499145507813, 0.046516223907470705, 0.046450687408447267, 0.04611072158813476, 0.04606723022460937, 0.046623199462890626, 0.04630886459350586, 0.04600467300415039, 0.04629100799560547, 0.04641340637207031, 0.04620719909667969, 0.04605324935913086, 0.046586177825927735, 0.046177471160888675, 0.04606854248046875, 0.04635644912719727, 0.04649526214599609, 0.04709215927124023, 0.04630323028564453, 0.04666716766357422, 0.046500511169433593, 0.048664161682128906, 0.04616268920898438, 0.04554342269897461, 0.045622848510742185, 0.04557564926147461, 0.04555465698242187, 0.045469696044921876, 0.04561920166015625, 0.045744129180908207, 0.04565599822998047, 0.04550380706787109, 0.04556057739257813, 0.04578012847900391, 0.04592316818237305, 0.04564582443237305, 0.04572774505615235, 0.04601212692260742, 0.0462022705078125, 0.0456835823059082, 0.04575151824951172, 0.0462050895690918, 0.046080097198486325, 0.04606211090087891, 0.04590182495117188, 0.04605747222900391, 
0.045794879913330075, 0.045883617401123046, 0.04578531265258789, 0.04580147171020508, 0.04584447860717773, 0.04599356842041016, 0.04579369735717773, 0.04575641632080078, 0.04589363098144531, 0.04630527877807617, 0.04642828750610352, 0.045766399383544924, 0.04640719985961914, 0.046184574127197266, 0.04620540618896484, 0.046403392791748044, 0.046452449798583983, 0.04625388717651367, 0.04616873550415039, 0.04640153503417969, 0.046408897399902345, 0.04625654220581055, 0.04629955291748047, 0.04640752029418945, 0.04627062225341797, 0.045969406127929685, 0.04622073745727539, 0.046031425476074216, 0.04606083297729492, 0.046193374633789065, 0.04642201614379883, 0.04619468688964844, 0.04597350311279297, 0.04652646255493164, 0.046421630859375, 0.046174591064453124, 0.04635647964477539, 0.04659366226196289, 0.04784729766845703, 0.04560108947753906, 0.04543315124511719, 0.04547353744506836, 0.045494529724121095, 0.04536284637451172, 0.04541680145263672, 0.04559667205810547, 0.04592639923095703, 0.04547292709350586, 0.04556067276000977, 0.04597715377807617, 0.04582649612426758, 0.045484031677246094, 0.04579043197631836, 0.04594348907470703, 0.04592403030395508, 0.046098846435546875, 0.046067073822021486, 0.04592089462280274, 0.046607521057128905, 0.04652937698364258, 0.046862335205078126, 0.04570111846923828, 0.04571468734741211, 0.04587324905395508, 0.04573865509033203, 0.04565107345581055, 0.04623401641845703, 0.04623203277587891, 0.046059680938720704, 0.045817440032958984, 0.04623545455932617, 0.046029247283935544, 0.04571446228027344, 0.046000190734863285, 0.046359455108642575, 0.04614553451538086, 0.04608201599121094, 0.046241825103759765, 0.04652617645263672, 0.046400833129882815, 0.04637993621826172, 0.04629715347290039, 0.04615507125854492, 0.04620544052124023, 0.04630268859863281, 0.045988574981689456, 0.0458158073425293, 0.04615734481811523, 0.04625603103637695, 0.04604150390625, 0.04617763137817383, 0.046475616455078125, 0.04630166244506836, 0.046002174377441404, 0.046516223907470705, 0.046417278289794923, 0.04642675018310547, 0.04668415832519531, 0.046635009765625, 0.0464600944519043, 0.0466376953125]",tokens/s,21.73913871902121,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", 
line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 56307 has 14.69 GiB memory in use. Of the allocated memory 14.29 GiB is allocated by PyTorch, and 313.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.746688,556.72832,0.0,178.25792,176.52224,s,1,7.51967626953125,7.51967626953125,0.0,7.51967626953125,7.51967626953125,7.51967626953125,7.51967626953125,[7.51967626953125],,kWh,1.982594503750761e-05,2.179734761547903e-06,5.404726546004435e-06,2.741040634505995e-05,,MB,1194.688512,669.974528,0.0,262.144,221.118976,s,10,0.23773827171325684,0.023773827171325684,0.00024283595479606624,0.02371673583984375,0.024118610572814943,0.02418594560623169,0.02423981363296509,"[0.02391481590270996, 0.023476768493652343, 0.024103647232055665, 0.023773056030273437, 0.02366041564941406, 0.024253280639648437, 0.023616479873657226, 0.023656160354614257, 0.02347360038757324, 0.023810047149658203]",tokens/s,10768.1442350506,kWh,6.932403663103739e-07,7.645196220950019e-08,4.059873926176255e-07,1.1756797211374995e-06,tokens/kWh,217746377.17856833,MB,1228.357632,684.654592,0.0,276.824064,221.271552,s,10,13.6162353515625,1.36162353515625,0.004976171477569547,1.3627197875976562,1.3672108764648436,1.369263397216797,1.3709054138183594,"[1.362764404296875, 1.36269970703125, 1.37131591796875, 1.3627398681640626, 1.36317236328125, 1.3562159423828124, 1.355752197265625, 1.3607286376953125, 1.354091552734375, 1.3667547607421875]",tokens/s,46.2682954380416,kWh,3.9575149517440624e-05,4.3647022323524135e-06,2.0418029266581237e-05,6.435788101637427e-05,tokens/kWh,978901.0919108914,,s,630,13.611357091903685,0.021605328717307436,0.0004793603854581813,0.02150827217102051,0.021850230407714846,0.022027892112731935,0.024121036033630375,"[0.02136659240722656, 0.0215164794921875, 0.021698240280151368, 0.021655712127685547, 0.02155523109436035, 0.02168614387512207, 0.02161235237121582, 0.021825983047485353, 0.021884639739990233, 0.02169878387451172, 0.02162518310546875, 0.021489376068115233, 0.021884767532348633, 0.021545440673828124, 0.021544639587402343, 0.021530336380004882, 0.021551551818847655, 0.021600095748901368, 0.021439872741699218, 0.021392000198364257, 0.02143833541870117, 0.021472639083862304, 0.024135967254638672, 0.02312588882446289, 0.021810911178588868, 0.02173776054382324, 0.021477535247802736, 0.02143440055847168, 0.02140985679626465, 0.02182588768005371, 0.021575679779052736, 0.02159187126159668, 0.02151030349731445, 0.021499935150146483, 0.021395423889160155, 0.02136476707458496, 0.021339935302734377, 0.021369056701660158, 0.021441631317138672, 0.02128700828552246, 0.021468032836914064, 0.021498016357421875, 0.021790655136108398, 0.021624223709106445, 0.02154902458190918, 0.021496288299560545, 0.021591232299804686, 0.021506591796875, 0.021518943786621093, 0.021501056671142577, 0.02148726463317871, 
0.021643680572509767, 0.02153558349609375, 0.021646175384521484, 0.021754688262939453, 0.02164735984802246, 0.021940448760986327, 0.021501056671142577, 0.021501663208007813, 0.021425088882446288, 0.021379072189331053, 0.021536991119384764, 0.021421056747436523, 0.02101702308654785, 0.021399999618530275, 0.021557247161865235, 0.021405696868896484, 0.021420320510864257, 0.02151340866088867, 0.02134003257751465, 0.021434240341186524, 0.021394208908081056, 0.02137276840209961, 0.02144476890563965, 0.021656831741333007, 0.02145254325866699, 0.021547552108764648, 0.021483327865600584, 0.021494495391845704, 0.02148086357116699, 0.021369279861450194, 0.021417760848999025, 0.02136300849914551, 0.021350400924682617, 0.021356800079345702, 0.021284704208374024, 0.021188512802124023, 0.021666944503784178, 0.02145552062988281, 0.021422367095947265, 0.021528703689575195, 0.021296384811401368, 0.021418432235717773, 0.02142972755432129, 0.021324447631835938, 0.021401952743530274, 0.021565088272094725, 0.02145020866394043, 0.0215283203125, 0.02165830421447754, 0.021699871063232422, 0.021638336181640624, 0.021610111236572267, 0.02158835220336914, 0.021537471771240234, 0.02157459259033203, 0.021479328155517577, 0.02157708740234375, 0.021626976013183592, 0.021566240310668946, 0.021578655242919922, 0.021658559799194337, 0.02170579147338867, 0.021830528259277344, 0.021774368286132814, 0.022078975677490235, 0.021792512893676758, 0.02187104034423828, 0.021737823486328123, 0.02186240005493164, 0.021937952041625977, 0.021948991775512697, 0.02188047981262207, 0.022009855270385743, 0.024481439590454103, 0.023181695938110352, 0.02150982475280762, 0.02202150344848633, 0.022033119201660158, 0.021868223190307616, 0.02185004806518555, 0.021687904357910157, 0.021854944229125976, 0.022159263610839842, 0.022081920623779297, 0.02169068717956543, 0.02184694480895996, 0.021613311767578126, 0.021553184509277342, 0.02147737693786621, 0.021483776092529296, 0.021738815307617187, 0.02158140754699707, 0.021500736236572265, 0.02176527976989746, 0.02169862365722656, 0.026327936172485352, 0.02262182426452637, 0.021944063186645508, 0.021870687484741212, 0.0217607364654541, 0.021548992156982423, 0.02157548713684082, 0.02147123146057129, 0.02151628875732422, 0.021580127716064452, 0.021536415100097656, 0.02148476791381836, 0.021438880920410155, 0.021467519760131837, 0.021615999221801758, 0.021488256454467773, 0.021593151092529298, 0.021485759735107423, 0.021507135391235353, 0.02160310363769531, 0.02150275230407715, 0.021628480911254883, 0.021714656829833985, 0.02171913528442383, 0.021863487243652342, 0.021599264144897462, 0.021681983947753905, 0.02163603210449219, 0.021661600112915038, 0.021764095306396485, 0.0217391357421875, 0.021729375839233397, 0.02162076759338379, 0.021944000244140626, 0.021716575622558593, 0.021654367446899414, 0.02160985565185547, 0.021559680938720703, 0.02159654426574707, 0.021668895721435547, 0.021605375289916993, 0.021594432830810546, 0.021518335342407227, 0.02150399971008301, 0.021709152221679687, 0.02188902473449707, 0.0218656005859375, 0.021591936111450195, 0.02156835174560547, 0.021468416213989257, 0.021535680770874022, 0.02148054313659668, 0.021408639907836913, 0.02152448081970215, 0.02137843132019043, 0.021545600891113282, 0.021481472015380858, 0.021436447143554686, 0.021485536575317384, 0.02149344062805176, 0.02163539123535156, 0.021420383453369142, 0.021362335205078124, 0.02138047981262207, 0.021594751358032228, 0.02137660789489746, 0.021499391555786132, 0.02147011184692383, 0.022449600219726563, 
0.02164588737487793, 0.02155958366394043, 0.02158393669128418, 0.02158729553222656, 0.021549152374267577, 0.021548671722412108, 0.02177699279785156, 0.021866207122802735, 0.021884191513061525, 0.02242252731323242, 0.021835519790649415, 0.022227264404296874, 0.02184185600280762, 0.021837215423583984, 0.021719776153564452, 0.02168409538269043, 0.021589824676513672, 0.021589599609375, 0.02159881591796875, 0.02153494453430176, 0.02167532730102539, 0.021672351837158203, 0.02173139190673828, 0.021700159072875976, 0.021606847763061522, 0.021892927169799806, 0.021809152603149414, 0.021710304260253905, 0.021492576599121092, 0.021417856216430664, 0.021518335342407227, 0.021376224517822267, 0.021529151916503907, 0.02148111915588379, 0.021357120513916014, 0.02145631980895996, 0.02141417694091797, 0.021029344558715822, 0.021476415634155272, 0.021285823822021484, 0.02128281593322754, 0.021509632110595703, 0.021264352798461915, 0.02138175964355469, 0.02148566436767578, 0.021392351150512696, 0.02136150360107422, 0.021458879470825195, 0.021374912261962892, 0.021343360900878905, 0.02145417594909668, 0.021397151947021485, 0.021501951217651367, 0.02157535934448242, 0.021520544052124023, 0.02142838478088379, 0.02166988754272461, 0.021341663360595703, 0.021469728469848633, 0.021317632675170898, 0.021250272750854494, 0.021408544540405274, 0.021305856704711915, 0.02129929542541504, 0.021445024490356446, 0.021794815063476563, 0.0216494083404541, 0.021599552154541016, 0.021498559951782226, 0.024669408798217773, 0.027470624923706055, 0.021816768646240235, 0.021571647644042968, 0.021488128662109376, 0.021507999420166016, 0.021315744400024414, 0.021516223907470704, 0.021454240798950194, 0.021448448181152345, 0.0213832950592041, 0.02142313575744629, 0.02150592041015625, 0.021527679443359374, 0.021436927795410156, 0.021508544921875, 0.02133286476135254, 0.021478271484375, 0.021581823348999024, 0.021728256225585937, 0.021507200241088868, 0.021416896820068358, 0.021629823684692382, 0.022895904541015626, 0.021436256408691408, 0.0215031681060791, 0.02138083267211914, 0.021462944030761717, 0.0218789119720459, 0.021342496871948242, 0.021495231628417967, 0.021202688217163087, 0.021759647369384766, 0.021487871170043946, 0.021495168685913085, 0.022068191528320312, 0.021354496002197267, 0.021526239395141603, 0.021440799713134766, 0.02154195213317871, 0.021497119903564454, 0.021533632278442384, 0.02234822463989258, 0.021639455795288087, 0.02178691291809082, 0.02157334327697754, 0.021510143280029297, 0.021716991424560548, 0.021518239974975584, 0.021790271759033204, 0.02147724723815918, 0.021378944396972657, 0.02156982421875, 0.02136729621887207, 0.021319583892822267, 0.021343360900878905, 0.02132476806640625, 0.021348352432250976, 0.02162713623046875, 0.02153011131286621, 0.02192745590209961, 0.022114368438720704, 0.02157814407348633, 0.021609920501708985, 0.021959327697753907, 0.02147164726257324, 0.021321216583251954, 0.021495071411132813, 0.021558528900146486, 0.021425888061523436, 0.02142348861694336, 0.02158451271057129, 0.021415775299072265, 0.021557407379150392, 0.0214052791595459, 0.021579296112060546, 0.021452959060668946, 0.02134294319152832, 0.02145894432067871, 0.02146918487548828, 0.02140166473388672, 0.02139743995666504, 0.02135206413269043, 0.021328256607055663, 0.02135990333557129, 0.021315296173095702, 0.021390335083007812, 0.02136649513244629, 0.021463264465332033, 0.021280096054077147, 0.02136899185180664, 0.021541439056396484, 0.021438144683837892, 0.021502016067504882, 0.021125503540039062, 
0.021542911529541017, 0.02150716781616211, 0.021465856552124022, 0.021705984115600586, 0.02132467269897461, 0.021297536849975585, 0.021431615829467773, 0.021381471633911135, 0.021415327072143556, 0.021660255432128905, 0.021360639572143555, 0.023858240127563476, 0.02167683219909668, 0.021663904190063477, 0.021458623886108398, 0.021592639923095704, 0.021266176223754884, 0.021278976440429687, 0.021657344818115234, 0.02128691291809082, 0.021370880126953123, 0.0214932804107666, 0.021445087432861328, 0.02157382392883301, 0.02152364730834961, 0.021480064392089843, 0.021593791961669922, 0.021950784683227538, 0.021729280471801758, 0.021604352951049805, 0.021302911758422853, 0.021516063690185546, 0.02138332748413086, 0.021313472747802733, 0.021348543167114258, 0.021307712554931642, 0.021313472747802733, 0.021487327575683595, 0.02147158432006836, 0.021497856140136717, 0.021367040634155274, 0.0216428165435791, 0.021883327484130858, 0.02157360076904297, 0.021518112182617188, 0.0214866886138916, 0.021397600173950194, 0.021535615921020507, 0.021375200271606446, 0.0216343994140625, 0.021422464370727538, 0.02157155227661133, 0.021391519546508787, 0.02129724884033203, 0.021313119888305664, 0.021440704345703124, 0.02131065559387207, 0.02140243148803711, 0.021535072326660156, 0.021449472427368162, 0.021588895797729494, 0.021469024658203124, 0.02101139259338379, 0.02142790412902832, 0.021323455810546874, 0.021323360443115235, 0.021256927490234376, 0.02135590362548828, 0.02130803108215332, 0.021661088943481444, 0.02154966354370117, 0.021672191619873046, 0.022133663177490236, 0.021733631134033204, 0.021709535598754885, 0.021573087692260743, 0.021472799301147462, 0.021500543594360353, 0.021590272903442384, 0.021389312744140625, 0.021393312454223632, 0.021434783935546875, 0.02144611167907715, 0.021376480102539064, 0.02150271987915039, 0.02147929573059082, 0.021456192016601563, 0.02157859230041504, 0.021972959518432617, 0.02150399971008301, 0.021497856140136717, 0.02151628875732422, 0.021491519927978514, 0.021552608489990233, 0.02160099220275879, 0.0216944637298584, 0.02154521560668945, 0.02156723213195801, 0.02408448028564453, 0.022454208374023437, 0.021556480407714844, 0.021659711837768554, 0.02144767951965332, 0.02155084800720215, 0.021359615325927735, 0.021895647048950195, 0.021415775299072265, 0.02160268783569336, 0.021319999694824218, 0.021606559753417968, 0.021476831436157227, 0.02144732856750488, 0.021390047073364258, 0.021418495178222655, 0.02194175910949707, 0.021502880096435546, 0.02160643196105957, 0.021497983932495118, 0.02175993537902832, 0.021633056640625, 0.021353471755981446, 0.022502368927001953, 0.021356544494628905, 0.021319679260253906, 0.02150297546386719, 0.020975616455078124, 0.021389312744140625, 0.021456895828247072, 0.02142646408081055, 0.021393375396728517, 0.021432096481323243, 0.02130940818786621, 0.021851871490478517, 0.021533088684082033, 0.021405088424682618, 0.021502111434936525, 0.021416255950927734, 0.021691936492919922, 0.021596160888671875, 0.02161097526550293, 0.021569440841674805, 0.021600351333618165, 0.021448703765869142, 0.021496095657348634, 0.02140105628967285, 0.021399808883666993, 0.021622783660888673, 0.02129532814025879, 0.02148534393310547, 0.021506240844726562, 0.021433759689331054, 0.021508512496948243, 0.021340063095092773, 0.02126857566833496, 0.02141900825500488, 0.02148044776916504, 0.021336063385009766, 0.021426368713378906, 0.02143008041381836, 0.021508031845092774, 0.021411455154418946, 0.021389759063720704, 0.021316736221313477, 0.02183772850036621, 
0.02150499153137207, 0.02183286476135254, 0.021707136154174803, 0.02158665657043457, 0.021503744125366212, 0.021475008010864258, 0.021441919326782227, 0.021293760299682617, 0.021324031829833983, 0.021388736724853516, 0.02142880058288574, 0.021581024169921876, 0.02160310363769531, 0.021423360824584962, 0.021594175338745115, 0.021502784729003906, 0.021476703643798827, 0.021461536407470703, 0.021444480895996095, 0.021692768096923828, 0.02169219207763672, 0.021501056671142577, 0.02172198486328125, 0.02150320053100586, 0.0217126407623291, 0.021554624557495117, 0.02150662422180176, 0.02162124824523926, 0.02140310478210449, 0.021394880294799804, 0.02128700828552246, 0.02142220878601074, 0.021569856643676756, 0.021457216262817384, 0.021395807266235353, 0.02435158348083496, 0.02265769577026367, 0.02180009651184082, 0.021558368682861328, 0.02162063980102539, 0.02207539176940918, 0.02153696060180664, 0.021947391510009767, 0.021596128463745118, 0.021631103515625, 0.022167583465576172, 0.02152230453491211, 0.02148249626159668, 0.021597343444824217, 0.021512128829956054, 0.02139155197143555, 0.021371423721313478, 0.02148761558532715, 0.02141209602355957, 0.021384960174560548, 0.021474720001220703, 0.021643871307373046, 0.021642751693725586, 0.02240153694152832, 0.024633472442626952, 0.02183359909057617, 0.02170787239074707, 0.021553056716918945, 0.021473344802856446, 0.02142300796508789, 0.02148464012145996, 0.021558399200439452, 0.022250335693359376, 0.021557600021362304, 0.02152822494506836, 0.021601408004760743, 0.021382335662841798, 0.021621376037597655, 0.021519968032836914, 0.021360960006713867, 0.02138912010192871, 0.021373247146606444, 0.0214835205078125, 0.021461183547973633, 0.021361536026000976, 0.02125257682800293, 0.021729759216308594, 0.022515615463256835, 0.021492000579833984, 0.021348159790039064, 0.0212739200592041, 0.021522495269775392]",tokens/s,46.28487782270711,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1141.346304,1113.45664,0.0,710.934528,695.312896,s,1,8.467751953125,8.467751953125,0.0,8.467751953125,8.467751953125,8.467751953125,8.467751953125,[8.467751953125],,kWh,3.264550493749615e-05,3.5938141894391103e-06,9.921952382002663e-06,4.6161271508937926e-05,,MB,1532.796928,1444.806656,0.0,1027.60448,994.274816,s,10,0.2503581142425537,0.025035811424255372,0.0004092159056794971,0.024795184135437014,0.02541810321807861,0.02567754793167114,0.025885103702545164,"[0.025360448837280274, 0.02593699264526367, 0.025319583892822267, 0.025353504180908204, 0.02479977607727051, 0.024689855575561522, 0.024636991500854494, 0.02474665641784668, 0.024723712921142577, 
0.024790592193603515]",tokens/s,10225.35262236319,kWh,7.314071511250308e-07,8.0611977852469e-08,4.843205263450022e-07,1.2963396553225022e-06,tokens/kWh,197479108.92713726,MB,1566.208,1490.944,0.0,1073.741824,994.277376,s,10,10.751147216796875,1.0751147216796877,0.01380294386801216,1.068537109375,1.0943094970703124,1.0997770629882813,1.1041511157226562,"[1.093094482421875, 1.10524462890625, 1.08628125, 1.0703973388671875, 1.068855224609375, 1.0641353759765626, 1.062273681640625, 1.068218994140625, 1.06585986328125, 1.066786376953125]",tokens/s,58.598397668272085,kWh,3.1149315265541804e-05,3.43527924594161e-06,1.4079663347055186e-05,4.866425785853859e-05,tokens/kWh,1294584.6247801366,,s,630,10.748531547546385,0.017061161186581568,0.00035110411017361966,0.016926639556884768,0.01748899784088135,0.017660950183868408,0.01830529005050659,"[0.016855199813842772, 0.016855039596557618, 0.01688979148864746, 0.016899551391601562, 0.017176671981811522, 0.017150304794311524, 0.01713737678527832, 0.016871776580810547, 0.01685513687133789, 0.01676697540283203, 0.01679974365234375, 0.0169421443939209, 0.016853952407836915, 0.016952735900878906, 0.016896608352661133, 0.01704550361633301, 0.01690332794189453, 0.01684566307067871, 0.017403583526611328, 0.017323423385620117, 0.017830591201782226, 0.01711110305786133, 0.017269119262695313, 0.017153568267822265, 0.01739299201965332, 0.01720003128051758, 0.01711087989807129, 0.017071712493896486, 0.017047615051269532, 0.01695795249938965, 0.017076192855834962, 0.01724166488647461, 0.01722844886779785, 0.019212095260620118, 0.019685375213623048, 0.017350656509399414, 0.01746089553833008, 0.017459808349609376, 0.0172992000579834, 0.017411136627197267, 0.01736390495300293, 0.01735206413269043, 0.017347200393676758, 0.017489919662475584, 0.017450624465942383, 0.017252735137939453, 0.017145599365234375, 0.017381887435913086, 0.017482656478881836, 0.017462080001831054, 0.01740188789367676, 0.017423519134521483, 0.017343263626098632, 0.01759187126159668, 0.017844991683959963, 0.017530336380004882, 0.017492031097412108, 0.018155744552612305, 0.017738784790039062, 0.018187231063842773, 0.018307071685791015, 0.017391616821289063, 0.017667871475219726, 0.017345727920532225, 0.01766476821899414, 0.01748521614074707, 0.01750614356994629, 0.017567712783813475, 0.017488895416259767, 0.017663135528564453, 0.01753379249572754, 0.0174653434753418, 0.017534975051879884, 0.017580032348632812, 0.01766115188598633, 0.017660703659057617, 0.017723392486572266, 0.01755926322937012, 0.017426719665527345, 0.01742233657836914, 0.017358144760131835, 0.01738115119934082, 0.01739049530029297, 0.017573888778686524, 0.017974880218505858, 0.01760041618347168, 0.017742336273193358, 0.01745715141296387, 0.017496063232421876, 0.017510400772094727, 0.01741619110107422, 0.01744476890563965, 0.017882976531982422, 0.01750601577758789, 0.017602304458618163, 0.01761907196044922, 0.017550016403198244, 0.0175863037109375, 0.017747583389282225, 0.018067680358886718, 0.017431583404541016, 0.01746019172668457, 0.01741619110107422, 0.017581439971923827, 0.01775446319580078, 0.017428768157958983, 0.017352544784545898, 0.017481887817382812, 0.01744895935058594, 0.017667999267578127, 0.01755660820007324, 0.01761824035644531, 0.01747667121887207, 0.01748643112182617, 0.017466943740844728, 0.017534591674804687, 0.017374015808105468, 0.017438528060913085, 0.017575904846191405, 0.01766371154785156, 0.017312255859375, 0.01777155113220215, 0.017357568740844726, 0.01738355255126953, 0.017384607315063475, 
0.017320192337036133, 0.017301279067993162, 0.017390815734863282, 0.017375648498535155, 0.017379039764404296, 0.01749900817871094, 0.01771321678161621, 0.017375455856323243, 0.017469152450561524, 0.017426431655883787, 0.01744895935058594, 0.017557504653930665, 0.017485824584960938, 0.01738163185119629, 0.017638847351074217, 0.01747999954223633, 0.01743667221069336, 0.01744211196899414, 0.017328832626342775, 0.01735024070739746, 0.017344352722167968, 0.017324607849121095, 0.017198400497436525, 0.017183584213256838, 0.017165407180786133, 0.0171015682220459, 0.01715814399719238, 0.017268224716186522, 0.017125791549682617, 0.01704473686218262, 0.017072992324829103, 0.017125247955322265, 0.017082496643066405, 0.017067455291748048, 0.016990175247192384, 0.017245983123779295, 0.01685759925842285, 0.017075935363769532, 0.016997215270996093, 0.01703500747680664, 0.017373472213745116, 0.01700383949279785, 0.017016544342041015, 0.016932863235473633, 0.016955999374389647, 0.017303647994995116, 0.016990207672119142, 0.016969568252563478, 0.016941503524780275, 0.016912031173706054, 0.01700841522216797, 0.016971967697143556, 0.017008735656738282, 0.01699635124206543, 0.016964704513549804, 0.01701878356933594, 0.01733737564086914, 0.018403295516967774, 0.018300928115844727, 0.017092607498168946, 0.017036991119384767, 0.0169736328125, 0.01724457550048828, 0.017315935134887696, 0.016943103790283204, 0.016959487915039064, 0.01697737693786621, 0.016990304946899414, 0.016922399520874022, 0.01683523178100586, 0.016854272842407227, 0.017158912658691405, 0.017007680892944337, 0.016756767272949218, 0.016827295303344727, 0.01673344039916992, 0.01685171127319336, 0.016850175857543944, 0.01856198310852051, 0.016906047821044923, 0.016895999908447267, 0.016950847625732422, 0.017400255203247072, 0.017168384552001953, 0.017217279434204102, 0.017144159317016603, 0.017485824584960938, 0.01775984001159668, 0.01706118392944336, 0.017460224151611328, 0.01703321647644043, 0.017000640869140625, 0.01700422477722168, 0.01683673667907715, 0.016926719665527345, 0.016870880126953126, 0.016926464080810548, 0.01702889633178711, 0.0169355525970459, 0.016832895278930664, 0.016802976608276367, 0.01684886360168457, 0.016974720001220703, 0.016825664520263673, 0.01698476791381836, 0.016871423721313478, 0.016868896484375, 0.016919008255004885, 0.016859136581420898, 0.016895999908447267, 0.017336320877075196, 0.0169736328125, 0.01691164779663086, 0.016758687973022462, 0.01679667282104492, 0.01685081672668457, 0.016997760772705078, 0.016810752868652343, 0.016867328643798828, 0.01682966423034668, 0.016896799087524415, 0.01684889602661133, 0.016916479110717773, 0.016871423721313478, 0.016788896560668946, 0.01687612724304199, 0.016885408401489256, 0.01686188888549805, 0.016891424179077148, 0.016896480560302733, 0.016910335540771485, 0.01693027114868164, 0.017020448684692383, 0.016857887268066408, 0.016859359741210937, 0.016850944519042968, 0.016842752456665038, 0.016872831344604492, 0.016999040603637695, 0.01676288032531738, 0.01682636833190918, 0.016879615783691407, 0.016922624588012695, 0.016857088088989256, 0.01723391914367676, 0.01733030319213867, 0.017876863479614258, 0.01710233688354492, 0.016982751846313475, 0.016891679763793944, 0.017137664794921875, 0.017047552108764647, 0.01700979232788086, 0.017017728805541994, 0.01690611267089844, 0.017254112243652343, 0.017103008270263672, 0.017037567138671876, 0.016940095901489257, 0.016923168182373046, 0.01693120002746582, 0.016931903839111327, 0.016952287673950194, 0.01687299156188965, 
0.01699385643005371, 0.016944032669067383, 0.016863231658935548, 0.016945119857788085, 0.0171376953125, 0.01694483184814453, 0.01696099281311035, 0.016779808044433592, 0.01693302345275879, 0.01694870376586914, 0.017012639999389647, 0.016950048446655274, 0.016922624588012695, 0.01701888084411621, 0.016916479110717773, 0.016869375228881836, 0.01697977638244629, 0.017125568389892577, 0.016840063095092773, 0.01682022476196289, 0.01683475112915039, 0.016871456146240235, 0.016853408813476564, 0.016817983627319337, 0.016905887603759766, 0.01689654350280762, 0.01695155143737793, 0.01684480094909668, 0.016855167388916015, 0.01691334342956543, 0.016847295761108397, 0.016789152145385743, 0.017017696380615236, 0.016869375228881836, 0.01686300849914551, 0.016910560607910158, 0.016854751586914064, 0.016875072479248045, 0.016964096069335938, 0.017286464691162108, 0.01696246337890625, 0.01721139144897461, 0.017090560913085938, 0.01690403175354004, 0.017071680068969728, 0.016944896697998046, 0.016816640853881838, 0.016816287994384765, 0.016887615203857422, 0.016950847625732422, 0.017017663955688475, 0.01724367904663086, 0.016845216751098634, 0.016842815399169923, 0.016923839569091798, 0.016881631851196288, 0.01695420837402344, 0.016746496200561522, 0.01671731185913086, 0.01676860809326172, 0.01677199935913086, 0.016885759353637696, 0.017013919830322265, 0.016837472915649413, 0.01696767997741699, 0.0167587833404541, 0.016866943359375, 0.01676643180847168, 0.016859840393066407, 0.016892127990722657, 0.016922624588012695, 0.016768960952758788, 0.01679088020324707, 0.016800544738769532, 0.016969663619995117, 0.01681612777709961, 0.01682841682434082, 0.016775264739990234, 0.01684390449523926, 0.01679439926147461, 0.016855039596557618, 0.01677926445007324, 0.016764928817749023, 0.01679769515991211, 0.016853311538696288, 0.016856767654418944, 0.016881023406982422, 0.01678099250793457, 0.016939136505126955, 0.01679120063781738, 0.016905183792114257, 0.016773120880126953, 0.016744447708129884, 0.01681612777709961, 0.016750175476074217, 0.01667523193359375, 0.01682022476196289, 0.0169385929107666, 0.016779680252075196, 0.01700454330444336, 0.016891904830932617, 0.017031167984008787, 0.017151519775390624, 0.016869855880737306, 0.016926559448242187, 0.016840543746948242, 0.017088832855224608, 0.016815231323242186, 0.01684774398803711, 0.01678745651245117, 0.016824575424194337, 0.0169138240814209, 0.016761184692382813, 0.016842752456665038, 0.016895999908447267, 0.016867263793945313, 0.01674041557312012, 0.01679974365234375, 0.01682431983947754, 0.016844287872314453, 0.017033727645874023, 0.016865280151367186, 0.016942527770996092, 0.016828800201416017, 0.016832704544067382, 0.01679155158996582, 0.017002208709716797, 0.016819839477539063, 0.016783487319946288, 0.01686992073059082, 0.01681407928466797, 0.017012128829956053, 0.017073856353759766, 0.0168637752532959, 0.016863616943359375, 0.016836639404296874, 0.016778816223144533, 0.01685750389099121, 0.016865280151367186, 0.017059200286865233, 0.016789344787597655, 0.016735008239746094, 0.016787647247314453, 0.016877727508544924, 0.016837919235229492, 0.016836000442504884, 0.016776159286499025, 0.016831647872924804, 0.016777631759643554, 0.016760576248168946, 0.016863744735717775, 0.016803199768066407, 0.016853120803833006, 0.01679974365234375, 0.016746496200561522, 0.01692038345336914, 0.016902271270751952, 0.016941120147705078, 0.01675801658630371, 0.016780031204223632, 0.017041088104248047, 0.017161983489990235, 0.017062112808227538, 0.01691007995605469, 
0.01716694450378418, 0.017555456161499023, 0.017004127502441405, 0.016769439697265624, 0.016801055908203126, 0.0168056640625, 0.01674336051940918, 0.01683772850036621, 0.017146623611450196, 0.016851104736328126, 0.016805856704711915, 0.016889888763427733, 0.016728063583374024, 0.01680748748779297, 0.017061471939086914, 0.016900640487670898, 0.01697823905944824, 0.01699635124206543, 0.017512287139892578, 0.017412128448486327, 0.016840831756591797, 0.01680121612548828, 0.016846719741821288, 0.01673081588745117, 0.0167586555480957, 0.016785760879516602, 0.016967456817626955, 0.01683660888671875, 0.016876863479614257, 0.016835199356079102, 0.016921920776367186, 0.01680064010620117, 0.016870431900024414, 0.016996543884277345, 0.01686105537414551, 0.01686582374572754, 0.016838016510009764, 0.01687436866760254, 0.016810047149658204, 0.016743616104125978, 0.016773855209350586, 0.01676291275024414, 0.0168222713470459, 0.016887807846069337, 0.016850080490112305, 0.016835424423217775, 0.016846303939819337, 0.017578271865844725, 0.018799200057983398, 0.01704310417175293, 0.01724825668334961, 0.016913152694702147, 0.01688960075378418, 0.016791807174682618, 0.01683046340942383, 0.016900096893310547, 0.01710598373413086, 0.017131967544555662, 0.017230239868164063, 0.018161760330200196, 0.016887807846069337, 0.016881311416625976, 0.01700899124145508, 0.016799936294555663, 0.016721120834350588, 0.016797855377197267, 0.01686163139343262, 0.01701273536682129, 0.016898048400878905, 0.016775136947631837, 0.01685465621948242, 0.01685750389099121, 0.016976991653442384, 0.017888160705566408, 0.01691769599914551, 0.016880128860473635, 0.01678163146972656, 0.016943103790283204, 0.01681350326538086, 0.016764768600463865, 0.016777952194213866, 0.016898048400878905, 0.01670710372924805, 0.01682793617248535, 0.016833471298217773, 0.016697343826293946, 0.01677107238769531, 0.01685852813720703, 0.01681059265136719, 0.016913759231567384, 0.017086944580078124, 0.016773311614990235, 0.01682431983947754, 0.016790912628173827, 0.01740611267089844, 0.016961151123046875, 0.01686000061035156, 0.016818368911743164, 0.0168155517578125, 0.01676736068725586, 0.01679308891296387, 0.016773408889770507, 0.016711904525756837, 0.016809471130371095, 0.016765439987182617, 0.01693391990661621, 0.01694163131713867, 0.017197471618652344, 0.016875232696533203, 0.01695542335510254, 0.016896255493164064, 0.016940095901489257, 0.016786367416381835, 0.016760831832885743, 0.016853408813476564, 0.017078847885131837, 0.01718681526184082, 0.017168384552001953, 0.016977920532226562, 0.01679155158996582, 0.016777215957641603, 0.016819807052612306, 0.016778783798217775, 0.01683087921142578, 0.016666368484497072, 0.017023712158203124, 0.016811904907226564, 0.016867807388305664, 0.016833887100219727, 0.016829824447631835, 0.016890464782714845, 0.016945503234863282, 0.016831520080566407, 0.016865951538085937, 0.016891839981079102, 0.016878976821899414, 0.016740543365478516, 0.016844959259033204, 0.016808416366577147, 0.016730304718017577, 0.016842336654663087, 0.01681884765625, 0.016805631637573242, 0.01681203269958496, 0.01683987236022949, 0.01675516891479492, 0.016799583435058593, 0.01683251190185547, 0.01713808059692383, 0.01681407928466797, 0.016905567169189454, 0.016755456924438476, 0.016774463653564452, 0.016782016754150392, 0.016810239791870116, 0.01944550323486328, 0.017047136306762696, 0.016995872497558594, 0.01698086357116699, 0.01687660789489746, 0.016881696701049803, 0.016868255615234376, 0.016988351821899415, 0.016831872940063476, 
0.01680633544921875, 0.017186368942260742, 0.016882240295410158, 0.017147071838378908, 0.016910463333129882, 0.016807840347290038, 0.016984319686889647, 0.016830015182495116, 0.016816991806030274, 0.01680998420715332, 0.017354496002197267, 0.016898303985595702, 0.016955392837524414]",tokens/s,58.612657665205695,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4051.6608,4705.878016,0.0,4303.355904,4034.388992,s,1,11.65558203125,11.65558203125,0.0,11.65558203125,11.65558203125,11.65558203125,11.65558203125,[11.65558203125],,kWh,9.022081215000147e-05,9.941414426795287e-06,2.8033633537996594e-05,0.00012819586011479335,,MB,3955.494912,4764.598272,0.0,4347.396096,4202.697728,s,10,2.0061112823486327,0.20061112823486327,0.0008757677499556778,0.2009489288330078,0.20129297943115235,0.20137267837524414,0.20143643753051757,"[0.19828892517089844, 0.20027593994140624, 0.20004127502441407, 0.20071484375, 0.20091818237304687, 0.2010269775390625, 0.2012752685546875, 0.20145237731933593, 0.20097967529296876, 0.2011378173828125]",tokens/s,1276.1006941763012,kWh,5.867139011999597e-06,6.467650760084298e-07,3.901536454559707e-06,1.0415440542567733e-05,tokens/kWh,24578893.13022644,MB,3959.84896,4764.598272,0.0,4347.396096,4213.085184,s,10,19.486590698242185,1.9486590698242185,0.006817076695750689,1.9476624755859375,1.9562186401367188,1.9587455505371094,1.9607670788574219,"[1.9426688232421876, 1.9524901123046876, 1.93572265625, 1.9473497314453125, 1.952477783203125, 1.9556571044921875, 1.9612724609375, 1.9479752197265625, 1.9445205078125, 1.946456298828125]",tokens/s,32.329924190219174,kWh,5.694843330633368e-05,6.281540090329729e-06,3.764094122384126e-05,0.00010087091462050466,tokens/kWh,624560.6103307167,,s,630,19.484251640319815,0.030927383556063216,0.000449381620963468,0.030843088150024414,0.03126681518554687,0.031476733589172363,0.03337422531127932,"[0.03200569534301758, 0.03116262435913086, 0.03131161689758301, 0.031125343322753907, 0.031064416885375975, 0.03096835136413574, 0.03107807922363281, 0.03094540786743164, 0.030879936218261718, 0.03091983985900879, 0.030939104080200196, 0.030860160827636717, 0.030948896408081055, 0.031225984573364257, 0.03092515182495117, 0.03094118309020996, 0.03095756721496582, 0.030985984802246094, 0.030886112213134767, 0.03085260772705078, 0.030986591339111327, 0.030697664260864257, 0.030646272659301758, 0.030533632278442382, 0.030801919937133788, 0.030742528915405274, 0.030732095718383787, 0.030549856185913087, 0.03055446434020996, 0.03072204780578613, 0.030684511184692384, 0.03059779167175293, 0.030951520919799805, 0.03171718406677246, 0.030801376342773436, 0.030695552825927733, 0.030906879425048828, 0.030629888534545898, 0.030642175674438478, 0.030479360580444335, 0.03058086395263672, 0.03049750328063965, 0.030890144348144532, 0.030785535812377928, 0.030861312866210938, 0.03109619140625, 0.03061619186401367, 0.03060531234741211, 0.030703071594238282, 0.030677536010742187, 0.030471775054931642, 0.030810527801513672, 0.03103539276123047, 0.030670495986938478, 0.030552032470703126, 0.030603744506835937, 
0.031057823181152345, 0.030924800872802735, 0.03083263969421387, 0.03060883140563965, 0.03050694465637207, 0.030505023956298827, 0.0305031681060791, 0.03163337516784668, 0.03096086311340332, 0.03074224090576172, 0.030680927276611328, 0.03048464012145996, 0.030620800018310548, 0.030624576568603516, 0.03049478340148926, 0.03078963279724121, 0.030482463836669922, 0.030525407791137694, 0.03042889595031738, 0.030691616058349608, 0.03076300811767578, 0.0308175048828125, 0.03150723266601563, 0.031062015533447264, 0.030901376724243163, 0.030916927337646484, 0.031402591705322266, 0.030934431076049804, 0.031056447982788084, 0.031068159103393556, 0.03136716842651367, 0.031225439071655273, 0.031181215286254883, 0.031006719589233397, 0.031068159103393556, 0.03126681518554687, 0.031178112030029296, 0.031160959243774412, 0.03183600044250488, 0.031201696395874022, 0.03138508796691895, 0.03126611137390137, 0.031179712295532225, 0.030939327239990235, 0.03138131141662598, 0.031129568099975587, 0.031062047958374025, 0.031530847549438475, 0.031121376037597657, 0.031008960723876954, 0.03102044868469238, 0.031009376525878905, 0.03097088050842285, 0.030962688446044922, 0.03136646461486817, 0.03105219268798828, 0.03122204780578613, 0.03108585548400879, 0.031214303970336914, 0.030878751754760743, 0.03078652763366699, 0.03096575927734375, 0.03069935989379883, 0.030801599502563476, 0.030773727416992188, 0.030687231063842774, 0.030670143127441405, 0.03071455955505371, 0.0306680965423584, 0.030634624481201172, 0.032002433776855466, 0.031074111938476562, 0.030771392822265625, 0.03096771240234375, 0.030695520401000976, 0.030636032104492186, 0.03081625556945801, 0.030574464797973634, 0.030518688201904298, 0.030566368103027344, 0.03060950469970703, 0.03039299201965332, 0.030465984344482423, 0.03052547264099121, 0.031186975479125977, 0.030691328048706053, 0.031182111740112303, 0.03117750358581543, 0.03076028823852539, 0.03062588882446289, 0.031041088104248046, 0.030557119369506835, 0.030538848876953125, 0.03051408004760742, 0.030473920822143556, 0.030687551498413086, 0.031155967712402345, 0.030609216690063477, 0.030517696380615234, 0.03061555290222168, 0.030750272750854492, 0.030767551422119142, 0.030955520629882813, 0.030683263778686524, 0.030469087600708007, 0.030422975540161133, 0.031063007354736327, 0.030475296020507813, 0.030551008224487305, 0.03038515281677246, 0.03050704002380371, 0.03048137664794922, 0.030756864547729492, 0.03056822395324707, 0.03043350410461426, 0.03051241683959961, 0.031013599395751955, 0.03097395133972168, 0.030435327529907227, 0.030735456466674804, 0.03100262451171875, 0.03089699172973633, 0.03069718360900879, 0.03057695960998535, 0.030576671600341797, 0.030639839172363282, 0.03070751953125, 0.03047987174987793, 0.031220703125, 0.030748159408569335, 0.03071219253540039, 0.030653600692749024, 0.03068822479248047, 0.031723455429077146, 0.03210246276855469, 0.030714815139770507, 0.03392387390136719, 0.031185152053833008, 0.030910432815551756, 0.030760959625244142, 0.030691072463989257, 0.030654687881469727, 0.03086470413208008, 0.03071820831298828, 0.03053615951538086, 0.030498432159423827, 0.030887807846069336, 0.030871200561523437, 0.03181654357910156, 0.030979200363159178, 0.0306615047454834, 0.030611711502075194, 0.03132572746276856, 0.030527711868286133, 0.030496000289916992, 0.030528255462646484, 0.0327823371887207, 0.030866880416870118, 0.030635583877563478, 0.03073308753967285, 0.030804000854492187, 0.030779264450073243, 0.030957759857177733, 0.03059267234802246, 0.030888416290283202, 
0.03080735969543457, 0.030470144271850585, 0.030460447311401368, 0.030388383865356444, 0.030867456436157226, 0.030746240615844727, 0.03099635124206543, 0.03101692771911621, 0.030761503219604493, 0.03058006477355957, 0.030809823989868163, 0.03134553527832031, 0.030662431716918945, 0.03073823928833008, 0.030712352752685548, 0.03069536018371582, 0.03076300811767578, 0.03077280044555664, 0.03071571159362793, 0.03121011161804199, 0.03094937515258789, 0.03057663917541504, 0.030658559799194338, 0.030701568603515625, 0.030728191375732423, 0.030621696472167968, 0.0309303035736084, 0.030556800842285157, 0.0307073917388916, 0.03097222328186035, 0.031147872924804688, 0.032393280029296874, 0.031107135772705078, 0.031096160888671873, 0.031359647750854494, 0.030881792068481444, 0.031522144317626954, 0.030966079711914063, 0.03240924835205078, 0.032917984008789064, 0.031146207809448243, 0.03088585662841797, 0.030738752365112306, 0.030573280334472656, 0.030766080856323243, 0.03059916877746582, 0.030528959274291993, 0.030580671310424804, 0.030517887115478516, 0.03063158416748047, 0.03076131248474121, 0.03057663917541504, 0.03071308708190918, 0.030784479141235353, 0.03116828727722168, 0.03097804832458496, 0.03101900863647461, 0.031174495697021486, 0.031111328125, 0.031040672302246095, 0.03088265609741211, 0.03073967933654785, 0.030845087051391603, 0.030923040390014648, 0.030843103408813476, 0.030802047729492188, 0.030660608291625976, 0.030843072891235352, 0.03101795196533203, 0.030870367050170898, 0.03085312080383301, 0.031268863677978515, 0.03124224090576172, 0.03132620811462403, 0.031178752899169923, 0.030881792068481444, 0.03101081657409668, 0.03103708839416504, 0.03080182456970215, 0.03067740821838379, 0.030842912673950194, 0.030705280303955078, 0.030568832397460936, 0.03138764762878418, 0.03073023986816406, 0.030711807250976563, 0.031035263061523436, 0.03083647918701172, 0.03070604705810547, 0.030785535812377928, 0.031309215545654294, 0.031062847137451173, 0.03098806381225586, 0.030922752380371094, 0.03241001510620117, 0.031244287490844725, 0.03113907241821289, 0.031169279098510742, 0.03404326248168945, 0.03127948760986328, 0.031000831604003905, 0.03092585563659668, 0.030890975952148438, 0.033560577392578124, 0.031041536331176758, 0.030707359313964844, 0.03060361671447754, 0.03069126319885254, 0.030666816711425782, 0.031108224868774414, 0.03143945693969727, 0.031201568603515625, 0.03089788818359375, 0.03059766387939453, 0.03063167953491211, 0.030736383438110353, 0.030660287857055664, 0.030779136657714843, 0.030630464553833007, 0.030600448608398438, 0.03045452880859375, 0.030527488708496094, 0.03061555290222168, 0.031737600326538086, 0.030808319091796876, 0.030666751861572264, 0.031423583984375, 0.0309400634765625, 0.030822431564331055, 0.030606624603271484, 0.030528192520141603, 0.03086729621887207, 0.030895872116088866, 0.030957088470458985, 0.031001472473144533, 0.031079584121704102, 0.030864479064941407, 0.03138659286499024, 0.03121436882019043, 0.03085433578491211, 0.030947904586791992, 0.030918912887573244, 0.03102720069885254, 0.03102899169921875, 0.030961919784545898, 0.03094940757751465, 0.03103331184387207, 0.030963712692260743, 0.03112550354003906, 0.031105024337768555, 0.030946367263793944, 0.03107731246948242, 0.030863359451293947, 0.030902271270751954, 0.031018815994262695, 0.030777536392211913, 0.030843072891235352, 0.03205990219116211, 0.03154668807983398, 0.031111135482788085, 0.03204537582397461, 0.031043455123901366, 0.031099327087402345, 0.030994688034057617, 0.0309901123046875, 
0.030771072387695313, 0.030908735275268554, 0.031032703399658204, 0.03130409622192383, 0.031121631622314454, 0.03131391906738281, 0.0311910400390625, 0.031104223251342773, 0.031434879302978516, 0.03126953506469726, 0.031086463928222657, 0.031211231231689455, 0.031198944091796875, 0.031214271545410156, 0.031129600524902344, 0.03124838447570801, 0.03125657653808594, 0.031143936157226562, 0.031157407760620117, 0.031176895141601563, 0.03122243118286133, 0.031190080642700194, 0.031312383651733396, 0.03120787239074707, 0.03123200035095215, 0.031301631927490234, 0.03122364807128906, 0.03126681518554687, 0.03104960060119629, 0.031054111480712892, 0.031748096466064454, 0.03103936004638672, 0.031135520935058593, 0.03114633560180664, 0.031148191452026366, 0.031307615280151364, 0.03112550354003906, 0.031035200119018554, 0.030996671676635744, 0.031086368560791014, 0.03108006477355957, 0.031260608673095706, 0.030764896392822264, 0.030683263778686524, 0.030833343505859374, 0.03078144073486328, 0.030865215301513673, 0.030984575271606446, 0.03096953582763672, 0.03080726432800293, 0.030856096267700195, 0.031074304580688477, 0.03073539161682129, 0.0306309757232666, 0.030713024139404296, 0.031657407760620114, 0.03083900833129883, 0.03074412727355957, 0.030663455963134766, 0.03080396842956543, 0.03078963279724121, 0.030943231582641603, 0.031143936157226562, 0.030930944442749023, 0.030704832077026366, 0.030717855453491212, 0.030636959075927735, 0.030793727874755858, 0.030639839172363282, 0.030639904022216797, 0.030537439346313477, 0.030442079544067382, 0.030757055282592774, 0.030929023742675782, 0.030572416305541993, 0.030682559967041015, 0.030881952285766602, 0.03125103950500488, 0.03076870346069336, 0.030855072021484374, 0.030654495239257812, 0.03058633613586426, 0.03062665557861328, 0.03366844940185547, 0.031205888748168945, 0.030727935791015626, 0.03081667137145996, 0.030571840286254884, 0.030587423324584962, 0.030619808197021484, 0.030504959106445313, 0.030463136672973633, 0.030757152557373046, 0.030634559631347657, 0.03061555290222168, 0.030793727874755858, 0.030807392120361328, 0.030822912216186524, 0.030707263946533205, 0.030868352890014647, 0.03080806350708008, 0.03105353546142578, 0.03078963279724121, 0.03373030471801758, 0.031437055587768555, 0.031322111129760744, 0.03106012725830078, 0.031338336944580075, 0.03114112091064453, 0.031111072540283204, 0.03084988784790039, 0.030838783264160157, 0.030717248916625976, 0.030753471374511718, 0.030677024841308593, 0.030957536697387697, 0.030914623260498045, 0.030869440078735353, 0.03181779289245606, 0.031000448226928712, 0.030957759857177733, 0.030970144271850585, 0.030783424377441405, 0.031090911865234376, 0.03082444763183594, 0.031131488800048828, 0.03061974334716797, 0.030519359588623046, 0.030496448516845704, 0.030887584686279296, 0.03049894332885742, 0.030927391052246095, 0.030468095779418947, 0.030919712066650392, 0.030929792404174806, 0.030875743865966795, 0.03079782485961914, 0.03073148727416992, 0.030628639221191405, 0.030660512924194337, 0.03073846435546875, 0.030943296432495118, 0.030762399673461914, 0.03089468765258789, 0.030905664443969725, 0.03072275161743164, 0.030515199661254884, 0.030709760665893555, 0.03095961570739746, 0.03068838310241699, 0.03057084846496582, 0.03062019157409668, 0.030663808822631835, 0.03081100845336914, 0.030699487686157226, 0.030571935653686523, 0.03058460807800293, 0.03045667266845703, 0.03094633674621582, 0.03062019157409668, 0.03063363265991211, 0.030615583419799804, 0.03062451171875, 0.030623743057250977, 
0.03070979118347168, 0.03081388854980469, 0.03085456085205078, 0.0312042236328125, 0.0312801284790039, 0.030923744201660157, 0.032280609130859374, 0.03384630584716797, 0.03086137580871582, 0.0310031681060791, 0.030828927993774412, 0.030631744384765625, 0.030817760467529296, 0.03071788787841797, 0.0307326717376709, 0.030644607543945313, 0.03072140884399414, 0.031653631210327146, 0.03097216033935547, 0.030914560317993164, 0.031098335266113282, 0.03070751953125, 0.03076579284667969, 0.030789567947387696, 0.030625856399536133, 0.0306331844329834, 0.030591455459594727, 0.030531103134155274, 0.03062156867980957, 0.03074742317199707, 0.03122377586364746, 0.030957727432250975, 0.030920543670654298, 0.030845087051391603, 0.03141222381591797, 0.031102975845336913, 0.030592287063598633, 0.03077187156677246, 0.03080179214477539, 0.030773279190063476, 0.030800031661987304, 0.030738271713256837, 0.030791168212890626, 0.03057935905456543, 0.030656511306762696, 0.03078873634338379, 0.030744735717773437, 0.030743263244628907, 0.03078870391845703, 0.030601600646972656, 0.03071017646789551, 0.030875680923461914, 0.030921823501586915, 0.030640384674072266, 0.030728031158447265, 0.03070863914489746, 0.030660608291625976, 0.030633504867553712, 0.030900703430175782, 0.03082428741455078, 0.03093302345275879, 0.03079408073425293, 0.031106847763061524, 0.031089727401733398, 0.03092947196960449, 0.030713951110839844, 0.030683423995971678, 0.03095347213745117, 0.030629888534545898, 0.030958784103393554, 0.030937919616699217, 0.03423231887817383, 0.03131596755981445, 0.031039487838745116, 0.030996480941772462, 0.030838111877441406, 0.03075472068786621, 0.030977888107299803, 0.030767616271972657, 0.03067535972595215]",tokens/s,32.33380535366863,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1474.31424,1326.383104,0.0,947.912704,945.250304,s,1,8.1235908203125,8.1235908203125,0.0,8.1235908203125,8.1235908203125,8.1235908203125,8.1235908203125,[8.1235908203125],,kWh,3.681505969166968e-05,4.053782199671776e-06,1.08444531200097e-05,5.1713295011351154e-05,,MB,1517.285376,1519.321088,0.0,1111.49056,1098.82368,s,10,1.6054995269775392,0.1605499526977539,0.000562217233122397,0.16068774414062498,0.16113861389160156,0.1612920928955078,0.1614148760986328,"[0.15935037231445312, 0.16074298095703124, 0.16031491088867186, 0.16026255798339845, 0.16086659240722656, 0.16110450744628907, 0.16076307678222657, 0.16063250732421874, 0.16001644897460937, 0.16144557189941405]",tokens/s,1594.5193112696659,kWh,4.704023410647971e-06,5.185785808379412e-07,3.1308005646033637e-06,8.353402556089277e-06,tokens/kWh,30646194.563362308,MB,1527.250944,1653.538816,0.0,1245.708288,1164.242432,s,10,88.924287109375,8.8924287109375,0.01463182725578284,8.894529296875,8.90892509765625,8.910288720703125,8.911379619140625,"[8.863833984375, 8.874208984375, 8.8817548828125, 8.88940234375, 8.8925712890625, 8.8964873046875, 8.901982421875, 8.903771484375, 8.9086220703125, 8.91165234375]",tokens/s,7.084678668552195,kWh,0.00026004556043518647,2.8684570792508123e-05,0.00017283075687119642,0.00046156088809889105,tokens/kWh,136493.36766702388,,s,630,88.92091477966304,0.1411443091740684,0.0003321701887789103,0.14115912628173827,0.14155792083740235,0.1416606658935547,0.14184947067260742,"[0.14091314697265625, 0.1400463409423828, 0.14019378662109375, 0.1403412780761719, 0.14071157836914064, 0.14059756469726561, 0.140287841796875, 0.1404704284667969, 0.14042112731933593, 0.14061305236816407, 0.14044627380371094, 0.14091673278808595, 0.14092445373535156, 0.14058134460449218, 0.14066204833984375, 0.140548828125, 0.14014402770996093, 0.14071049499511717, 0.14068080139160155, 0.14068368530273437, 0.14069728088378905, 0.14054237365722655, 0.14061270141601562, 0.14057350158691406, 0.14058090209960938, 0.1405625, 0.1407724151611328, 0.1408070068359375, 0.14080995178222655, 0.14059344482421876, 0.14050918579101562, 0.1408125762939453, 0.14064405822753906, 0.14094857788085938, 0.1414276123046875, 0.14073036193847657, 0.14074406433105469, 0.14078016662597656, 0.14097613525390626, 0.14056646728515626, 0.14067132568359375, 0.14057647705078125, 0.140930908203125, 0.1405853729248047, 0.14074649047851562, 0.14074266052246093, 0.1403574981689453, 0.14065061950683594, 0.14066259765625, 0.1412671356201172, 0.14080819702148437, 0.1402882537841797, 0.14103030395507812, 0.1404550018310547, 0.1405806121826172, 0.1409269714355469, 0.14149609375, 0.14093927001953124, 0.14108905029296875, 0.14070346069335937, 0.14065397644042968, 0.14053817749023437, 0.14121200561523437, 0.14041920471191408, 0.14058505249023437, 0.14048573303222656, 0.14045890808105468, 
0.1407631378173828, 0.14065078735351563, 0.14117654418945313, 0.1406402587890625, 0.1406543731689453, 0.14084451293945313, 0.14039251708984374, 0.14085804748535155, 0.14065565490722656, 0.1406164093017578, 0.14096954345703125, 0.14056108093261718, 0.14060304260253906, 0.14066943359375, 0.14078140258789062, 0.14096771240234374, 0.14086781311035157, 0.14094950866699218, 0.14097613525390626, 0.14097613525390626, 0.14083065795898436, 0.14094090270996093, 0.14127740478515624, 0.14087564086914062, 0.14126527404785155, 0.14073849487304688, 0.1407611541748047, 0.1407836151123047, 0.14070950317382813, 0.14091065979003906, 0.1409438018798828, 0.14117808532714843, 0.14086611938476562, 0.14110723876953124, 0.14052120971679688, 0.14061395263671875, 0.14123526000976563, 0.14069036865234374, 0.14108262634277344, 0.1409551696777344, 0.14087590026855468, 0.140957275390625, 0.14098713684082032, 0.14085673522949219, 0.140778076171875, 0.14141439819335938, 0.14121369934082031, 0.14089631652832033, 0.14098220825195312, 0.1410248565673828, 0.1407983703613281, 0.14067916870117186, 0.141046875, 0.14133529663085936, 0.14077967834472657, 0.1410885772705078, 0.14058709716796874, 0.1410274200439453, 0.14074879455566405, 0.1414819793701172, 0.1406320343017578, 0.14080975341796875, 0.1405788116455078, 0.14077548217773436, 0.140767578125, 0.14120150756835936, 0.14111439514160157, 0.14093606567382813, 0.14092707824707032, 0.14038761901855468, 0.14095750427246093, 0.1405572204589844, 0.14129151916503907, 0.14103321838378907, 0.14111318969726563, 0.1406754913330078, 0.14069325256347656, 0.1408350067138672, 0.1406519317626953, 0.14105410766601562, 0.14085699462890625, 0.14122685241699218, 0.14077337646484375, 0.14075820922851562, 0.14098915100097656, 0.1408527069091797, 0.14077719116210938, 0.14111036682128905, 0.14086038208007812, 0.14077772521972656, 0.14123190307617187, 0.1409031982421875, 0.1410068817138672, 0.14092643737792968, 0.14090847778320312, 0.14137834167480468, 0.14119949340820312, 0.14095872497558593, 0.14095021057128906, 0.1410662384033203, 0.14060748291015626, 0.14102857971191407, 0.14121244812011718, 0.14095578002929687, 0.14115213012695313, 0.14081622314453124, 0.14129078674316406, 0.1409135284423828, 0.14096298217773437, 0.1407189483642578, 0.14160211181640625, 0.1412403564453125, 0.14106655883789063, 0.14107887268066407, 0.14084095764160157, 0.14129766845703126, 0.14104098510742188, 0.1412626953125, 0.14095616149902343, 0.14118739318847656, 0.14098588562011719, 0.1411047668457031, 0.14146517944335937, 0.140505126953125, 0.14051954650878906, 0.1408916473388672, 0.14044364929199218, 0.14086749267578125, 0.14098722839355468, 0.14116181945800782, 0.14096646118164063, 0.14073251342773438, 0.1410846710205078, 0.14103453063964844, 0.14096015930175781, 0.14124691772460937, 0.14110508728027343, 0.14089360046386717, 0.1405447998046875, 0.1410396728515625, 0.14141229248046874, 0.14115225219726563, 0.141127685546875, 0.14089010620117187, 0.1409697570800781, 0.14090229797363282, 0.14100691223144532, 0.14112384033203124, 0.140943359375, 0.14119081115722656, 0.14099020385742186, 0.1411037139892578, 0.14079994201660156, 0.14107449340820313, 0.14137962341308594, 0.14098835754394531, 0.14149020385742186, 0.14116455078125, 0.14111485290527342, 0.1410089569091797, 0.141339111328125, 0.14120755004882812, 0.14123622131347657, 0.14131132507324218, 0.141191650390625, 0.14100679016113282, 0.14122575378417968, 0.1411846466064453, 0.14110092163085938, 0.1412843780517578, 0.14137338256835938, 0.14112127685546874, 
0.14088943481445312, 0.14117100524902343, 0.14134538269042968, 0.14094744873046874, 0.14173388671875, 0.1417090606689453, 0.1409927978515625, 0.14111946105957032, 0.14147584533691407, 0.14122950744628907, 0.1409974060058594, 0.14135621643066407, 0.14124911499023438, 0.14080812072753907, 0.14106434631347656, 0.14090316772460937, 0.14094137573242188, 0.14078764343261718, 0.14124447631835937, 0.14094540405273437, 0.14107192993164064, 0.14081272888183594, 0.14098629760742187, 0.14104786682128906, 0.14087948608398437, 0.1411075897216797, 0.14107752990722655, 0.14116117858886718, 0.14137779235839842, 0.14069480895996095, 0.14110794067382812, 0.1410867156982422, 0.14105746459960938, 0.1413347473144531, 0.14127040100097657, 0.1412352294921875, 0.14103955078125, 0.14105142211914062, 0.14108694458007812, 0.14104156494140624, 0.1414041290283203, 0.14131033325195314, 0.14109660339355468, 0.14108026123046874, 0.14129379272460937, 0.14114370727539063, 0.14088800048828126, 0.14112380981445313, 0.1410167999267578, 0.14114044189453126, 0.14090614318847655, 0.14132540893554688, 0.14110432434082032, 0.1412960968017578, 0.14134803771972657, 0.14135177612304686, 0.14112477111816407, 0.14128215026855467, 0.14144102478027343, 0.1413396759033203, 0.1413314208984375, 0.14117231750488282, 0.1411895294189453, 0.1410109405517578, 0.1414615020751953, 0.1413253479003906, 0.14125106811523438, 0.1414619903564453, 0.1411438446044922, 0.14117683410644533, 0.14145968627929686, 0.14107606506347656, 0.14098602294921875, 0.14136595153808593, 0.14145738220214843, 0.141125732421875, 0.14125836181640625, 0.14098045349121094, 0.1406304931640625, 0.141155517578125, 0.14097430419921875, 0.1408042297363281, 0.1411931915283203, 0.14117324829101563, 0.14086326599121093, 0.140996826171875, 0.1409129638671875, 0.14099130249023437, 0.14105186462402344, 0.1415013427734375, 0.14101539611816405, 0.14118159484863282, 0.14115927124023436, 0.1410498504638672, 0.14098591613769532, 0.1414351043701172, 0.14136767578125, 0.14105401611328125, 0.14114297485351562, 0.14099737548828126, 0.14109504699707032, 0.14125637817382813, 0.14104386901855467, 0.14164405822753906, 0.1409231719970703, 0.14128099060058594, 0.14119731140136718, 0.1410287628173828, 0.14150083923339843, 0.14118896484375, 0.14140623474121095, 0.1410780487060547, 0.14124520874023438, 0.140906494140625, 0.14107034301757812, 0.14149337768554687, 0.14125555419921876, 0.141459716796875, 0.14138064575195314, 0.14129020690917968, 0.14112358093261718, 0.1411494140625, 0.14130050659179688, 0.14126838684082033, 0.1414068145751953, 0.14136642456054688, 0.141455810546875, 0.1411219482421875, 0.14143618774414063, 0.14122000122070313, 0.14144940185546875, 0.1413504638671875, 0.14166099548339844, 0.14205746459960938, 0.14088552856445313, 0.14123802185058593, 0.14116732788085937, 0.14148809814453125, 0.14142057800292968, 0.14111001586914063, 0.14105331420898437, 0.14058790588378905, 0.14132199096679687, 0.14124986267089842, 0.1409297637939453, 0.14166026306152343, 0.1415801544189453, 0.1410846710205078, 0.14109933471679686, 0.14105158996582032, 0.14124850463867186, 0.1412460174560547, 0.14157868957519532, 0.1410190734863281, 0.14139808654785158, 0.14098136901855468, 0.1408275146484375, 0.14113154602050781, 0.1414126434326172, 0.14120498657226563, 0.14177325439453126, 0.1409249267578125, 0.1407422332763672, 0.141103515625, 0.14103753662109375, 0.14172163391113282, 0.14125465393066405, 0.1415202178955078, 0.14110992431640626, 0.14101475524902343, 0.14113821411132813, 0.14140406799316407, 
0.14150869750976564, 0.14212300109863282, 0.14134259033203125, 0.1410952911376953, 0.1411822052001953, 0.1411012725830078, 0.14132048034667968, 0.14128128051757813, 0.141540771484375, 0.1415870361328125, 0.1413233642578125, 0.14125289916992187, 0.14118115234375, 0.1415970916748047, 0.14127513122558594, 0.1414757080078125, 0.14155789184570314, 0.14129971313476564, 0.1411746520996094, 0.1413448944091797, 0.14137753295898436, 0.14144511413574218, 0.14146560668945313, 0.1413017578125, 0.141461181640625, 0.14157037353515625, 0.14140316772460937, 0.14147273254394532, 0.14152499389648437, 0.1415355224609375, 0.14115440368652343, 0.14105430603027344, 0.1410768585205078, 0.14114201354980468, 0.14118911743164062, 0.14103330993652344, 0.14093959045410157, 0.14133859252929687, 0.14137242126464844, 0.1411961669921875, 0.14122125244140624, 0.14143869018554686, 0.1410592041015625, 0.14174595642089843, 0.14114714050292967, 0.1411287078857422, 0.14108038330078124, 0.14141629028320313, 0.14125823974609375, 0.1409291229248047, 0.14135760498046876, 0.1410474853515625, 0.14117123413085939, 0.14119436645507813, 0.14120640563964842, 0.14140165710449218, 0.1414405059814453, 0.14144607543945312, 0.14124832153320313, 0.14134471130371093, 0.14116685485839844, 0.14116864013671876, 0.14139974975585937, 0.14128477478027343, 0.14142477416992189, 0.14146409606933594, 0.14134707641601563, 0.14145542907714845, 0.14159353637695313, 0.14130476379394533, 0.14162358093261718, 0.14142031860351562, 0.14124237060546874, 0.1410867156982422, 0.1413605499267578, 0.14173654174804687, 0.14172808837890624, 0.14161404418945311, 0.14129740905761717, 0.1413477783203125, 0.1413305206298828, 0.14149746704101562, 0.14171420288085937, 0.14129270935058594, 0.1413824920654297, 0.1411788787841797, 0.14122189331054688, 0.14151065063476562, 0.1415146942138672, 0.14180972290039062, 0.14133485412597657, 0.14142022705078125, 0.14140211486816406, 0.14115020751953125, 0.14143775939941405, 0.14173721313476562, 0.14108937072753908, 0.14117056274414064, 0.1409701690673828, 0.14117269897460938, 0.14116358947753907, 0.14133544921875, 0.14150253295898438, 0.14108262634277344, 0.1408184356689453, 0.14107852172851562, 0.14129971313476564, 0.14138674926757813, 0.14119778442382813, 0.14169346618652343, 0.14154339599609375, 0.14124797058105468, 0.1411589813232422, 0.14185664367675782, 0.1412752685546875, 0.14132633972167968, 0.1414607391357422, 0.14117964172363281, 0.141127685546875, 0.14095542907714845, 0.14141658020019532, 0.14147596740722657, 0.14144610595703125, 0.14137855529785157, 0.14123519897460937, 0.14108924865722655, 0.14104835510253907, 0.14133436584472656, 0.1416848907470703, 0.14157005310058593, 0.1415720977783203, 0.1416171569824219, 0.14143043518066406, 0.1413410186767578, 0.14120355224609374, 0.14171720886230468, 0.14160914611816405, 0.1415068817138672, 0.14204444885253906, 0.1414127655029297, 0.1418275146484375, 0.14153298950195312, 0.14173846435546875, 0.14175830078125, 0.14171180725097657, 0.14144447326660156, 0.141304443359375, 0.1413570556640625, 0.14150656127929687, 0.14165196228027344, 0.14159461975097656, 0.14157618713378906, 0.1415265350341797, 0.14148483276367188, 0.14153494262695313, 0.1411744384765625, 0.1412696990966797, 0.14068531799316406, 0.14130989074707032, 0.14100486755371094, 0.14111669921875, 0.14115708923339843, 0.14146931457519532, 0.14154118347167968, 0.14108285522460937, 0.14133436584472656, 0.14112409973144532, 0.14095974731445313, 0.14117263793945312, 0.14198179626464844, 0.14138983154296875, 0.14141007995605467, 
0.14138316345214844, 0.14127381896972657, 0.14123008728027345, 0.14127740478515624, 0.14158416748046876, 0.14147789001464844, 0.14150825500488282, 0.14162281799316406, 0.14142886352539064, 0.14126559448242187, 0.14148812866210939, 0.14190751647949218, 0.1417527618408203, 0.14164787292480469, 0.14117225646972656, 0.1413586883544922, 0.14116543579101562, 0.14146488952636718, 0.14179420471191406, 0.14158210754394532, 0.1416592254638672, 0.14134701538085936, 0.14158717346191407, 0.14171340942382812, 0.1413324737548828, 0.14132415771484375, 0.14199411010742188, 0.14161509704589845, 0.14164492797851563, 0.1412657928466797, 0.1414632568359375, 0.1413104705810547, 0.14140599060058595, 0.14164178466796876, 0.14155564880371094, 0.14166639709472656, 0.141671875, 0.1415581817626953, 0.14161517333984375, 0.14151609802246093, 0.14178134155273436, 0.14151641845703125, 0.1415933074951172, 0.1413324737548828, 0.1413324737548828, 0.14165536499023437, 0.1418319091796875]",tokens/s,7.084947355310901,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4300.63616,4676.517888,0.0,4273.995776,4273.234432,s,1,10.7903603515625,10.7903603515625,0.0,10.7903603515625,10.7903603515625,10.7903603515625,10.7903603515625,[10.7903603515625],,kWh,9.58337975874656e-05,1.0563784547395522e-05,2.864113402399615e-05,0.00013503871615885726,,MB,4333.039616,4789.764096,0.0,4372.56192,4345.85856,s,10,2.024333602905273,0.20243336029052733,0.00048565171788214644,0.20238699340820313,0.2029660125732422,0.20312638549804687,0.20325468383789064,"[0.20233203125, 0.20236595153808593, 0.20234002685546876, 0.2012783966064453, 0.20240803527832033, 0.202344482421875, 0.2025451202392578, 0.20293037414550782, 0.20250242614746095, 0.20328675842285157]",tokens/s,1264.61369624352,kWh,5.9826247107139354e-06,6.597784534220591e-07,3.982480510245136e-06,1.062488367438113e-05,tokens/kWh,24094381.439419504,MB,4338.64704,4804.44416,0.0,4387.241984,4345.86112,s,10,24.545524169921872,2.454552416992187,0.010085666419448419,2.4556544189453122,2.4644199218750003,2.466619384765625,2.468378955078125,"[2.46881884765625, 2.451069091796875, 2.46393115234375, 2.448162841796875, 2.4323544921875, 2.462563232421875, 2.45482861328125, 2.4610888671875, 2.446226806640625, 2.456480224609375]",tokens/s,25.666593862028954,kWh,7.096715758678555e-05,7.827595168423304e-06,4.468055842015284e-05,0.00012347531117536174,tokens/kWh,510223.4560115935,,s,630,24.542743213653573,0.03895673525976757,0.0005036473651778079,0.03888315200805664,0.03946882514953613,0.03976927738189698,0.040650614585876474,"[0.039059070587158205, 0.03841654586791992, 0.038413822174072264, 0.03836979293823242, 0.03950495910644531, 0.038489025115966795, 0.03851241683959961, 0.03845142364501953, 0.039102752685546874, 0.03877040100097656, 0.038649696350097656, 0.0423502082824707, 0.03867289733886719, 0.038481441497802735, 0.038461952209472655, 
0.03857839965820312, 0.03866559982299805, 0.038913726806640625, 0.03919513702392578, 0.03867875289916992, 0.03863343811035156, 0.03883625411987305, 0.03867238235473633, 0.03875020980834961, 0.038825279235839845, 0.038910655975341796, 0.03917004776000976, 0.03927798461914062, 0.03867708969116211, 0.03875743865966797, 0.03869744110107422, 0.03873020935058594, 0.038899711608886715, 0.03898316955566406, 0.0394183349609375, 0.0390032958984375, 0.03877977752685547, 0.03907174301147461, 0.03906876754760742, 0.038969982147216795, 0.03888304138183594, 0.03956540679931641, 0.03940752029418945, 0.04014137649536133, 0.04013875198364258, 0.041062400817871096, 0.03938483047485351, 0.039911678314208984, 0.03946905517578125, 0.03953868865966797, 0.040024288177490236, 0.03947020721435547, 0.04088902282714844, 0.03936048126220703, 0.03927452850341797, 0.03954684829711914, 0.03904307174682617, 0.03957696151733398, 0.0392765121459961, 0.03974374389648438, 0.039885215759277344, 0.03930316925048828, 0.039757919311523435, 0.03976806259155274, 0.0390854721069336, 0.03946665573120117, 0.03905023956298828, 0.03888326263427734, 0.038860801696777345, 0.038981311798095705, 0.03904774475097656, 0.03875417709350586, 0.03885433578491211, 0.039030593872070314, 0.03901676940917969, 0.0389134407043457, 0.03886252975463867, 0.038994369506835935, 0.03869292831420899, 0.038875614166259766, 0.03924569702148437, 0.03906070327758789, 0.038981887817382814, 0.03897942352294922, 0.03912527847290039, 0.03894636917114258, 0.03884288024902344, 0.0388298225402832, 0.03907398223876953, 0.039158302307128905, 0.03938719940185547, 0.038596256256103516, 0.038604415893554685, 0.038580894470214844, 0.038580223083496096, 0.03864985656738281, 0.038735870361328126, 0.03868467330932617, 0.03871334457397461, 0.038742305755615235, 0.0385863037109375, 0.03846080017089844, 0.03860083389282227, 0.03894054412841797, 0.038742431640625, 0.03882924652099609, 0.03924825668334961, 0.03883494567871094, 0.038717121124267576, 0.03867558288574219, 0.038941150665283204, 0.03896124649047852, 0.038844734191894534, 0.03893657684326172, 0.03884787368774414, 0.038713088989257814, 0.038699039459228514, 0.03966883087158203, 0.038975231170654295, 0.03917180633544922, 0.03877507019042969, 0.03886608123779297, 0.03885302352905273, 0.03886240005493164, 0.038766624450683594, 0.03860566329956055, 0.04250361633300781, 0.03919676971435547, 0.03946879959106445, 0.03847446441650391, 0.038622848510742186, 0.03857651138305664, 0.038885120391845704, 0.03883647918701172, 0.03929087829589844, 0.039551136016845706, 0.038785984039306644, 0.03897158432006836, 0.03869974517822266, 0.03904092788696289, 0.03890560150146485, 0.03869235229492188, 0.03901887893676758, 0.03885062408447266, 0.04055446243286133, 0.039948734283447265, 0.03939638519287109, 0.038773727416992185, 0.03859251022338867, 0.03915776062011719, 0.03885055923461914, 0.0401363525390625, 0.039338336944580075, 0.039276481628417965, 0.038968704223632813, 0.038771007537841795, 0.03865433502197266, 0.0389857292175293, 0.03925382232666016, 0.03867667388916016, 0.03908403015136719, 0.03882393646240234, 0.038971233367919925, 0.039172191619873044, 0.038854721069335935, 0.03873513412475586, 0.03924246215820312, 0.03912704086303711, 0.03901235198974609, 0.039215103149414066, 0.039024639129638675, 0.03890995025634766, 0.03880755233764648, 0.03914956665039063, 0.03889561462402344, 0.03888745498657226, 0.03872457504272461, 0.038851039886474606, 0.03894268798828125, 0.039330368041992185, 0.038968704223632813, 0.03922732925415039, 
0.039164257049560544, 0.03888982391357422, 0.03893657684326172, 0.03920588684082031, 0.03974614334106445, 0.039112350463867185, 0.03893267059326172, 0.039605281829833985, 0.03879913711547851, 0.038744064331054685, 0.03871334457397461, 0.038864959716796876, 0.03887478256225586, 0.03891228866577148, 0.03871072006225586, 0.03887366485595703, 0.03868876647949219, 0.03907347106933594, 0.039241825103759766, 0.03884463882446289, 0.038708446502685546, 0.03863017654418945, 0.03861708831787109, 0.03865388870239258, 0.03887491226196289, 0.03924563217163086, 0.039042911529541015, 0.03915840148925781, 0.03875020980834961, 0.03878297424316406, 0.03872153472900391, 0.038830078125, 0.038989952087402344, 0.03963011169433594, 0.03897161483764648, 0.03857385635375977, 0.038859359741210936, 0.03888115310668945, 0.03889302444458008, 0.03888387298583985, 0.0388070068359375, 0.03874889755249023, 0.03879679870605469, 0.03911702346801758, 0.039274078369140625, 0.038959232330322266, 0.03876470565795898, 0.03887295913696289, 0.03866172790527344, 0.038624126434326174, 0.03955916976928711, 0.038983585357666016, 0.03845439910888672, 0.03855459213256836, 0.04161648178100586, 0.039074718475341795, 0.038391422271728516, 0.038473567962646484, 0.03855926513671875, 0.038542335510253906, 0.03846736145019531, 0.03825084686279297, 0.03939315032958984, 0.03850239944458008, 0.03854438400268555, 0.03853801727294922, 0.03836336135864258, 0.03837961578369141, 0.03840966415405273, 0.03858428955078125, 0.03923353576660156, 0.03900735855102539, 0.03816499328613281, 0.038881664276123044, 0.03817846298217773, 0.03856963348388672, 0.038290111541748044, 0.03892361450195313, 0.03860137557983399, 0.03828940963745117, 0.038795265197753906, 0.038262847900390626, 0.038173919677734376, 0.03829628753662109, 0.03815663909912109, 0.038493793487548826, 0.03831119918823242, 0.03824310302734375, 0.038052097320556644, 0.03834239959716797, 0.03845119857788086, 0.038563838958740236, 0.0383606071472168, 0.03835939025878906, 0.038391937255859376, 0.0383606071472168, 0.0393097915649414, 0.039732990264892576, 0.039543041229248045, 0.039534591674804685, 0.038489761352539065, 0.038203487396240236, 0.038363166809082035, 0.0382462387084961, 0.038107521057128904, 0.03823001480102539, 0.038868736267089844, 0.03836924743652344, 0.039448287963867186, 0.03817705535888672, 0.03846518325805664, 0.03847846221923828, 0.03844668960571289, 0.038613025665283206, 0.03852326583862305, 0.03857926559448242, 0.03884515380859375, 0.0394180793762207, 0.03902054214477539, 0.0388873291015625, 0.03890800094604492, 0.03868057632446289, 0.038795265197753906, 0.03929679870605469, 0.03860438537597656, 0.038913951873779294, 0.03867494583129883, 0.038560993194580076, 0.03853823852539062, 0.03843686294555664, 0.038305473327636716, 0.03828870391845703, 0.03844585418701172, 0.03909344100952149, 0.03886102294921875, 0.038695518493652346, 0.038610145568847655, 0.038800094604492186, 0.03931110382080078, 0.03898400115966797, 0.03894432067871094, 0.038719615936279296, 0.03907430267333984, 0.03901020812988281, 0.04031478500366211, 0.038948863983154294, 0.03896934509277344, 0.03888127899169922, 0.03876665496826172, 0.039091392517089846, 0.03888409423828125, 0.03924140930175781, 0.03909791946411133, 0.03902153778076172, 0.03887696075439453, 0.03892854309082031, 0.03902572631835938, 0.03879923248291016, 0.03880249786376953, 0.03877667236328125, 0.03862259292602539, 0.03867862319946289, 0.038623775482177734, 0.03891999816894531, 0.03905868911743164, 0.038919105529785156, 0.038817790985107424, 
0.03870460891723633, 0.03874256134033203, 0.03949977493286133, 0.03876249694824219, 0.0388770866394043, 0.038860897064208984, 0.03873177719116211, 0.03858422470092773, 0.03869728088378906, 0.03882963180541992, 0.03853286361694336, 0.03901078414916992, 0.0389051513671875, 0.03865875244140625, 0.038744064331054685, 0.039193950653076175, 0.04045996856689453, 0.04344521713256836, 0.04020217514038086, 0.038895679473876954, 0.03994348907470703, 0.03909251022338867, 0.03937235260009766, 0.039207744598388675, 0.0391927375793457, 0.038727550506591796, 0.038735870361328126, 0.03922041702270508, 0.0402624626159668, 0.03973241424560547, 0.03855414581298828, 0.03888947296142578, 0.039890945434570314, 0.03904207992553711, 0.03860374450683594, 0.04007894515991211, 0.03890582275390625, 0.03873222351074219, 0.03877068710327149, 0.038586368560791014, 0.038559680938720704, 0.03845766448974609, 0.03828083038330078, 0.03937497711181641, 0.03860403060913086, 0.03859260940551758, 0.038963230133056644, 0.038453887939453125, 0.03915980911254883, 0.03864188766479492, 0.03946585464477539, 0.03840707015991211, 0.03858563232421875, 0.03853590393066406, 0.03862860870361328, 0.038615806579589844, 0.03876812744140625, 0.03918460845947266, 0.03890131378173828, 0.038978271484375, 0.03903907012939453, 0.03904297637939453, 0.03895817565917969, 0.03888163375854492, 0.03913580703735352, 0.038971199035644534, 0.03893062210083008, 0.03904880142211914, 0.038945182800292966, 0.03933180618286133, 0.03891596984863281, 0.038835968017578125, 0.03884688186645508, 0.03913113784790039, 0.03957712173461914, 0.03902076721191406, 0.039147777557373045, 0.03905321502685547, 0.039067745208740234, 0.039222335815429686, 0.03902870559692383, 0.03909132766723633, 0.03887292861938477, 0.03893862533569336, 0.038819839477539066, 0.038793216705322264, 0.038828033447265625, 0.03978211212158203, 0.03916624069213867, 0.03921920013427734, 0.03905286407470703, 0.03890016174316406, 0.03983465576171875, 0.039013504028320316, 0.039024478912353514, 0.039452671051025394, 0.03912041473388672, 0.039115230560302736, 0.03877273559570313, 0.038811038970947266, 0.038873470306396485, 0.038776702880859375, 0.0388135986328125, 0.03897721481323242, 0.03891686248779297, 0.038756065368652344, 0.03866419219970703, 0.03884182357788086, 0.03880019378662109, 0.03913872146606445, 0.0395843505859375, 0.03903833770751953, 0.03903728103637695, 0.03904723358154297, 0.03886102294921875, 0.03888742446899414, 0.03948262405395508, 0.03897420883178711, 0.0386743049621582, 0.038989952087402344, 0.03940966415405273, 0.03876451110839844, 0.03858582305908203, 0.03847225570678711, 0.04051740646362305, 0.0390228157043457, 0.038553504943847655, 0.03842572784423828, 0.03850953674316406, 0.03880527877807617, 0.03848828887939453, 0.03853107070922852, 0.03953376007080078, 0.038710079193115234, 0.038682113647460936, 0.038885505676269534, 0.039840129852294924, 0.039853759765625, 0.03891593551635742, 0.03895663833618164, 0.039117694854736325, 0.0389222412109375, 0.03938294219970703, 0.039034976959228515, 0.03912278366088867, 0.03905756759643555, 0.039757823944091795, 0.0391978874206543, 0.0390840950012207, 0.03923839950561524, 0.03899756622314453, 0.03954902267456055, 0.03945302581787109, 0.03902006530761719, 0.040124160766601566, 0.03976406478881836, 0.038959457397460935, 0.03891408157348633, 0.03895702362060547, 0.03885446548461914, 0.038473087310791014, 0.03857436752319336, 0.0389304313659668, 0.03869046401977539, 0.03844927978515625, 0.038738624572753906, 0.038827777862548825, 
0.040689888000488283, 0.03909231948852539, 0.03868467330932617, 0.03841843032836914, 0.038373374938964845, 0.038330368041992184, 0.03845734405517578, 0.03843686294555664, 0.03902259063720703, 0.03882620620727539, 0.03872959899902344, 0.03941571044921875, 0.038526561737060545, 0.03868643188476562, 0.03838777542114258, 0.03845593643188477, 0.03840204620361328, 0.03863142395019531, 0.03889766311645508, 0.03873177719116211, 0.03853456115722656, 0.03880595016479492, 0.03977027130126953, 0.03891759872436523, 0.03903033447265625, 0.03895769500732422, 0.03897603225708008, 0.03884422302246094, 0.038784320831298826, 0.038685375213623044, 0.03877068710327149, 0.038624385833740234, 0.03868662261962891, 0.03901356887817383, 0.03873769760131836, 0.038612991333007815, 0.03846963119506836, 0.03851171112060547, 0.03875727844238281, 0.038957054138183594, 0.038860801696777345, 0.038739551544189454, 0.038756767272949216, 0.038616321563720704, 0.03905791854858399, 0.03898534393310547, 0.03893123245239258, 0.039026527404785155, 0.038917217254638675, 0.03923855972290039, 0.03903692626953125, 0.03956256103515625, 0.03894793701171875, 0.038932510375976566, 0.038981792449951175, 0.03896700668334961, 0.03886489486694336, 0.038809600830078124, 0.040498977661132814, 0.03928086471557617, 0.038991870880126955, 0.03872931289672851, 0.03860521697998047, 0.03868467330932617, 0.03861520004272461, 0.03853500747680664, 0.03857984161376953, 0.03863385772705078, 0.03889289474487305, 0.038795936584472654, 0.03860591888427734, 0.03864438247680664, 0.03881804656982422, 0.03863343811035156, 0.039079967498779296, 0.038752254486083985, 0.038709278106689456, 0.03893859100341797, 0.03877068710327149, 0.03901235198974609, 0.03894607925415039, 0.03895977783203125, 0.03858147048950195, 0.038682945251464845, 0.03925455856323242, 0.039019775390625, 0.040075584411621096, 0.03969615936279297, 0.03900019073486328, 0.03894720077514648, 0.03888723373413086, 0.039099998474121093, 0.03924448013305664, 0.03906310272216797, 0.03907968139648438, 0.039245857238769534, 0.03904790496826172, 0.03906092834472656, 0.038902336120605466, 0.0392540168762207, 0.03907379150390625, 0.038722721099853516, 0.0391995849609375, 0.03938035202026367, 0.03874582290649414, 0.03951504135131836, 0.04015625762939453, 0.03886582565307617, 0.038711296081542966, 0.03877814483642578, 0.03864591979980469, 0.03874873733520508, 0.0388587532043457, 0.03887465667724609]",tokens/s,25.669502162640065,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 4 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 28.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 189283 has 14.74 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 80.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 105496 has 14.64 GiB memory in use. Of the allocated memory 14.24 GiB is allocated by PyTorch, and 312.03 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1349, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1142, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 868, in forward hidden_states = self.mlp(hidden_states) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 781, in forward final_hidden_states.index_add_(0, top_x, current_hidden_states.to(hidden_states.dtype)) RuntimeError: CUDA error: invalid configuration argument CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1505.906688,1596.915712,0.0,1201.668096,1189.151232,s,1,8.179638671875,8.179638671875,0.0,8.179638671875,8.179638671875,8.179638671875,8.179638671875,[8.179638671875],,kWh,3.4542718758334936e-05,3.8032259343420668e-06,1.0888064266001071e-05,4.9234008958678075e-05,,MB,1524.883456,1791.950848,0.0,1382.023168,1351.367168,s,10,0.4748804168701172,0.04748804168701172,8.626458128418844e-05,0.04748456001281738,0.04758217430114746,0.04760303974151611,0.04761973209381103,"[0.04757753753662109, 0.04741312026977539, 0.047552288055419924, 0.04748134231567383, 0.04741155242919922, 0.04733055877685547, 0.04744131088256836, 0.047623905181884765, 0.04748777770996094, 0.0475610237121582]",tokens/s,5390.830847211323,kWh,1.4249498766260126e-06,1.5714683171962336e-07,9.500861259219454e-07,2.532182834267581e-06,tokens/kWh,101098544.91373903,MB,1528.918016,1833.893888,0.0,1423.966208,1407.328256,s,10,13.786848022460937,1.3786848022460938,0.0034189820094292227,1.3792277221679687,1.3835193969726562,1.3840247131347656,1.384428966064453,"[1.3790538330078126, 1.3803687744140625, 1.375017822265625, 1.3734044189453125, 1.379401611328125, 1.3756253662109375, 1.37638134765625, 1.3834071044921874, 1.37965771484375, 1.384530029296875]",tokens/s,45.695723850268834,kWh,4.049869935587363e-05,4.466583156182899e-06,1.8252484791678163e-05,6.32177673037347e-05,tokens/kWh,996555.2832214333,,s,630,13.784107973098747,0.021879536465236122,0.0002631747546613138,0.021808064460754395,0.022150712776184082,0.02236365451812744,0.022853109550476074,"[0.021778495788574218, 0.02174732780456543, 0.021688703536987305, 0.02183366394042969, 0.022048831939697266, 0.02204876708984375, 0.02185385513305664, 0.021788864135742186, 0.02177427291870117, 0.021960927963256837, 0.02182963180541992, 0.02185625648498535, 0.02177961540222168, 0.022281280517578127, 0.02176153564453125, 0.021694751739501954, 0.021700607299804688, 0.02172313690185547, 0.02201190376281738, 0.02191564750671387, 0.02171494483947754, 0.021778432846069336, 0.021729280471801758, 0.02169856071472168, 0.02166169548034668, 0.02194867134094238, 0.02190105628967285, 0.021749696731567382, 0.021851327896118163, 0.021825439453125, 0.02172003173828125, 0.021716991424560548, 0.021737472534179687, 0.02176924705505371, 0.021820383071899416, 0.021746784210205077, 0.021676895141601562, 0.021897279739379882, 0.021954559326171876, 0.022089824676513672, 0.022149023056030274, 0.022235136032104492, 0.02239190483093262, 0.022292959213256837, 0.022095680236816406, 0.022239328384399414, 0.0223045768737793, 0.02197987174987793, 0.022015071868896483, 0.022520767211914063, 0.02183718490600586, 0.0217872314453125, 0.02183884811401367, 0.02181427192687988, 0.021807104110717773, 0.02188902473449707, 0.021886144638061523, 
0.021817983627319334, 0.021745311737060548, 0.021783136367797853, 0.021722272872924806, 0.02184419250488281, 0.021764671325683594, 0.021803680419921874, 0.021819391250610352, 0.021774240493774414, 0.021673919677734375, 0.021659807205200197, 0.021809152603149414, 0.021731327056884766, 0.021931392669677734, 0.021693056106567382, 0.021699583053588867, 0.021825824737548828, 0.021707487106323243, 0.021755456924438477, 0.022004159927368164, 0.022732223510742187, 0.02278438377380371, 0.021881023406982423, 0.022161407470703123, 0.021897216796875, 0.021882463455200195, 0.02214134407043457, 0.022901920318603514, 0.022288799285888672, 0.02204275131225586, 0.0219736328125, 0.02192918395996094, 0.021956607818603514, 0.02182192039489746, 0.022082624435424806, 0.02186502456665039, 0.021887487411499023, 0.02190652847290039, 0.021796735763549804, 0.02182636833190918, 0.021842016220092773, 0.021910720825195313, 0.02179078483581543, 0.022016767501831055, 0.021749536514282228, 0.021898527145385743, 0.02183468818664551, 0.021695552825927736, 0.021814207077026367, 0.02178656005859375, 0.021741024017333986, 0.021899871826171875, 0.021931264877319338, 0.021879552841186523, 0.021783552169799804, 0.021744640350341796, 0.021725183486938478, 0.02190745544433594, 0.022089471817016603, 0.02180940818786621, 0.021788671493530275, 0.02197283172607422, 0.021821599960327148, 0.021941791534423827, 0.021915935516357423, 0.021915199279785157, 0.021753856658935547, 0.021834239959716797, 0.021700735092163085, 0.02161664009094238, 0.02190745544433594, 0.02192793655395508, 0.021721088409423828, 0.021618015289306642, 0.02179715156555176, 0.02173334312438965, 0.022065568923950195, 0.021669792175292968, 0.021928031921386718, 0.021805055618286134, 0.02188083267211914, 0.022034431457519533, 0.021769535064697264, 0.021689023971557617, 0.02172867202758789, 0.02186240005493164, 0.02177276802062988, 0.02166592025756836, 0.021780384063720702, 0.02191574478149414, 0.021737472534179687, 0.021704704284667968, 0.021772064208984376, 0.02189334487915039, 0.022226943969726562, 0.02191321563720703, 0.021843807220458984, 0.021807199478149415, 0.02202684783935547, 0.021812448501586913, 0.021887615203857423, 0.021664928436279297, 0.02175881576538086, 0.02168320083618164, 0.02171494483947754, 0.0217807674407959, 0.021747936248779298, 0.021801279067993163, 0.021738719940185548, 0.021783424377441407, 0.021702783584594727, 0.021651424407958985, 0.021983232498168945, 0.021819391250610352, 0.02179452705383301, 0.021839391708374022, 0.021889184951782225, 0.022256223678588868, 0.021809152603149414, 0.02174729537963867, 0.021780672073364257, 0.021819616317749025, 0.021831872940063477, 0.021679935455322267, 0.021708799362182618, 0.0216760311126709, 0.022042240142822266, 0.02206528091430664, 0.021762304306030274, 0.021847711563110352, 0.021942655563354493, 0.021951839447021483, 0.022052671432495115, 0.02184796714782715, 0.02192207908630371, 0.0217161922454834, 0.021605152130126953, 0.021589632034301757, 0.0220546875, 0.021807712554931642, 0.021784223556518555, 0.021709152221679687, 0.02183737564086914, 0.022038976669311525, 0.021979135513305666, 0.021700191497802734, 0.021729503631591797, 0.021921503067016603, 0.021718591690063477, 0.02177935981750488, 0.021845407485961914, 0.02186204719543457, 0.021814207077026367, 0.021882015228271483, 0.021729536056518554, 0.02180134391784668, 0.021696735382080078, 0.021753856658935547, 0.021651456832885742, 0.021884735107421876, 0.021780351638793945, 0.021678176879882813, 0.021704927444458007, 0.021675296783447266, 
0.021695104598999024, 0.021702560424804687, 0.021726879119873047, 0.02170854377746582, 0.021965599060058592, 0.02174787139892578, 0.021773439407348633, 0.022024927139282228, 0.021826976776123046, 0.02182819175720215, 0.02189427185058594, 0.0218919677734375, 0.021823488235473632, 0.021762048721313477, 0.021796863555908205, 0.021739519119262696, 0.021708383560180664, 0.021684576034545898, 0.02169196891784668, 0.02158355140686035, 0.02200454330444336, 0.021777952194213867, 0.02182806396484375, 0.021772287368774415, 0.02176425552368164, 0.021878623962402345, 0.021934080123901366, 0.021733375549316408, 0.02169036865234375, 0.02169254493713379, 0.021765888214111326, 0.021672607421875, 0.021807392120361327, 0.02176582336425781, 0.0217127685546875, 0.02174611282348633, 0.021827392578125, 0.022003807067871094, 0.02201113510131836, 0.021797727584838868, 0.022521535873413087, 0.021841279983520506, 0.021838560104370117, 0.021717023849487305, 0.02177039909362793, 0.021802400588989256, 0.02174425506591797, 0.021786624908447266, 0.021927679061889648, 0.021824928283691408, 0.021744480133056642, 0.022013887405395508, 0.02195167922973633, 0.021879167556762696, 0.021920255661010742, 0.02189030456542969, 0.02190822410583496, 0.021834943771362306, 0.021916479110717774, 0.021819391250610352, 0.02181875228881836, 0.021875328063964843, 0.02192153549194336, 0.02211164855957031, 0.021867359161376953, 0.02183123207092285, 0.021916095733642577, 0.021823488235473632, 0.021989280700683594, 0.02185635185241699, 0.021876735687255858, 0.02185420799255371, 0.021743583679199218, 0.02174569511413574, 0.02197020721435547, 0.021906143188476564, 0.021861440658569337, 0.021826240539550783, 0.021868799209594728, 0.02189030456542969, 0.021777280807495115, 0.021839744567871095, 0.021823488235473632, 0.021716928482055663, 0.021748960494995116, 0.02403619194030762, 0.021998624801635742, 0.021828575134277342, 0.021712928771972655, 0.021823455810546875, 0.021803007125854493, 0.021743616104125976, 0.022015775680541992, 0.021736831665039064, 0.0217893123626709, 0.021675519943237305, 0.021699071884155274, 0.021777664184570313, 0.021785343170166015, 0.021893119812011717, 0.02239676856994629, 0.02177155113220215, 0.02190835189819336, 0.02166774368286133, 0.021689855575561523, 0.02163158416748047, 0.021722944259643554, 0.021587392807006837, 0.021757728576660158, 0.021648319244384765, 0.021680160522460936, 0.021710048675537108, 0.022042943954467775, 0.021712352752685547, 0.02182601547241211, 0.021811456680297853, 0.021712736129760744, 0.021797632217407225, 0.021775136947631835, 0.021697248458862305, 0.021653696060180663, 0.02170675277709961, 0.0216760311126709, 0.021687583923339845, 0.021740255355834962, 0.021714559555053713, 0.021675615310668944, 0.021623296737670897, 0.022134559631347656, 0.02285532760620117, 0.02276438331604004, 0.02244607925415039, 0.02189107131958008, 0.0219965763092041, 0.021826528549194337, 0.021700607299804688, 0.021756128311157228, 0.022300447463989258, 0.021731327056884766, 0.02171683120727539, 0.0221812801361084, 0.021960447311401367, 0.021693376541137694, 0.021827648162841797, 0.021798912048339843, 0.021843584060668945, 0.021735807418823243, 0.021719039916992186, 0.021743616104125976, 0.021755903244018555, 0.021770240783691407, 0.021763711929321288, 0.02178291130065918, 0.021743616104125976, 0.021809152603149414, 0.02171452713012695, 0.021782943725585938, 0.021818944931030274, 0.02172153663635254, 0.021780479431152345, 0.021728927612304688, 0.02169215965270996, 0.021665599822998045, 0.021697311401367186, 
0.021811199188232423, 0.021782527923583983, 0.021794815063476563, 0.021702655792236326, 0.021722944259643554, 0.02173766326904297, 0.021687519073486327, 0.021733631134033204, 0.021796640396118165, 0.021865215301513672, 0.021803007125854493, 0.021908863067626953, 0.021854240417480467, 0.021721696853637694, 0.021746847152709962, 0.02170163154602051, 0.021778207778930664, 0.021614431381225586, 0.021782720565795898, 0.021730527877807618, 0.022096607208251955, 0.021760095596313478, 0.021831680297851562, 0.021778079986572267, 0.02183407974243164, 0.021798816680908203, 0.02190985679626465, 0.02186809539794922, 0.0217458553314209, 0.021718271255493166, 0.02168294334411621, 0.021683616638183592, 0.021661951065063478, 0.021676383972167968, 0.02192140769958496, 0.02196019172668457, 0.022227807998657225, 0.0222761287689209, 0.022470304489135742, 0.022237184524536133, 0.022408639907836914, 0.021993663787841795, 0.021846912384033204, 0.02191958427429199, 0.021774335861206053, 0.02182713508605957, 0.021907039642333984, 0.021875551223754883, 0.022380544662475587, 0.021667295455932618, 0.02169705581665039, 0.02167955207824707, 0.021755552291870116, 0.02185513687133789, 0.021820608139038085, 0.022218656539916993, 0.021695552825927736, 0.021689056396484375, 0.02163430404663086, 0.02178553581237793, 0.021682207107543944, 0.021946367263793946, 0.022006944656372072, 0.022039392471313476, 0.021823488235473632, 0.021774335861206053, 0.021710847854614256, 0.021798912048339843, 0.02187468719482422, 0.021796319961547853, 0.022030624389648437, 0.02185241508483887, 0.021772287368774415, 0.021927871704101563, 0.021796384811401368, 0.021819328308105467, 0.02177084732055664, 0.02173855972290039, 0.022092832565307616, 0.02183353614807129, 0.02183900833129883, 0.021838399887084962, 0.021995904922485352, 0.02176963233947754, 0.021727872848510743, 0.021798879623413085, 0.021766271591186524, 0.022150463104248046, 0.021879360198974608, 0.021960704803466798, 0.02185830307006836, 0.02190745544433594, 0.021852224349975587, 0.021851104736328127, 0.022323200225830078, 0.021980127334594726, 0.02202828788757324, 0.022224704742431642, 0.0221529598236084, 0.022525503158569337, 0.022256511688232422, 0.021970943450927736, 0.021934080123901366, 0.02179180717468262, 0.021732383728027344, 0.021900800704956053, 0.02389232063293457, 0.023971904754638673, 0.022113407135009765, 0.021984128952026366, 0.02185215950012207, 0.021929983139038087, 0.021814815521240233, 0.021746143341064453, 0.02168832015991211, 0.021798303604125976, 0.02186835289001465, 0.02178656005859375, 0.021768224716186522, 0.021749343872070313, 0.0217542724609375, 0.021720127105712892, 0.02173014450073242, 0.02173347282409668, 0.021786624908447266, 0.021764095306396485, 0.02182713508605957, 0.02177609634399414, 0.02175049591064453, 0.02165555191040039, 0.021652544021606445, 0.021626976013183592, 0.021627744674682616, 0.021984512329101563, 0.02337420845031738, 0.022847679138183592, 0.022040735244750975, 0.022428800582885742, 0.021906335830688475, 0.021751808166503905, 0.021934080123901366, 0.02266659164428711, 0.02228700828552246, 0.02206105613708496, 0.022120288848876953, 0.022055072784423826, 0.021886816024780275, 0.02174991989135742, 0.02169990348815918, 0.021733184814453126, 0.021746559143066405, 0.021685407638549804, 0.021727775573730467, 0.02178041648864746, 0.02172496032714844, 0.021723583221435548, 0.02173513603210449, 0.021815296173095702, 0.02172140884399414, 0.02177142333984375, 0.02200054359436035, 0.02197715187072754, 0.021808416366577148, 0.021881568908691407, 
0.021975040435791016, 0.022929407119750975, 0.021826751708984377, 0.021756736755371094, 0.021714015960693358, 0.021803680419921874, 0.021803264617919923, 0.021942272186279296, 0.02187059211730957, 0.022054912567138672, 0.021719039916992186, 0.021618688583374023, 0.021731199264526366, 0.021733503341674804, 0.02169206428527832, 0.02189961624145508, 0.021807104110717773, 0.02176598358154297, 0.02173967933654785, 0.021781984329223632, 0.02187913513183594, 0.021919551849365233, 0.021684608459472655, 0.02165555191040039, 0.021800960540771484, 0.021841920852661133, 0.02169343948364258, 0.02167091178894043, 0.021673759460449218, 0.021694719314575197, 0.021692384719848634, 0.02165113639831543, 0.021967168807983398, 0.021766143798828123, 0.021750848770141603, 0.02185100746154785, 0.021690208435058592, 0.021763904571533203, 0.021770656585693358, 0.021813247680664064, 0.02172313690185547, 0.02171494483947754, 0.021624704360961915, 0.021819360733032228, 0.021769792556762695, 0.02178927993774414, 0.02177340888977051, 0.021697439193725587, 0.02176348876953125, 0.02175030326843262, 0.021692480087280273, 0.021794815063476563, 0.021581823348999024, 0.02180019187927246, 0.021822208404541014, 0.021845184326171874, 0.02176083183288574, 0.021874015808105468, 0.021840543746948243, 0.022032384872436524, 0.02237824058532715, 0.02223708724975586, 0.022657567977905274, 0.022343488693237306, 0.022171648025512695, 0.02235935974121094, 0.02226655960083008, 0.022486080169677736, 0.02240812873840332, 0.02245631980895996, 0.022599679946899414, 0.022517759323120116, 0.02231888008117676, 0.022552799224853516, 0.02229043197631836, 0.02222198486328125, 0.022266719818115236, 0.022179840087890625, 0.022580480575561522, 0.02223904037475586, 0.02236716842651367]",tokens/s,45.704807393377656,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1506.066432,1596.915712,0.0,1201.668096,1189.151232,s,1,8.4394453125,8.4394453125,0.0,8.4394453125,8.4394453125,8.4394453125,8.4394453125,[8.4394453125],,kWh,3.494024245833695e-05,3.846210831008193e-06,1.0811675316000496e-05,4.959812860534564e-05,,MB,1522.200576,1791.950848,0.0,1382.023168,1351.367168,s,10,0.4749305305480957,0.04749305305480957,0.00011023556915181127,0.04746963310241699,0.0475606086730957,0.047669617080688474,0.047756823806762695,"[0.04777862548828125, 0.047507423400878906, 0.04743027114868164, 0.047454689025878904, 0.047536384582519534, 0.04740451049804688, 0.04734524917602539, 0.047470401763916016, 0.047534111022949216, 0.04746886444091797]",tokens/s,5390.262017995812,kWh,1.4211896730987327e-06,1.5673211640795952e-07,9.467806926990326e-07,2.524702482205725e-06,tokens/kWh,101398086.23166707,MB,1527.451648,1833.893888,0.0,1423.966208,1407.328256,s,10,13.804923950195311,1.3804923950195311,0.005752157842506621,1.379092041015625,1.3883178833007812,1.3886030334472657,1.3888311535644533,"[1.377676513671875, 1.3759522705078124, 
1.383525146484375, 1.3769593505859374, 1.3882545166015625, 1.3716900634765625, 1.37472216796875, 1.380507568359375, 1.38888818359375, 1.3867481689453125]",tokens/s,45.635890662844744,kWh,4.023062634273427e-05,4.4370357060596444e-06,1.8130345680100675e-05,6.27980077288946e-05,tokens/kWh,1003216.5394796827,,s,630,13.802479480743418,0.021908697588481598,0.00033537922035371343,0.021842127799987794,0.02214827880859375,0.02232315511703491,0.023321606845855717,"[0.02208777618408203, 0.022468511581420898, 0.021942304611206054, 0.02188287925720215, 0.021726303100585938, 0.021657855987548828, 0.021713184356689452, 0.021792896270751955, 0.021780736923217775, 0.021810176849365235, 0.021832704544067383, 0.021899295806884767, 0.021881887435913086, 0.021863359451293946, 0.02205695915222168, 0.02188902473449707, 0.021795936584472656, 0.021681055068969727, 0.02186854362487793, 0.021819391250610352, 0.021790271759033204, 0.021774784088134765, 0.021819263458251952, 0.02200752067565918, 0.02195088005065918, 0.021934080123901366, 0.021901311874389647, 0.02181679916381836, 0.02196329689025879, 0.021825216293334962, 0.021825632095336913, 0.02169264030456543, 0.021663679122924804, 0.021791967391967773, 0.021822303771972657, 0.021876735687255858, 0.0217227840423584, 0.021752511978149414, 0.021717920303344726, 0.021838336944580077, 0.021879039764404296, 0.022120447158813478, 0.021747711181640626, 0.02202828788757324, 0.0220446720123291, 0.02226380729675293, 0.0221529598236084, 0.02221696090698242, 0.02189673614501953, 0.021852863311767577, 0.021908319473266602, 0.021764448165893555, 0.02162505531311035, 0.021723520278930663, 0.021855552673339843, 0.02184262466430664, 0.02181724739074707, 0.0217109432220459, 0.021793920516967772, 0.021940223693847655, 0.021809535980224608, 0.021858495712280275, 0.021655296325683592, 0.021861215591430665, 0.0221265926361084, 0.02202828788757324, 0.021768192291259765, 0.021701856613159178, 0.02181353569030762, 0.021916160583496092, 0.0217969913482666, 0.02179574394226074, 0.02168931198120117, 0.02181065559387207, 0.022006303787231445, 0.021870304107666015, 0.021764127731323243, 0.02162441635131836, 0.021696928024291993, 0.021627071380615235, 0.021692480087280273, 0.021713056564331056, 0.021618816375732423, 0.02194112014770508, 0.02173401641845703, 0.021988639831542967, 0.02163580894470215, 0.0216529598236084, 0.021639904022216796, 0.021700159072875976, 0.021860832214355468, 0.02169856071472168, 0.021800960540771484, 0.02172047996520996, 0.021696128845214845, 0.0216560001373291, 0.021658143997192382, 0.0216944637298584, 0.02165113639831543, 0.021819007873535155, 0.021785280227661134, 0.02177142333984375, 0.021707584381103515, 0.021708768844604494, 0.02165510368347168, 0.021713312149047852, 0.02234582328796387, 0.025286848068237305, 0.021980607986450195, 0.021975488662719728, 0.021821151733398436, 0.021725439071655275, 0.021667999267578127, 0.021706911087036134, 0.021709983825683593, 0.021715456008911133, 0.021805055618286134, 0.02193017578125, 0.021912736892700194, 0.02188355255126953, 0.021760000228881835, 0.02176367950439453, 0.02175632095336914, 0.021768192291259765, 0.021776384353637695, 0.021628480911254883, 0.02236582374572754, 0.0216625919342041, 0.02176406478881836, 0.02185795211791992, 0.021551488876342773, 0.021654720306396483, 0.02156979179382324, 0.021749664306640625, 0.022313087463378907, 0.022829599380493164, 0.023365631103515624, 0.022073280334472655, 0.022161376953125, 0.022216064453125, 0.021853120803833007, 0.021657215118408204, 0.024447296142578127, 
0.021847904205322264, 0.021849952697753906, 0.021932319641113283, 0.02182310485839844, 0.02184832000732422, 0.021929983139038087, 0.021878528594970702, 0.022477216720581054, 0.02195644760131836, 0.021795103073120117, 0.021894208908081053, 0.021889503479003907, 0.02184815979003906, 0.021881088256835938, 0.02190048027038574, 0.021815839767456054, 0.021929119110107424, 0.021950912475585938, 0.021988895416259764, 0.02203526306152344, 0.02194246482849121, 0.02191155242919922, 0.021777664184570313, 0.021861120223999022, 0.02188662338256836, 0.02180291175842285, 0.021757535934448242, 0.021876991271972655, 0.021860895156860353, 0.02191321563720703, 0.02187718391418457, 0.02206934356689453, 0.02184592056274414, 0.021780479431152345, 0.021735424041748046, 0.021750816345214842, 0.021758943557739257, 0.021957887649536132, 0.022002431869506837, 0.021825279235839844, 0.021740095138549805, 0.021677280426025392, 0.021670047760009765, 0.02187107276916504, 0.021782463073730468, 0.021798816680908203, 0.02171219253540039, 0.021844671249389647, 0.021613983154296874, 0.021748319625854492, 0.022103967666625975, 0.021784671783447264, 0.021767295837402344, 0.02174611282348633, 0.02170444869995117, 0.02181190490722656, 0.02183302307128906, 0.021783199310302735, 0.021693599700927733, 0.022080352783203125, 0.02197711944580078, 0.021842016220092773, 0.021736576080322267, 0.021826335906982422, 0.021841920852661133, 0.021825023651123047, 0.021694847106933594, 0.021784095764160155, 0.02175651168823242, 0.02183590316772461, 0.02175164794921875, 0.021927808761596678, 0.021698720932006837, 0.02195609664916992, 0.02197875213623047, 0.022218847274780275, 0.021961503982543946, 0.021954559326171876, 0.02182963180541992, 0.021722431182861327, 0.02176233673095703, 0.022051231384277344, 0.021921567916870117, 0.02176652717590332, 0.021864063262939454, 0.021719263076782225, 0.021855615615844728, 0.02209270477294922, 0.021880544662475587, 0.02178771209716797, 0.021922752380371092, 0.021782527923583983, 0.021762271881103516, 0.021923103332519532, 0.021899776458740236, 0.021854368209838868, 0.021895008087158205, 0.02183600044250488, 0.021774208068847656, 0.021790624618530274, 0.02175702476501465, 0.021885759353637697, 0.0219420166015625, 0.021987903594970704, 0.021851551055908202, 0.022118783950805663, 0.02193561553955078, 0.021934431076049806, 0.021827743530273436, 0.021836736679077148, 0.021729280471801758, 0.02169036865234375, 0.021800575256347657, 0.021739904403686523, 0.021817344665527344, 0.021940288543701173, 0.02198428726196289, 0.021869760513305664, 0.021978847503662108, 0.021869823455810546, 0.02172185516357422, 0.02179043197631836, 0.021817344665527344, 0.023213823318481444, 0.024300064086914062, 0.0221014404296875, 0.02187731170654297, 0.021934080123901366, 0.021737472534179687, 0.02176646423339844, 0.021939903259277343, 0.022360063552856444, 0.021862079620361328, 0.021866111755371093, 0.02308371162414551, 0.0220153923034668, 0.021895456314086913, 0.021962432861328124, 0.021844608306884766, 0.02180735969543457, 0.021839584350585937, 0.0218439998626709, 0.021925344467163085, 0.021821632385253906, 0.021754079818725586, 0.021723264694213866, 0.021786144256591797, 0.021811391830444334, 0.02175116729736328, 0.022369184494018556, 0.021955968856811524, 0.021814176559448242, 0.02185513687133789, 0.021918752670288085, 0.021806880950927733, 0.021917695999145507, 0.022011423110961915, 0.02187516784667969, 0.02182054328918457, 0.021808000564575197, 0.021806848526000976, 0.021857568740844727, 0.024050016403198243, 0.02310736083984375, 
0.022021184921264647, 0.02186422348022461, 0.021796863555908205, 0.0218787841796875, 0.0220446720123291, 0.022828863143920897, 0.022120672225952147, 0.022045888900756837, 0.021937856674194334, 0.02186777687072754, 0.021787391662597657, 0.02161471939086914, 0.021600128173828125, 0.02168556785583496, 0.021665824890136718, 0.02172947120666504, 0.021600223541259764, 0.021586431503295898, 0.02182143974304199, 0.021936479568481444, 0.021799680709838867, 0.02166655921936035, 0.02157583999633789, 0.021661535263061523, 0.021710687637329102, 0.021893152236938475, 0.022117759704589842, 0.02195542335510254, 0.022003423690795897, 0.02239523124694824, 0.022083295822143554, 0.022098207473754884, 0.021929983139038087, 0.021894720077514647, 0.02172972869873047, 0.021757951736450197, 0.02180463981628418, 0.021903455734252928, 0.021848384857177734, 0.021663999557495116, 0.021595903396606445, 0.021675359725952147, 0.02171299171447754, 0.021713056564331056, 0.021596832275390623, 0.021626623153686523, 0.021722591400146485, 0.021776575088500977, 0.02185273551940918, 0.02182032012939453, 0.021738592147827147, 0.021759775161743163, 0.021716543197631836, 0.02167238426208496, 0.021725183486938478, 0.02162892723083496, 0.021689632415771484, 0.021662431716918944, 0.021785728454589842, 0.021618688583374023, 0.0216909122467041, 0.021723487854003905, 0.02169241523742676, 0.021769376754760743, 0.021821952819824218, 0.021748064041137695, 0.021811199188232423, 0.021670207977294922, 0.021669023513793944, 0.021658143997192382, 0.021792512893676758, 0.021791200637817382, 0.021731199264526366, 0.021686656951904297, 0.021786815643310548, 0.021659648895263672, 0.021618688583374023, 0.02180838394165039, 0.021828351974487306, 0.021901311874389647, 0.021702655792236326, 0.021694303512573242, 0.021785888671875, 0.021635711669921873, 0.021704959869384765, 0.021684288024902344, 0.02183558464050293, 0.02200726318359375, 0.022334112167358398, 0.021788768768310547, 0.02180415916442871, 0.021670623779296874, 0.021626752853393556, 0.02190905570983887, 0.021723743438720702, 0.02159619140625, 0.021794815063476563, 0.02186854362487793, 0.02175811195373535, 0.02173526382446289, 0.021790719985961913, 0.021937824249267577, 0.021840223312377928, 0.021713119506835937, 0.021724895477294923, 0.021868320465087892, 0.0218176326751709, 0.022011327743530273, 0.02206572723388672, 0.021842144012451173, 0.02194000053405762, 0.021929536819458008, 0.021920255661010742, 0.021747392654418947, 0.021852096557617186, 0.021974720001220704, 0.021913248062133787, 0.021808095932006836, 0.021724288940429687, 0.021955455780029297, 0.02176540756225586, 0.02180374336242676, 0.021784576416015625, 0.021720447540283204, 0.021792512893676758, 0.021817792892456056, 0.021710784912109374, 0.021664255142211913, 0.02181046485900879, 0.021875423431396486, 0.02223308753967285, 0.02190332794189453, 0.02196272087097168, 0.021780832290649414, 0.021789920806884765, 0.021792640686035158, 0.021898143768310546, 0.021855615615844728, 0.021670528411865234, 0.02163302421569824, 0.021752864837646484, 0.021871007919311524, 0.02182406425476074, 0.02178825569152832, 0.021668256759643553, 0.021816864013671874, 0.02180963134765625, 0.021902912139892577, 0.02167612838745117, 0.02163337516784668, 0.021744895935058593, 0.02172319984436035, 0.021834272384643555, 0.021811359405517577, 0.02180214309692383, 0.021840351104736328, 0.021862783432006837, 0.02191564750671387, 0.021812480926513673, 0.021850879669189454, 0.021948320388793945, 0.02190140724182129, 0.021876735687255858, 0.02195462417602539, 
0.021839103698730468, 0.022005887985229493, 0.021907487869262696, 0.021976640701293945, 0.021804000854492186, 0.02209174346923828, 0.021980863571166992, 0.021970848083496093, 0.021933919906616212, 0.021910112380981447, 0.022138784408569336, 0.022306911468505858, 0.021975040435791016, 0.02230588722229004, 0.022006656646728514, 0.022063039779663087, 0.022029600143432616, 0.021865280151367187, 0.022585119247436523, 0.022034656524658202, 0.021929983139038087, 0.02188902473449707, 0.02185420799255371, 0.021772287368774415, 0.021792192459106446, 0.021955135345458985, 0.021921279907226563, 0.021891584396362306, 0.02186835289001465, 0.021932287216186522, 0.02189267158508301, 0.022294271469116212, 0.022198911666870116, 0.022417407989501953, 0.02215920066833496, 0.02197443199157715, 0.021887744903564453, 0.022714368820190428, 0.02211167907714844, 0.02228384017944336, 0.022207040786743164, 0.02214784049987793, 0.021814783096313475, 0.021842111587524415, 0.021934080123901366, 0.021989280700683594, 0.022206560134887695, 0.022039968490600585, 0.02193276786804199, 0.021874048233032226, 0.022082048416137694, 0.022331392288208008, 0.022171520233154298, 0.022017311096191407, 0.022227807998657225, 0.022124544143676757, 0.022050304412841795, 0.021873151779174805, 0.021927679061889648, 0.021909824371337892, 0.02182975959777832, 0.02187884712219238, 0.021843584060668945, 0.02179033660888672, 0.02174208068847656, 0.02173097610473633, 0.021801055908203124, 0.02180678367614746, 0.021709375381469727, 0.021747711181640626, 0.021784576416015625, 0.02186419105529785, 0.021989152908325194, 0.021915615081787108, 0.021774112701416017, 0.02176063919067383, 0.02172480010986328, 0.022883071899414062, 0.023624895095825195, 0.022219295501708983, 0.021976736068725584, 0.021887584686279295, 0.022031328201293946, 0.021953311920166016, 0.02196873664855957, 0.02239299201965332, 0.02224508857727051, 0.022255680084228516, 0.022353216171264647, 0.022068031311035158, 0.022126848220825196, 0.0221628475189209, 0.021832128524780274, 0.021816383361816405, 0.022019264221191406, 0.02187775993347168, 0.021813087463378907, 0.02181545639038086, 0.02178166389465332, 0.02215990447998047, 0.022110048294067382, 0.021965311050415038, 0.021794336318969727, 0.021868192672729492, 0.022483327865600585, 0.022421056747436524, 0.022393247604370118, 0.021807647705078124, 0.021798303604125976, 0.02170230484008789, 0.02195916748046875, 0.022182239532470702, 0.022289600372314453, 0.02194710350036621, 0.025202144622802736, 0.02198796844482422, 0.022403072357177735, 0.022409023284912108, 0.022170047760009765, 0.021812992095947267, 0.021729280471801758, 0.022263872146606446, 0.02225161552429199, 0.022152223587036134, 0.021957439422607423, 0.022183168411254884, 0.02192665672302246, 0.0218024959564209, 0.021822975158691405, 0.021716064453125, 0.02188035202026367, 0.021989503860473634, 0.02184976005554199, 0.022272319793701173, 0.021989856719970703, 0.022089536666870118, 0.021940223693847655, 0.021835775375366212, 0.02166988754272461, 0.021722400665283203, 0.021801824569702147, 0.021854080200195313, 0.02190336036682129, 0.02184601593017578, 0.021814367294311524, 0.021939104080200195, 0.02185420799255371, 0.021730655670166014, 0.021809375762939454, 0.02193174362182617, 0.021799583435058594, 0.021864511489868163, 0.021795040130615236, 0.021738431930541993, 0.02190598487854004, 0.022024192810058595, 0.02194486427307129, 0.02189695930480957, 0.021736928939819336]",tokens/s,45.64397294550935,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 19654 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,6768.16896,7762.542592,0.0,7367.294976,7351.94368,s,1,12.6473876953125,12.6473876953125,0.0,12.6473876953125,12.6473876953125,12.6473876953125,12.6473876953125,[12.6473876953125],,kWh,0.0001608182069083417,1.773204727626298e-05,4.962337303200323e-05,0.0002281736272166079,,MB,1658.08128,8372.813824,0.0,7962.886144,7872.44544,s,10,3.208184783935547,0.3208184783935547,0.0015482526784383604,0.3211701965332031,0.32249743347167964,0.322578581237793,0.3226434994506836,"[0.31727294921875, 0.3193846435546875, 0.3202622375488281, 0.3202374572753906, 0.32068597412109373, 0.3224794006347656, 0.3218287658691406, 0.3216544189453125, 0.32171920776367186, 0.32265972900390627]",tokens/s,797.9590243114346,kWh,9.335483573828066e-06,1.0295335762255684e-06,6.214831360749928e-06,1.6579848510803562e-05,tokens/kWh,15440430.582534475,MB,1666.674688,8624.472064,0.0,8214.544384,8118.577152,s,10,27.089406494140626,2.7089406494140627,0.004729172827181731,2.7089176025390627,2.7149712158203125,2.7152866333007815,2.7155389672851564,"[2.704410888671875, 2.714901123046875, 2.711612060546875, 2.701280029296875, 2.707578369140625, 2.70420947265625, 2.71560205078125, 2.7102568359375, 2.713653076171875, 2.705902587890625]",tokens/s,23.25632346859528,kWh,7.935913866033882e-05,8.753395499857032e-06,5.275871581805027e-05,0.00014087124997824615,tokens/kWh,447216.8736326872,,s,630,27.0860598335266,0.04299374576750256,0.0003475571523909005,0.042998958587646484,0.04339941749572754,0.04347417964935303,0.043852793464660644,"[0.043175136566162106, 0.04260927963256836, 0.042326175689697265, 0.042278911590576174, 0.04229935836791992, 0.04226233673095703, 0.04241388702392578, 0.04229721450805664, 0.04244534301757812, 0.04283391952514649, 0.04293603134155274, 0.04262704086303711, 0.04262508773803711, 0.042666240692138674, 0.04270489501953125, 0.042513664245605466, 0.04242918395996094, 0.042425472259521486, 0.04255424118041992, 0.04276627349853516, 0.043072895050048826, 0.04264400100708008, 0.04272947311401367, 0.04285164642333984, 0.042676544189453124, 0.042777118682861326, 0.04273299026489258, 0.042717758178710936, 
0.0426673583984375, 0.04304553604125977, 0.04272127914428711, 0.04272281646728516, 0.04283030319213867, 0.04328656005859375, 0.04295091247558594, 0.04286233520507812, 0.042995712280273435, 0.04302627182006836, 0.04289539337158203, 0.04282102584838867, 0.04287561416625976, 0.042931617736816405, 0.043772350311279296, 0.04319452667236328, 0.04307891082763672, 0.043223583221435546, 0.04314470291137695, 0.043184864044189454, 0.04308582305908203, 0.04333884811401367, 0.04351084899902344, 0.04338876724243164, 0.04333878326416016, 0.04329776000976562, 0.043237377166748046, 0.043345375061035155, 0.04350620651245117, 0.04333977508544922, 0.04346060943603516, 0.043302913665771485, 0.0434952621459961, 0.043472705841064455, 0.043336032867431644, 0.043364864349365234, 0.042727294921875, 0.042464958190917966, 0.042301822662353515, 0.04230569458007812, 0.042411678314208986, 0.04323148727416992, 0.042772289276123046, 0.042735809326171874, 0.04303811264038086, 0.04293443298339844, 0.04286240005493164, 0.04269680023193359, 0.04265014266967773, 0.0425533447265625, 0.042906688690185546, 0.04272019195556641, 0.042668033599853515, 0.04272947311401367, 0.04288716888427734, 0.04304608154296875, 0.04324560165405274, 0.043031326293945314, 0.0432042236328125, 0.04300611114501953, 0.04313520050048828, 0.04304217529296875, 0.04284409713745117, 0.044810943603515625, 0.042923648834228514, 0.042874591827392575, 0.04308972930908203, 0.04332015991210938, 0.04316364669799805, 0.04313907241821289, 0.043199806213378905, 0.042832576751708984, 0.04298342514038086, 0.04301801681518555, 0.043191520690917966, 0.04295100784301758, 0.042984096527099606, 0.0432803840637207, 0.04321459197998047, 0.043235584259033205, 0.04326326370239258, 0.043324127197265624, 0.043549728393554685, 0.04349216079711914, 0.04337606430053711, 0.043301601409912106, 0.04330873489379883, 0.04346502304077148, 0.04342572784423828, 0.04344015884399414, 0.04330918502807617, 0.04337615966796875, 0.04341142272949219, 0.04343392181396484, 0.04336064147949219, 0.04335113525390625, 0.04332835388183594, 0.043323551177978516, 0.04378412628173828, 0.04307769775390625, 0.04273385620117188, 0.04267740631103516, 0.04379296112060547, 0.04258611297607422, 0.042510337829589843, 0.04253696060180664, 0.04282777786254883, 0.042802783966064455, 0.042844703674316406, 0.04269452667236328, 0.04265286254882812, 0.04262790298461914, 0.042447872161865234, 0.04236185455322266, 0.042792095184326175, 0.04294332885742187, 0.042979358673095706, 0.04304598236083984, 0.04305516815185547, 0.04295148849487305, 0.0432042236328125, 0.043106590270996094, 0.043219039916992184, 0.0431267204284668, 0.04299987030029297, 0.04300182342529297, 0.04302214431762695, 0.04272297668457031, 0.04273849487304687, 0.04298521423339844, 0.04286054229736328, 0.042727294921875, 0.04290070343017578, 0.04313910293579101, 0.04299660873413086, 0.042823680877685545, 0.042790912628173826, 0.04308377456665039, 0.04322742462158203, 0.04312998580932617, 0.0431129264831543, 0.043554943084716795, 0.04348435211181641, 0.043399295806884765, 0.043442878723144535, 0.04337254333496094, 0.04320665740966797, 0.043200511932373044, 0.04338278579711914, 0.04298076629638672, 0.04298179244995117, 0.04310444641113281, 0.043286720275878904, 0.043224895477294925, 0.04326358413696289, 0.0430838737487793, 0.04315343856811524, 0.04418588638305664, 0.04316774368286133, 0.043069438934326174, 0.04311859130859375, 0.04332729721069336, 0.0427760009765625, 0.042334720611572264, 0.04224227142333985, 0.04214505767822266, 0.04234400177001953, 
0.042380287170410154, 0.04238131332397461, 0.042256385803222656, 0.04229529571533203, 0.04232585525512695, 0.042410144805908205, 0.04254265594482422, 0.042756542205810544, 0.04262911987304688, 0.04316998291015625, 0.04266374588012695, 0.042491233825683594, 0.04270662307739258, 0.04284515380859375, 0.04293001556396484, 0.04296720123291015, 0.04312678527832031, 0.043099838256835936, 0.04295507049560547, 0.043017822265625, 0.04270940780639648, 0.04268646240234375, 0.0426618881225586, 0.04274380874633789, 0.04292348861694336, 0.045267486572265626, 0.04263427352905273, 0.04264595031738281, 0.04264400100708008, 0.0425799674987793, 0.04271004867553711, 0.04305193710327149, 0.0428073616027832, 0.04279500961303711, 0.04286054229736328, 0.043222496032714844, 0.04295529556274414, 0.04301433563232422, 0.043036449432373045, 0.043216926574707035, 0.04325510406494141, 0.04335599899291992, 0.043103073120117186, 0.04330873489379883, 0.04307366561889649, 0.043159679412841795, 0.04308486557006836, 0.04294547271728515, 0.04293228912353515, 0.04294569778442383, 0.04314339065551758, 0.04308569717407226, 0.04297999954223633, 0.042998046875, 0.04310201644897461, 0.04306275177001953, 0.04313296127319336, 0.04342041778564453, 0.042923072814941406, 0.04268304061889648, 0.04245695877075195, 0.042523040771484374, 0.042502143859863284, 0.04253081512451172, 0.04278681564331055, 0.042659839630126956, 0.042567550659179686, 0.04259648132324219, 0.04259804916381836, 0.04273958587646484, 0.042856929779052734, 0.042649600982666014, 0.042641407012939454, 0.0430797119140625, 0.04279497528076172, 0.0428721923828125, 0.04324940872192383, 0.04305395126342773, 0.04312063980102539, 0.0432474250793457, 0.04321913528442383, 0.043122337341308596, 0.04296739196777344, 0.04294655990600586, 0.04298886489868164, 0.04289401626586914, 0.043014144897460936, 0.04270284652709961, 0.04286054229736328, 0.0427845458984375, 0.04271491241455078, 0.04285673522949219, 0.042626911163330075, 0.04286291122436523, 0.04267212677001953, 0.042883071899414066, 0.04324262237548828, 0.04294569778442383, 0.04293913650512695, 0.04305372619628906, 0.04340768051147461, 0.04332134246826172, 0.04333939361572266, 0.043321758270263674, 0.043337696075439455, 0.04309196853637695, 0.04382668685913086, 0.04306585693359375, 0.04290764617919922, 0.04312441635131836, 0.043477310180664065, 0.043245567321777346, 0.04310220718383789, 0.04324966430664062, 0.04322009658813476, 0.04309439849853516, 0.04290729522705078, 0.04290224075317383, 0.0431426887512207, 0.04330131149291992, 0.043869632720947266, 0.043270721435546874, 0.042651649475097655, 0.04259635162353516, 0.04260051345825195, 0.0426044807434082, 0.04250787353515625, 0.04245340728759766, 0.0425920639038086, 0.04257574462890625, 0.04250860977172852, 0.042684417724609375, 0.0425984001159668, 0.04245884704589844, 0.042473758697509766, 0.04240588760375977, 0.04229324722290039, 0.04250447845458984, 0.04273347091674805, 0.04280096054077148, 0.04284524917602539, 0.04294547271728515, 0.04292812728881836, 0.042928382873535155, 0.043020030975341794, 0.042979488372802736, 0.04280073547363281, 0.04271129608154297, 0.04288857650756836, 0.042961536407470705, 0.042638561248779294, 0.042549312591552736, 0.04258233642578125, 0.04249436950683594, 0.04265369415283203, 0.042676223754882815, 0.04270451354980469, 0.04268207931518555, 0.04268304061889648, 0.042706367492675784, 0.04297580718994141, 0.04300799942016602, 0.043044864654541014, 0.043243518829345705, 0.04332921600341797, 0.04344579315185547, 0.04337334442138672, 0.04329676818847656, 
0.04333772659301758, 0.04314908981323242, 0.043308704376220704, 0.04311507034301758, 0.04339859390258789, 0.0432213134765625, 0.043151649475097656, 0.04324476623535156, 0.04338137435913086, 0.04332284927368164, 0.043380577087402346, 0.0433111686706543, 0.043299583435058596, 0.04343603134155274, 0.04347420883178711, 0.04339913558959961, 0.04303260803222656, 0.04261068725585938, 0.042665985107421874, 0.042510112762451174, 0.042610912322998046, 0.04250009536743164, 0.04269430541992188, 0.042690910339355466, 0.0426794548034668, 0.042686431884765626, 0.04272422409057617, 0.042659103393554686, 0.04273353576660156, 0.04263616180419922, 0.04261452865600586, 0.04288703918457031, 0.04281983947753906, 0.04295465469360352, 0.04302963256835937, 0.04312092971801758, 0.043067935943603516, 0.04314457702636719, 0.04344911956787109, 0.0433889274597168, 0.04321062469482422, 0.04319004821777344, 0.04307145690917969, 0.04307622528076172, 0.042833663940429687, 0.04305100631713867, 0.04298342514038086, 0.0429317741394043, 0.04286028671264648, 0.04282233428955078, 0.04310131072998047, 0.04322739028930664, 0.04371212768554687, 0.04313008117675781, 0.04309328079223633, 0.04327679824829102, 0.04336844635009766, 0.04324966430664062, 0.043227134704589845, 0.0434628791809082, 0.04340918350219727, 0.04336460876464844, 0.04359302520751953, 0.04339471817016602, 0.04343452835083008, 0.04332284927368164, 0.043485729217529294, 0.043612415313720704, 0.04323328018188476, 0.043361854553222653, 0.043153854370117185, 0.04337254333496094, 0.04334796905517578, 0.04329471969604492, 0.04329676818847656, 0.043466751098632815, 0.0434741439819336, 0.04342454528808594, 0.044015777587890624, 0.043071327209472654, 0.04274176025390625, 0.04261273574829102, 0.042592254638671875, 0.04266569519042969, 0.042514720916748044, 0.04254316711425781, 0.04259564971923828, 0.0426943359375, 0.04262499237060547, 0.042585247039794924, 0.04269622421264648, 0.04262659072875977, 0.042509056091308596, 0.043292671203613284, 0.04283596801757812, 0.04307558441162109, 0.042925216674804687, 0.04285116958618164, 0.04293632125854492, 0.04298150253295899, 0.04311846542358398, 0.04318767929077148, 0.04304540634155273, 0.04296499252319336, 0.0429439697265625, 0.04309657669067383, 0.0428787841796875, 0.04274812698364258, 0.04267827224731445, 0.04266921615600586, 0.04284288024902344, 0.042853759765625, 0.0429288330078125, 0.04280732727050781, 0.042731201171875, 0.042799488067626956, 0.04263724899291992, 0.042774528503417966, 0.043022335052490236, 0.04317388916015625, 0.04325580978393555, 0.04331433486938477, 0.04313993453979492, 0.04331699371337891, 0.04341785430908203, 0.043493377685546876, 0.04351795196533203, 0.043425792694091796, 0.043380062103271486, 0.043374591827392575, 0.044028575897216794, 0.043262046813964845, 0.04314720153808594, 0.04341574478149414, 0.04328015899658203, 0.04346265411376953, 0.04312998580932617, 0.0431396484375, 0.04314963150024414, 0.04316159820556641, 0.04323667144775391, 0.04386345672607422, 0.04311513519287109, 0.04253692626953125, 0.04272332763671875, 0.042768222808837894, 0.0427848014831543, 0.042829376220703125, 0.04252102279663086, 0.04254937744140625, 0.04265475082397461, 0.04252156829833984, 0.04261999893188476, 0.04249078369140625, 0.0427047348022461, 0.04272483062744141, 0.04268716812133789, 0.04311203384399414, 0.043108158111572266, 0.042885921478271485, 0.04316140747070313, 0.04310812759399414, 0.04309833526611328, 0.04340675354003906, 0.04357590484619141, 0.04315468978881836, 0.04315350341796875, 0.0431025276184082, 
0.042889217376708984, 0.04286294555664062, 0.04276428985595703, 0.042773696899414064, 0.04284627151489258, 0.042910465240478514, 0.042882591247558596, 0.042931968688964844, 0.04295753479003906, 0.04287078475952148, 0.04290124893188477, 0.043014400482177736, 0.04313638305664062, 0.04307567977905274, 0.04341420745849609, 0.043764896392822265, 0.04334457778930664, 0.043402400970458985, 0.04345328140258789, 0.043361568450927736, 0.04321254348754883, 0.04325680160522461, 0.04315468978881836, 0.04310940933227539, 0.043173599243164065, 0.043200096130371096, 0.04331875228881836, 0.043138080596923825, 0.04319753646850586, 0.04346559906005859, 0.0434005126953125, 0.04327088165283203, 0.04349737548828125, 0.04348096084594726, 0.043432064056396484, 0.043490623474121096, 0.04354662322998047, 0.04286159896850586, 0.04266902542114258, 0.04239321517944336, 0.042377376556396486, 0.0426539192199707, 0.042573471069335934, 0.04252447891235352, 0.04252726364135742, 0.04244406509399414, 0.04257839965820313, 0.042493438720703124, 0.042558208465576175, 0.042543102264404296, 0.04253081512451172, 0.04278499221801758, 0.04280889511108398, 0.04281708908081055, 0.0427977294921875, 0.04288943862915039, 0.04315881729125977, 0.04314742279052734, 0.04312303924560547, 0.043089759826660155, 0.04301635360717773, 0.04305676651000977, 0.043032958984375, 0.04296089553833008, 0.04294620895385742, 0.04287932968139648, 0.04281327819824219, 0.042948257446289065, 0.04295731353759766, 0.04287078475952148, 0.042831871032714845, 0.0428361930847168, 0.04291766357421875, 0.04284633636474609, 0.0431962890625, 0.04320367813110351, 0.043170719146728515, 0.04303987121582031, 0.04301433563232422, 0.0432803840637207, 0.04325446319580078, 0.043128833770751954, 0.043096065521240234, 0.043093345642089845, 0.04324121475219726, 0.04332646560668945, 0.04329564666748047, 0.04312326431274414, 0.042932193756103514, 0.042936798095703124, 0.04297836685180664, 0.043021247863769534, 0.04305660629272461, 0.043409950256347654, 0.04320460891723633, 0.04338614273071289, 0.04315119934082031, 0.04305395126342773, 0.043186176300048826]",tokens/s,23.25919694012482,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,3032.055808,3638.427648,0.0,3235.905536,3224.4864,s,1,10.3475380859375,10.3475380859375,0.0,10.3475380859375,10.3475380859375,10.3475380859375,10.3475380859375,[10.3475380859375],,kWh,9.007372377500738e-05,9.926381557804438e-06,2.8064466896002283e-05,0.00012806457222881408,,MB,2823.032832,4066.246656,0.0,3649.04448,3598.945792,s,10,1.3556054382324219,0.13556054382324217,0.0003410636958371221,0.13555110931396486,0.13592356414794923,0.13607124099731446,0.13618938247680665,"[0.1362189178466797, 0.1358907470703125, 0.13554966735839843, 0.1350122528076172, 0.13528675842285157, 0.1353695068359375, 0.13555255126953125, 0.13579107666015625, 0.13572950744628906, 
0.13520445251464844]",tokens/s,1888.4550974788006,kWh,4.021186964554633e-06,4.4346250977542444e-07,2.6644389656437197e-06,7.129088439973778e-06,tokens/kWh,35909219.27193003,MB,2832.539648,4068.343808,0.0,3651.141632,3598.948352,s,10,21.517157714843748,2.151715771484375,0.02767607002700983,2.1378133544921876,2.190906518554687,2.191800915527344,2.1925164331054687,"[2.13872021484375, 2.18800341796875, 2.163997314453125, 2.11986572265625, 2.136906494140625, 2.1269326171875, 2.130882568359375, 2.1926953125, 2.190707763671875, 2.1284462890625]",tokens/s,29.278959997834214,kWh,6.254353053794572e-05,6.898397178869131e-06,3.811153809955639e-05,0.00010755346581637124,tokens/kWh,585755.1825206777,,s,630,21.5131389274597,0.034147839567396386,0.0006749107914666502,0.03394793510437012,0.03490240592956543,0.03507964630126953,0.03663309234619141,"[0.03378585433959961, 0.034155902862548826, 0.03361574554443359, 0.03364940643310547, 0.03436671829223633, 0.033919742584228516, 0.03388332748413086, 0.033710559844970706, 0.0336080322265625, 0.03357491302490234, 0.03367737579345703, 0.03408035278320312, 0.03364284896850586, 0.03359539031982422, 0.03358924865722656, 0.03390224075317383, 0.033494945526123046, 0.033550590515136716, 0.03344339370727539, 0.03360764694213867, 0.03344630432128906, 0.033593601226806644, 0.03359120178222656, 0.033519710540771484, 0.03356480026245117, 0.03360345458984375, 0.03357900619506836, 0.03614720153808594, 0.034247711181640626, 0.03392015838623047, 0.034070465087890626, 0.033888126373291014, 0.033693695068359376, 0.03426713562011719, 0.033734657287597655, 0.03369807815551758, 0.03419516754150391, 0.03379731369018555, 0.033650943756103516, 0.033669696807861325, 0.03364390563964844, 0.034111488342285154, 0.03371072006225586, 0.033908702850341796, 0.033687744140625, 0.03393648147583008, 0.03387433624267578, 0.03397872161865234, 0.03364031982421875, 0.03393532943725586, 0.03411788940429687, 0.0340417594909668, 0.0339865608215332, 0.033966079711914066, 0.033976318359375, 0.03418316650390625, 0.034110912322998045, 0.034071102142333984, 0.03418931198120117, 0.03496137619018555, 0.035291168212890626, 0.03487641525268555, 0.0352817268371582, 0.034457599639892575, 0.03458483123779297, 0.03453414535522461, 0.03482316970825195, 0.03420345687866211, 0.0343941764831543, 0.034334846496582035, 0.03510476684570313, 0.03420774459838867, 0.03455590438842773, 0.034648063659667966, 0.03470883178710937, 0.03480416107177734, 0.03495920181274414, 0.03454982376098633, 0.03482160186767578, 0.034703777313232424, 0.034694942474365234, 0.034511520385742185, 0.0347955207824707, 0.03447808074951172, 0.03461017608642578, 0.03487027359008789, 0.034635841369628904, 0.03477088165283203, 0.034543617248535156, 0.03506784057617188, 0.036639968872070314, 0.03452399826049805, 0.0346827507019043, 0.03618624114990234, 0.03470336151123047, 0.03486659240722656, 0.03452143859863281, 0.034488033294677735, 0.03430412673950195, 0.03460956954956055, 0.03458611297607422, 0.038957569122314455, 0.03477686309814453, 0.035390880584716795, 0.0345362548828125, 0.035334144592285156, 0.03468454360961914, 0.03455833435058594, 0.0345860481262207, 0.034791969299316404, 0.03438800048828125, 0.034540958404541015, 0.034484832763671876, 0.034551807403564457, 0.03432217788696289, 0.034412193298339847, 0.03431894302368164, 0.034336769104003906, 0.03447916793823242, 0.03429177474975586, 0.03427423858642578, 0.03442476654052734, 0.03440969467163086, 0.03442707061767578, 0.03440291213989258, 0.03435500717163086, 0.03439763259887695, 
0.03473904037475586, 0.034070304870605465, 0.034590206146240234, 0.03491299057006836, 0.03483647918701172, 0.03478636932373047, 0.035033409118652346, 0.03499379348754883, 0.0356033935546875, 0.03661625671386719, 0.03441664123535156, 0.03453036880493164, 0.034460609436035156, 0.034471424102783206, 0.034466304779052735, 0.034797569274902344, 0.0342999038696289, 0.03400089645385742, 0.03417897415161133, 0.03421523284912109, 0.03435308837890625, 0.03507491302490234, 0.03753919982910156, 0.03440006256103516, 0.03446435165405273, 0.034265022277832034, 0.03431657409667969, 0.03425484848022461, 0.0339046401977539, 0.034688575744628906, 0.03670198440551758, 0.03426131057739258, 0.03468527984619141, 0.0341890869140625, 0.03558035278320312, 0.034705184936523435, 0.03384880065917969, 0.033844959259033205, 0.033591327667236326, 0.033702335357666015, 0.03383967971801758, 0.03356665420532227, 0.033613662719726566, 0.03359743881225586, 0.033783809661865234, 0.03391692733764649, 0.03389750289916992, 0.03367420959472656, 0.033672191619873046, 0.03362822341918945, 0.034444225311279296, 0.0340684814453125, 0.033849342346191406, 0.034748416900634765, 0.03372851181030274, 0.03363430404663086, 0.03374204635620117, 0.033522464752197265, 0.0334491195678711, 0.03358601760864258, 0.033527233123779296, 0.033382144927978516, 0.0337388801574707, 0.03393289566040039, 0.03424732971191406, 0.03361356735229492, 0.03365631866455078, 0.03348883056640625, 0.033968414306640625, 0.03357222366333008, 0.03374982452392578, 0.03353987121582031, 0.033481246948242185, 0.033457504272460935, 0.033522113800048825, 0.03358924865722656, 0.033795646667480465, 0.0340832633972168, 0.033982463836669925, 0.033680862426757815, 0.03349343872070312, 0.033783008575439456, 0.03354508972167969, 0.033547519683837894, 0.03357977676391601, 0.03445670318603516, 0.03370393753051758, 0.03348569488525391, 0.034369537353515625, 0.03355852890014648, 0.03356198501586914, 0.03355302429199219, 0.03360768127441406, 0.03354380798339844, 0.03351180648803711, 0.03365232086181641, 0.03382275390625, 0.03354867172241211, 0.033584449768066404, 0.033587905883789064, 0.033492992401123044, 0.03343465423583984, 0.033387454986572265, 0.033421054840087894, 0.033470943450927736, 0.033495967864990234, 0.03368022537231445, 0.0338776626586914, 0.03372278213500977, 0.03377356719970703, 0.03371363067626953, 0.03350947189331055, 0.033697952270507814, 0.033597599029541014, 0.03351683044433594, 0.03357987213134766, 0.03345395278930664, 0.03349462509155274, 0.033349342346191406, 0.03346230316162109, 0.03342211151123047, 0.0335145263671875, 0.03357731246948242, 0.03364108657836914, 0.0336445426940918, 0.033457855224609374, 0.03375718307495117, 0.034147838592529296, 0.03450246429443359, 0.03419347381591797, 0.0344951057434082, 0.0335618896484375, 0.033927902221679684, 0.033673023223876955, 0.03377376174926758, 0.03425075149536133, 0.03388415908813477, 0.033751041412353515, 0.03378585433959961, 0.033613697052001956, 0.0346478385925293, 0.03360393524169922, 0.0339128303527832, 0.03351244735717773, 0.03362617492675781, 0.03346934509277344, 0.034136096954345704, 0.033906688690185545, 0.033529857635498046, 0.03371535873413086, 0.03351433563232422, 0.0332770881652832, 0.03342627334594726, 0.033494686126708986, 0.03376367950439453, 0.03354214477539062, 0.03351468658447266, 0.03340937423706054, 0.033522144317626956, 0.03398988723754883, 0.037945919036865235, 0.03374227142333985, 0.03410825729370117, 0.03391683197021484, 0.03444713592529297, 0.034103519439697264, 0.033751296997070315, 
0.034098430633544924, 0.03392969512939453, 0.03382684707641601, 0.033883296966552734, 0.03370684814453125, 0.03348009490966797, 0.033608287811279294, 0.033614849090576174, 0.033598464965820314, 0.03349628829956055, 0.035602558135986326, 0.03441872024536133, 0.033950271606445315, 0.03387919998168945, 0.03388118362426758, 0.0338983039855957, 0.03486105728149414, 0.033710079193115236, 0.033624000549316406, 0.03370348739624023, 0.03349071884155273, 0.033761825561523434, 0.03386163330078125, 0.03377353668212891, 0.033658912658691406, 0.03382271957397461, 0.034051422119140626, 0.033893024444580075, 0.03438963317871094, 0.03376985549926758, 0.033896350860595705, 0.03379766464233398, 0.03362630462646484, 0.03372995376586914, 0.03363759994506836, 0.033742591857910155, 0.034181121826171876, 0.03374899291992187, 0.033701889038085936, 0.03344998550415039, 0.03349913787841797, 0.03349631881713867, 0.033501182556152344, 0.033753856658935544, 0.03370348739624023, 0.03358355331420899, 0.03347788619995117, 0.03344009780883789, 0.033468704223632816, 0.03354227066040039, 0.03390828704833984, 0.03360335922241211, 0.033763999938964846, 0.033687263488769534, 0.033831199645996096, 0.03351551818847656, 0.03394559860229492, 0.033964031219482424, 0.033670654296875, 0.03377948760986328, 0.03353673553466797, 0.033709728240966796, 0.03345443344116211, 0.033546207427978515, 0.03352809524536133, 0.03371513748168945, 0.0337529296875, 0.03380857467651367, 0.03356547164916992, 0.03370393753051758, 0.03337420654296875, 0.03391897583007813, 0.03354118347167969, 0.035127967834472654, 0.03477532958984375, 0.03419551849365234, 0.03397625732421875, 0.0337367057800293, 0.03353539276123047, 0.033555007934570315, 0.033536033630371095, 0.03383456039428711, 0.0337919692993164, 0.03373823928833008, 0.03401696014404297, 0.03380460739135742, 0.03353430557250976, 0.033524959564208985, 0.03348524856567383, 0.03342326354980469, 0.033438880920410155, 0.03372947311401367, 0.03356047821044922, 0.03344614410400391, 0.03344550323486328, 0.033468639373779294, 0.03353395080566406, 0.03342540740966797, 0.03342089462280273, 0.033540512084960936, 0.033437088012695314, 0.03359600067138672, 0.03390784072875976, 0.03377366256713867, 0.0335511360168457, 0.033451969146728516, 0.03367942428588867, 0.03345817565917969, 0.03363155364990234, 0.0336286735534668, 0.034135326385498044, 0.03341609573364258, 0.03369270324707031, 0.03352265548706055, 0.03386163330078125, 0.033511425018310545, 0.03345577621459961, 0.033476959228515624, 0.033345535278320314, 0.033476608276367184, 0.03347040176391602, 0.03332102584838867, 0.03353395080566406, 0.033395809173583986, 0.03355731201171875, 0.03369491195678711, 0.03369583892822266, 0.03389728164672851, 0.03395174407958984, 0.033976318359375, 0.033871200561523436, 0.03401590347290039, 0.034297119140625, 0.03426736068725586, 0.03388371276855469, 0.03668169784545899, 0.03444947052001953, 0.034337120056152345, 0.03426108932495117, 0.034204128265380856, 0.034179073333740234, 0.034463134765625, 0.034470817565917966, 0.034719776153564454, 0.03440412902832031, 0.0342608642578125, 0.034409889221191405, 0.034237857818603515, 0.03421488189697266, 0.034312065124511716, 0.03446121597290039, 0.034914623260498046, 0.03499446487426758, 0.03489971160888672, 0.03486880111694336, 0.03503168106079101, 0.03494303894042969, 0.03521676635742187, 0.03513727951049805, 0.03486377716064453, 0.03568592071533203, 0.034673343658447264, 0.03471769714355469, 0.03517350387573242, 0.034859905242919924, 0.03473616027832031, 0.03494499206542969, 
0.03468211364746094, 0.0350379524230957, 0.034729888916015625, 0.034766944885253906, 0.034712894439697266, 0.0344901123046875, 0.034300159454345704, 0.03448710250854492, 0.034626911163330075, 0.034541919708251954, 0.03480572891235351, 0.034662910461425785, 0.03469251251220703, 0.03486956787109375, 0.034703296661376955, 0.034453857421875, 0.03470307159423828, 0.03461065673828125, 0.03465619277954102, 0.03476038360595703, 0.034588832855224606, 0.034431743621826175, 0.03476275253295898, 0.03456383895874023, 0.034740318298339845, 0.03462380981445313, 0.0350076789855957, 0.03501942443847656, 0.035051742553710935, 0.035167423248291016, 0.03504803085327148, 0.03501670455932617, 0.03508351898193359, 0.03537382507324219, 0.034742206573486326, 0.03480377578735352, 0.03488351821899414, 0.03496956634521484, 0.03470140838623047, 0.03496944046020508, 0.034850433349609376, 0.03487593460083008, 0.03472185516357422, 0.03479059219360352, 0.03491244888305664, 0.034822273254394534, 0.03482220840454102, 0.035098175048828124, 0.03483868789672852, 0.034793663024902347, 0.0350333137512207, 0.0349730224609375, 0.034982879638671874, 0.0347770881652832, 0.034952289581298826, 0.034980064392089845, 0.034957889556884766, 0.03472140884399414, 0.03490591812133789, 0.03474051284790039, 0.034848159790039065, 0.034786304473876956, 0.034895870208740236, 0.03462144088745117, 0.03469696044921875, 0.034678241729736325, 0.0345239372253418, 0.03501996612548828, 0.03439680099487305, 0.034594974517822265, 0.035360385894775394, 0.034716064453125, 0.03510476684570313, 0.034719200134277345, 0.034621376037597656, 0.034474369049072265, 0.03507222366333008, 0.03452707290649414, 0.03493699264526367, 0.034902015686035154, 0.03488278579711914, 0.034644992828369144, 0.03459648132324219, 0.03464236831665039, 0.0344719352722168, 0.034455265045166016, 0.03455590438842773, 0.03455516815185547, 0.03461193466186523, 0.03446988677978516, 0.034453857421875, 0.03453440093994141, 0.03823455810546875, 0.035186912536621096, 0.03447977447509765, 0.03477743911743164, 0.0350904312133789, 0.034523136138916014, 0.03426508712768555, 0.034490367889404294, 0.03431782531738281, 0.03434044647216797, 0.034253726959228514, 0.034291648864746095, 0.03427324676513672, 0.03429753494262695, 0.03390201568603515, 0.034123870849609376, 0.0340607681274414, 0.034090015411376955, 0.03408560180664062, 0.03413407897949219, 0.03404000091552734, 0.03406374359130859, 0.03415619277954102, 0.036582366943359375, 0.03451903915405274, 0.03412998580932617, 0.03394486236572265, 0.03376911926269531, 0.033754112243652344, 0.03393360137939453, 0.033756351470947264, 0.0335877456665039, 0.03391078567504883, 0.033711265563964844, 0.033590366363525394, 0.03357219314575195, 0.03361833572387695, 0.033658878326416015, 0.03379404830932617, 0.0335766716003418, 0.03342291259765625, 0.033565406799316404, 0.03371164703369141, 0.033753856658935544, 0.03353548812866211, 0.033758880615234375, 0.033659454345703124, 0.03354608154296875, 0.033517120361328125, 0.03368339157104492, 0.034682975769042966, 0.03340140914916992, 0.033433792114257815, 0.034008895874023434, 0.033689502716064454, 0.033901729583740235, 0.034038719177246095, 0.03356086349487305, 0.03349260711669922, 0.03354224014282227, 0.03369779205322266, 0.033439327239990234, 0.0336102409362793, 0.03356016159057617, 0.03347897720336914, 0.03359539031982422, 0.033560352325439455, 0.03334681701660156, 0.03345881652832031, 0.03335113525390625, 0.03348271942138672, 0.03374582290649414, 0.03352371215820313, 0.033562625885009766, 0.03365609741210938, 
0.03353276824951172, 0.03351696014404297]",tokens/s,29.284429488616265,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4261.888,6278.742016,0.0,5876.219904,5603.426816,s,1,11.182859375,11.182859375,0.0,11.182859375,11.182859375,11.182859375,11.182859375,[11.182859375],,kWh,0.00011571948042083401,1.2751005459489618e-05,3.6454473607998306e-05,0.00016492495948832193,,MB,2293.256192,6605.897728,0.0,6188.695552,6011.44064,s,10,1.992250045776367,0.1992250045776367,0.0004931238999607749,0.19932622528076172,0.1998184585571289,0.19983545303344727,0.19984904861450195,"[0.19893594360351563, 0.1987425231933594, 0.19860163879394532, 0.1994628448486328, 0.19843193054199218, 0.19981468200683594, 0.19918960571289063, 0.19965017700195312, 0.19956825256347657, 0.19985244750976563]",tokens/s,1284.9792652419715,kWh,5.888194575083464e-06,6.491842037685981e-07,3.8926197807601096e-06,1.042999855961217e-05,tokens/kWh,24544586.323463414,MB,2304.770048,6605.897728,0.0,6188.695552,6011.4432,s,10,20.860714599609373,2.0860714599609373,0.007767765203903572,2.0835799560546873,2.0964466552734375,2.0995255737304688,2.1019887084960938,"[2.074931396484375, 2.079135986328125, 2.086658447265625, 2.095762451171875, 2.083851318359375, 2.08330859375, 2.082281494140625, 2.09037109375, 2.1026044921875, 2.081809326171875]",tokens/s,30.200307711980155,kWh,6.105752522158378e-05,6.734693829470635e-06,4.051041574163999e-05,0.00010830263479269441,tokens/kWh,581703.2994681094,,s,630,20.857117656707764,0.0331065359630282,0.0003879618508980447,0.03307737731933594,0.03340918769836426,0.03353102798461914,0.033999976272583006,"[0.03327699279785156, 0.03316640090942383, 0.032803390502929686, 0.03272230529785156, 0.032568065643310544, 0.032688129425048826, 0.0327086067199707, 0.03260006332397461, 0.03259088134765625, 0.03259286499023437, 0.032704513549804685, 0.0327086067199707, 0.03255401611328125, 0.03293487930297852, 0.03272003173828125, 0.03284873580932617, 0.032871807098388675, 0.032814815521240236, 0.03280953598022461, 0.0328707504272461, 0.03281903839111328, 0.03300105667114258, 0.03306550216674805, 0.033019744873046875, 0.032922943115234374, 0.032823585510253904, 0.032903263092041016, 0.03287910461425781, 0.03290012741088867, 0.03297788619995117, 0.033007614135742186, 0.03302809524536133, 0.03297280120849609, 0.032821121215820315, 0.03281068801879883, 0.032811454772949215, 0.032740833282470704, 0.03291756820678711, 0.03278412628173828, 0.0328251838684082, 0.03301875305175781, 0.03287833786010742, 0.032798656463623045, 0.03309804916381836, 0.033058815002441407, 0.03322473526000977, 0.03313046264648437, 0.03312639999389649, 0.03309568023681641, 0.03316326522827148, 0.033124542236328124, 0.033197601318359374, 0.03310204696655274, 0.03317756652832031, 0.033134689331054686, 0.03307705688476562, 0.03303372955322265, 0.03304499053955078, 0.03310406494140625, 0.0333496322631836, 0.0331890869140625, 
0.03298928070068359, 0.03292131042480469, 0.03360768127441406, 0.03333324813842774, 0.03283148956298828, 0.032732192993164065, 0.03266409683227539, 0.032966560363769534, 0.032763839721679684, 0.03263343811035156, 0.0326097297668457, 0.032797248840332034, 0.03258572769165039, 0.03267583847045898, 0.032581249237060544, 0.03257382583618164, 0.032892929077148435, 0.03291494369506836, 0.03282201766967773, 0.03290419387817383, 0.033489662170410155, 0.03287795257568359, 0.03302873611450195, 0.032935935974121096, 0.03278960037231445, 0.032772705078125, 0.03271094512939453, 0.03279670333862305, 0.032761856079101564, 0.032783550262451173, 0.03291424179077149, 0.032878688812255856, 0.03304025650024414, 0.032868255615234376, 0.03286188888549805, 0.03292793655395508, 0.032753887176513674, 0.033177631378173825, 0.0332042236328125, 0.033353023529052735, 0.03310015869140625, 0.03297459030151367, 0.033029857635498046, 0.03314115142822266, 0.03316371154785156, 0.033130401611328124, 0.033220703125, 0.03315068817138672, 0.03310224151611328, 0.033122081756591794, 0.03319612884521484, 0.03320832061767578, 0.03321187210083008, 0.033135009765625, 0.0330015983581543, 0.033037696838378906, 0.032963199615478514, 0.033142784118652346, 0.03321241760253906, 0.03314681625366211, 0.033246528625488284, 0.03316812896728515, 0.03348003387451172, 0.033456798553466796, 0.03320832061767578, 0.034158176422119144, 0.03362416076660156, 0.03304246520996094, 0.033252670288085935, 0.03385033416748047, 0.03289907073974609, 0.03277619171142578, 0.03269612884521484, 0.032766143798828126, 0.0328540153503418, 0.03300348663330078, 0.03286223983764648, 0.03290275192260742, 0.03285001754760742, 0.03296697616577148, 0.03299641418457031, 0.03301062393188477, 0.033060832977294924, 0.03295439910888672, 0.03297689437866211, 0.03300556945800781, 0.03299488067626953, 0.03318790435791016, 0.032971134185791014, 0.0328908805847168, 0.033076385498046874, 0.03321311950683594, 0.033081504821777345, 0.03323494338989258, 0.03307462310791016, 0.03312604904174805, 0.03309865570068359, 0.033067008972167966, 0.03311820983886719, 0.033124351501464845, 0.032968063354492184, 0.033086078643798825, 0.03314223861694336, 0.03306140899658203, 0.03315097427368164, 0.03333660888671875, 0.03309590530395508, 0.03310400009155273, 0.03303462219238281, 0.03301721572875976, 0.03316595077514648, 0.03313190460205078, 0.0331099853515625, 0.03319852828979492, 0.03324655914306641, 0.03314163208007812, 0.03305088043212891, 0.03300076675415039, 0.03307769775390625, 0.0332861442565918, 0.03325337600708008, 0.0331611213684082, 0.033116256713867184, 0.03311113739013672, 0.03317238235473633, 0.033298431396484376, 0.03360665512084961, 0.03339571380615235, 0.03386457443237305, 0.03375727844238281, 0.033271713256835936, 0.03340492630004883, 0.03323699188232422, 0.0328724479675293, 0.03298303985595703, 0.032715904235839845, 0.03273305511474609, 0.03281785583496094, 0.03288300704956055, 0.03277967834472656, 0.032825408935546876, 0.033019615173339845, 0.032981822967529294, 0.03288883209228516, 0.032894977569580076, 0.03303833770751953, 0.03312947082519531, 0.032939006805419925, 0.03346243286132813, 0.03332694244384766, 0.033132545471191405, 0.03297183990478516, 0.032930816650390625, 0.03315235137939453, 0.03309747314453125, 0.03320857620239258, 0.03326528167724609, 0.03346121597290039, 0.033260990142822265, 0.03338630294799805, 0.033252094268798826, 0.03316073608398438, 0.0332347183227539, 0.03297705459594726, 0.03306480026245117, 0.03303699111938477, 0.03306496047973633, 
0.03323849487304688, 0.03310976028442383, 0.03307190322875977, 0.033259521484375, 0.033157119750976564, 0.03317055892944336, 0.03479235076904297, 0.033662944793701174, 0.03351174545288086, 0.03336159896850586, 0.03334348678588867, 0.0332861442565918, 0.03326076889038086, 0.033301280975341796, 0.033255424499511715, 0.033165313720703124, 0.033160671234130856, 0.03326787185668945, 0.033503616333007816, 0.03357283020019531, 0.03338857650756836, 0.0363397102355957, 0.03339878463745117, 0.033288192749023435, 0.03354828643798828, 0.03402687835693359, 0.03301248168945312, 0.032837505340576174, 0.033067008972167966, 0.03277231979370117, 0.03264080047607422, 0.032688129425048826, 0.03263843154907226, 0.03270912170410156, 0.03273712158203125, 0.03276819229125977, 0.032974239349365234, 0.03277884674072266, 0.03297196960449219, 0.032717086791992187, 0.03273168182373047, 0.03280281448364258, 0.03291561508178711, 0.03304022216796875, 0.03304780960083008, 0.032885120391845706, 0.03273971176147461, 0.03281919860839844, 0.03262374496459961, 0.03277840042114258, 0.03312508773803711, 0.03318508911132813, 0.0330162239074707, 0.03293974304199219, 0.03316998291015625, 0.03308748626708984, 0.03301526260375977, 0.033323009490966796, 0.033165534973144534, 0.033513790130615236, 0.03303628921508789, 0.03322880172729492, 0.033148929595947264, 0.03310124969482422, 0.03319046401977539, 0.033185504913330076, 0.03328169631958008, 0.03309632110595703, 0.03303164672851563, 0.03305731201171875, 0.03322220611572266, 0.03328448104858398, 0.0331541748046875, 0.03311276626586914, 0.03321676635742187, 0.033244865417480465, 0.033278465270996094, 0.03316921615600586, 0.033379871368408205, 0.03324156951904297, 0.033113536834716795, 0.03313926315307617, 0.03329411315917969, 0.033457504272460935, 0.03330502319335937, 0.03316371154785156, 0.03347455978393555, 0.03350400161743164, 0.03311820983886719, 0.03274342346191406, 0.03270012664794922, 0.03286249542236328, 0.03283267211914063, 0.03270537567138672, 0.032857406616210935, 0.03297555160522461, 0.032876449584960936, 0.03278243255615235, 0.032870273590087894, 0.03286614227294922, 0.03282099151611328, 0.0327380485534668, 0.03292073440551758, 0.03294070434570313, 0.03295606231689453, 0.03287481689453125, 0.032873886108398434, 0.033012641906738284, 0.03298214340209961, 0.03279110336303711, 0.03279052734375, 0.032882686614990234, 0.03278438568115234, 0.03271615982055664, 0.03276044845581055, 0.03291555023193359, 0.033029918670654294, 0.03295449447631836, 0.033067008972167966, 0.032964286804199217, 0.032930110931396486, 0.033091583251953126, 0.03312844848632813, 0.0331141128540039, 0.03323699188232422, 0.0331038703918457, 0.033255424499511715, 0.03299123382568359, 0.03308262252807617, 0.03336268615722656, 0.03323907089233399, 0.033403934478759764, 0.03352467346191406, 0.03363558578491211, 0.03351424026489258, 0.03324879837036133, 0.033307071685791015, 0.03323411178588867, 0.03336854553222656, 0.03315955352783203, 0.03312598419189453, 0.03329270553588867, 0.032967872619628906, 0.03325215911865234, 0.03331071853637695, 0.03329337692260742, 0.03335059356689453, 0.03327577590942383, 0.03345849609375, 0.033255424499511715, 0.033772415161132815, 0.033365760803222656, 0.032809215545654295, 0.03273030471801758, 0.03273551940917969, 0.03288323211669922, 0.03273257446289062, 0.03271932983398437, 0.03250719833374023, 0.03283232116699219, 0.03284377670288086, 0.03353177642822266, 0.03273699188232422, 0.03276639938354492, 0.03285398483276367, 0.03272294235229492, 0.0328045425415039, 
0.032981151580810546, 0.032830753326416016, 0.032936607360839844, 0.03281327819824219, 0.03285939025878906, 0.032829536437988284, 0.03277686309814453, 0.03281919860839844, 0.03296220779418945, 0.032909568786621095, 0.03300566482543945, 0.032981246948242185, 0.03371596908569336, 0.033645599365234376, 0.033065696716308594, 0.03291980743408203, 0.033091583251953126, 0.03311820983886719, 0.03324643325805664, 0.03307190322875977, 0.033068321228027345, 0.033207008361816406, 0.032984703063964844, 0.033007999420166016, 0.03322880172729492, 0.03309587097167969, 0.033038143157958985, 0.03315324783325195, 0.033211967468261716, 0.033140960693359374, 0.03332611083984375, 0.03303081512451172, 0.03295878219604492, 0.03308272171020508, 0.03314944076538086, 0.033334625244140624, 0.033256256103515625, 0.033019744873046875, 0.03294825744628906, 0.033048702239990235, 0.033570816040039066, 0.03337318420410156, 0.033364990234375, 0.03329206466674805, 0.03313654327392578, 0.03301612854003906, 0.033934112548828124, 0.03326092910766602, 0.03299203109741211, 0.03286447906494141, 0.03272886276245117, 0.03274137496948242, 0.033148094177246096, 0.033190719604492186, 0.033468414306640625, 0.03319750213623047, 0.03328643035888672, 0.033150497436523436, 0.033178367614746095, 0.033072319030761715, 0.033180126190185545, 0.03315292739868164, 0.03297222518920898, 0.03298239898681641, 0.033166976928710935, 0.032968704223632815, 0.03294406509399414, 0.03288070297241211, 0.0332407341003418, 0.03296403121948242, 0.033173984527587894, 0.033236766815185545, 0.03321491241455078, 0.033767776489257814, 0.033506431579589845, 0.035393375396728516, 0.03382268905639649, 0.03301267242431641, 0.03318783950805664, 0.032933887481689454, 0.03303833770751953, 0.033087425231933594, 0.03299334335327148, 0.03297875213623047, 0.03281939315795898, 0.03336742401123047, 0.03287308883666992, 0.03289913558959961, 0.033054656982421875, 0.03307075119018555, 0.03293324661254883, 0.03279337692260742, 0.03284537506103516, 0.03285670471191406, 0.032884288787841796, 0.03343990325927734, 0.03316265487670898, 0.032996223449707034, 0.03306905746459961, 0.033107967376708985, 0.0331096305847168, 0.03316678237915039, 0.033215423583984376, 0.033454078674316406, 0.03342131042480469, 0.033478656768798826, 0.03327596664428711, 0.03325251388549805, 0.03346102523803711, 0.03414387130737305, 0.03348227310180664, 0.03280310440063477, 0.039010879516601565, 0.03256899261474609, 0.03262057495117188, 0.03268777465820313, 0.032756385803222654, 0.03264713668823242, 0.03300969696044922, 0.03311001586914063, 0.03318783950805664, 0.03313260650634765, 0.03335916900634766, 0.033005760192871096, 0.033114273071289065, 0.033048385620117186, 0.033184062957763674, 0.033177406311035156, 0.03315951919555664, 0.03313628768920898, 0.03306326293945312, 0.033142784118652346, 0.03330643081665039, 0.033328575134277345, 0.033227519989013674, 0.03346432113647461, 0.033708030700683594, 0.033425472259521485, 0.033368000030517576, 0.03330867385864258, 0.03344800186157226, 0.033392574310302736, 0.033273120880126954, 0.03348348617553711, 0.03325040054321289, 0.03327900695800781, 0.03359324645996094, 0.03320966339111328, 0.03305926513671875, 0.033726688385009765, 0.03333280181884766, 0.033199966430664064, 0.033641056060791014, 0.033724193572998044, 0.03335804748535156, 0.033329151153564454, 0.03335987091064453, 0.03338240051269531, 0.03339004898071289, 0.03331507110595703, 0.03333148956298828, 0.03327590560913086, 0.03344547271728516, 0.03322307205200195, 0.0332632942199707, 0.03337161636352539, 
0.03340784072875977, 0.03349612808227539, 0.03358780670166016, 0.03353011322021485, 0.03347180938720703, 0.033336063385009766, 0.033447681427001955, 0.03336627197265625, 0.033336639404296875, 0.03294259262084961, 0.033074943542480466, 0.03308579254150391, 0.03310950469970703, 0.03279753494262695, 0.03278953552246094, 0.03295920181274414, 0.032780288696289066, 0.03273523330688476, 0.032720897674560545, 0.0328642578125, 0.032968223571777346, 0.032868480682373045, 0.032870494842529296, 0.03295654296875, 0.03300979232788086, 0.032986976623535155, 0.032823455810546874, 0.03299673461914063, 0.032940673828125, 0.032892929077148435, 0.03290700912475586, 0.0330522232055664, 0.03311276626586914, 0.03285606384277344, 0.03295008087158203, 0.03286259078979492, 0.032839488983154294, 0.03308095932006836, 0.033239425659179686, 0.03336806488037109, 0.033216384887695315, 0.033088958740234375, 0.03285615921020508, 0.033067615509033206, 0.03288169479370117, 0.03286486434936523, 0.032874271392822264, 0.032883201599121094, 0.032780384063720705, 0.03288883209228516, 0.03299123382568359, 0.03287039947509766, 0.032959518432617185, 0.03301635360717774, 0.03316310501098633, 0.03306147384643555, 0.03311206436157227, 0.03332710266113281, 0.03328799819946289, 0.0333191032409668, 0.03354009628295898, 0.03320217514038086, 0.033220672607421876, 0.033210304260253905, 0.033331039428710935, 0.03336438369750976, 0.0333076171875, 0.033137439727783206, 0.03307062530517578]",tokens/s,30.20551594756855,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,8222.408704,11243.814912,0.0,10848.567296,10616.027648,s,1,14.2419736328125,14.2419736328125,0.0,14.2419736328125,14.2419736328125,14.2419736328125,14.2419736328125,[14.2419736328125],,kWh,0.00021355691618333593,2.35493461762268e-05,6.55511635520023e-05,0.00030265742591156504,,MB,4069.998592,11675.828224,0.0,11265.900544,11070.470656,s,10,3.8241392517089845,0.3824139251708984,0.0012889247604856932,0.3826279144287109,0.38372066345214845,0.38415348358154294,0.3844997396850586,"[0.38028997802734377, 0.38029132080078126, 0.3819477233886719, 0.38262469482421874, 0.3820273132324219, 0.38263113403320315, 0.3833066711425781, 0.3836244812011719, 0.3828096313476563, 0.3845863037109375]",tokens/s,669.4316894595174,kWh,1.1202163411574049e-05,1.2354040652425912e-06,7.444728177999863e-06,1.9882295654816503e-05,tokens/kWh,12875776.743516224,MB,4074.22976,11677.925376,0.0,11267.997696,11070.473216,s,10,29.081416992187503,2.90814169921875,0.007295716749810582,2.9061455078125,2.91719306640625,2.9194087890625,2.9211813671875,"[2.89908349609375, 2.898954833984375, 2.9021630859375, 2.904309814453125, 2.90538232421875, 2.90690869140625, 2.91390625, 2.91670068359375, 2.91238330078125, 
2.92162451171875]",tokens/s,21.663318543564937,kWh,8.528220892301002e-05,9.406210770988544e-06,5.671276759239928e-05,0.00015140118728639785,tokens/kWh,416112.9851698331,,s,630,29.07887252426148,0.046156940514700755,0.0006409928978163092,0.04608177566528321,0.046541160202026366,0.04677731609344483,0.04871881748199463,"[0.048164958953857424, 0.04631155014038086, 0.045741825103759765, 0.04528819274902344, 0.04536524963378906, 0.045560958862304685, 0.04535491180419922, 0.045319137573242185, 0.04544307327270508, 0.04581785583496094, 0.045707263946533204, 0.04564377593994141, 0.045555519104003905, 0.0458856315612793, 0.04575027084350586, 0.04581894302368164, 0.0458240966796875, 0.045709312438964846, 0.04558921432495117, 0.04567772674560547, 0.04589209747314453, 0.046155967712402345, 0.045975841522216794, 0.04598169708251953, 0.04588339233398438, 0.04588457489013672, 0.045689697265625, 0.04586700820922852, 0.04571731185913086, 0.04568492889404297, 0.04603289413452148, 0.04934041595458984, 0.04544633483886719, 0.04575129699707031, 0.045991744995117184, 0.04591734313964844, 0.0456668815612793, 0.046156063079833984, 0.04584041595458985, 0.04580758285522461, 0.045764606475830076, 0.046639102935791016, 0.04634124755859375, 0.046091136932373045, 0.04625408172607422, 0.046419456481933595, 0.04605206298828125, 0.045895454406738284, 0.04620083236694336, 0.046066944122314456, 0.04586572647094726, 0.04624991989135742, 0.046303329467773435, 0.046088062286376956, 0.04606886291503906, 0.04619548797607422, 0.04612428665161133, 0.04603116989135742, 0.04631324768066406, 0.046482273101806644, 0.046430206298828124, 0.046235649108886716, 0.046524417877197265, 0.04808476638793945, 0.046153953552246094, 0.04573344039916992, 0.04545171356201172, 0.04529507064819336, 0.04539446258544922, 0.045725311279296875, 0.04552096176147461, 0.04553254318237305, 0.045460193634033204, 0.04581398391723633, 0.04593791961669922, 0.045542144775390626, 0.04563763046264648, 0.0458851203918457, 0.04563180923461914, 0.045676544189453126, 0.04588742446899414, 0.04588959884643555, 0.045729694366455076, 0.045846622467041014, 0.046132225036621094, 0.04610886383056641, 0.04602671813964844, 0.04599689483642578, 0.04591753768920898, 0.045761184692382814, 0.04547788619995117, 0.04575641632080078, 0.04597145462036133, 0.045916160583496096, 0.045768543243408205, 0.04661468887329102, 0.04588544082641602, 0.04621868896484375, 0.0459986572265625, 0.046034942626953124, 0.04585062408447266, 0.045860576629638675, 0.046068000793457034, 0.046314815521240234, 0.046171966552734374, 0.04618675231933594, 0.04638800048828125, 0.04631702423095703, 0.04626675033569336, 0.04692768096923828, 0.046050880432128904, 0.04583692932128906, 0.04597760009765625, 0.04613529586791992, 0.046120960235595705, 0.046301185607910154, 0.046456832885742184, 0.04632726287841797, 0.04596380615234375, 0.04633599853515625, 0.046107872009277344, 0.04594358444213867, 0.04606486511230469, 0.046518463134765625, 0.046434913635253906, 0.04636841583251953, 0.04864233779907227, 0.046163841247558596, 0.045297664642333986, 0.04524249649047852, 0.04565971374511719, 0.04552262496948242, 0.04529024124145508, 0.04550451278686524, 0.04563750457763672, 0.04560441589355469, 0.045402687072753904, 0.04558848190307617, 0.04574003219604492, 0.04577280044555664, 0.045927871704101564, 0.04580409622192383, 0.04604431915283203, 0.04602054214477539, 0.045833118438720705, 0.045830142974853515, 0.04624361419677735, 0.046055553436279296, 0.04616790390014648, 0.04598969650268555, 0.0458160629272461, 
0.045811904907226565, 0.04601769638061524, 0.04584320068359375, 0.04563158416748047, 0.04605132675170898, 0.04582809448242187, 0.045838207244873044, 0.04569305419921875, 0.04625305557250976, 0.04619353485107422, 0.04616534423828125, 0.04596611022949219, 0.04619059371948242, 0.0458092155456543, 0.045900222778320315, 0.046442497253417966, 0.04638515090942383, 0.046274559020996094, 0.046276607513427735, 0.04642611312866211, 0.04627648162841797, 0.046176383972167966, 0.04599193572998047, 0.046369953155517576, 0.046209888458251955, 0.04618854522705078, 0.046258174896240234, 0.046300865173339846, 0.04631788635253906, 0.046102527618408204, 0.046319454193115235, 0.046000286102294924, 0.046316959381103515, 0.04773846435546875, 0.0459950065612793, 0.046382270812988284, 0.046532703399658204, 0.04658150482177734, 0.04873788833618164, 0.04627011108398438, 0.04568966293334961, 0.04546688079833985, 0.04542086410522461, 0.04538617706298828, 0.04570111846923828, 0.045649921417236325, 0.04565536117553711, 0.045596607208251955, 0.045574783325195316, 0.045582462310791015, 0.045747329711914066, 0.045706111907958986, 0.045690879821777344, 0.04585023880004883, 0.046246078491210936, 0.04601465606689453, 0.04595097732543945, 0.04582988739013672, 0.04611836624145508, 0.046297889709472656, 0.04645891189575195, 0.04633542251586914, 0.0460120964050293, 0.045988929748535155, 0.04579919815063477, 0.04601023864746094, 0.045631614685058594, 0.045791233062744144, 0.0461475830078125, 0.045830142974853515, 0.045661376953125, 0.046054206848144534, 0.04593670272827149, 0.04588947296142578, 0.045932544708251956, 0.046489601135253904, 0.04622463989257813, 0.045918270111083986, 0.04635833740234375, 0.04641471862792969, 0.046327808380126956, 0.046327808380126956, 0.046660606384277346, 0.046429183959960936, 0.046358047485351564, 0.04631804656982422, 0.04626383972167969, 0.046037471771240235, 0.04619651031494141, 0.04614115142822266, 0.0460880012512207, 0.04601721572875977, 0.046239070892333985, 0.04634022521972656, 0.04610915374755859, 0.04632787322998047, 0.046435806274414064, 0.04654671859741211, 0.046488319396972656, 0.046781822204589846, 0.04657187271118164, 0.04790537643432617, 0.045893791198730466, 0.045526912689208984, 0.045326366424560546, 0.04537472152709961, 0.04554991912841797, 0.045596511840820315, 0.045493919372558596, 0.045714305877685546, 0.045821502685546876, 0.04591177749633789, 0.0471448974609375, 0.045625919342041014, 0.04584368133544922, 0.04574720001220703, 0.04577231979370117, 0.04630486297607422, 0.046045089721679686, 0.04583932876586914, 0.04605747222900391, 0.04646297454833984, 0.046132926940917966, 0.04598188781738281, 0.04592038345336914, 0.04597145462036133, 0.04579878234863281, 0.046016670227050784, 0.04581795120239258, 0.04568307113647461, 0.045657470703125, 0.04600064086914062, 0.04617792129516601, 0.04601619338989258, 0.04600876617431641, 0.04591654586791992, 0.04675379180908203, 0.04589977645874024, 0.04614963150024414, 0.046282176971435544, 0.04606991958618164, 0.04646303939819336, 0.046604095458984376, 0.04626444625854492, 0.04631155014038086, 0.04632953643798828, 0.0463917121887207, 0.04593068695068359, 0.04619059371948242, 0.04610784149169922, 0.04691846466064453, 0.046004222869873046, 0.046491649627685545, 0.04612076950073242, 0.0461354866027832, 0.04622335815429687, 0.04640768051147461, 0.04603084945678711, 0.04639091110229492, 0.046381439208984375, 0.04655712127685547, 0.04635859298706055, 0.0466998405456543, 0.04660089492797852, 0.04867212677001953, 0.04618918228149414, 0.045563488006591796, 
0.04525507354736328, 0.04563504028320312, 0.04556035232543945, 0.04551475143432617, 0.04540825653076172, 0.04581292724609375, 0.045947711944580076, 0.04576208114624023, 0.04677014541625977, 0.04558694458007812, 0.04596736145019531, 0.0459356803894043, 0.045884353637695316, 0.04599398422241211, 0.04596736145019531, 0.04580966567993164, 0.045752479553222654, 0.04623344039916992, 0.04632166290283203, 0.04632355117797852, 0.04611625671386719, 0.04594768142700195, 0.04576457595825195, 0.04604108810424805, 0.0458342399597168, 0.04581292724609375, 0.04592723083496094, 0.04601187133789063, 0.04588188934326172, 0.045811038970947265, 0.04600284957885742, 0.04631347274780274, 0.04600937652587891, 0.04603798294067383, 0.04625161743164063, 0.04624835205078125, 0.04603257751464844, 0.046128929138183596, 0.046602783203125, 0.04655923080444336, 0.04659404754638672, 0.04640915298461914, 0.04615948867797852, 0.04621964645385742, 0.04620316696166992, 0.04649398422241211, 0.04604470443725586, 0.046225887298583984, 0.0463927993774414, 0.046034847259521484, 0.04643699264526367, 0.04635238265991211, 0.04637392044067383, 0.04600252914428711, 0.04631001663208008, 0.04648099136352539, 0.04620636749267578, 0.046492385864257815, 0.047624481201171874, 0.04640358352661133, 0.04907440185546875, 0.04619059371948242, 0.0454093132019043, 0.04544947052001953, 0.045601505279541016, 0.045629150390625, 0.04539420700073242, 0.04563558578491211, 0.04596950531005859, 0.04584233474731445, 0.04561305618286133, 0.04579942321777344, 0.045795326232910154, 0.04585472106933594, 0.04577062225341797, 0.04592380905151367, 0.04595779037475586, 0.045864959716796876, 0.0461431999206543, 0.04607392120361328, 0.046080223083496095, 0.04616191864013672, 0.046481407165527344, 0.04616502380371094, 0.04610556793212891, 0.04600831985473633, 0.04604927825927734, 0.04584447860717773, 0.045674495697021485, 0.04597555160522461, 0.04612300872802735, 0.045932544708251956, 0.04595507049560547, 0.04632156753540039, 0.04603084945678711, 0.04580995178222656, 0.04600511932373047, 0.04632057571411133, 0.04620236968994141, 0.05368252944946289, 0.0453858871459961, 0.045788959503173826, 0.04608419036865234, 0.047185630798339845, 0.0467457275390625, 0.04661183929443359, 0.046349151611328125, 0.04642387390136719, 0.04591436767578125, 0.04627865600585938, 0.046182239532470706, 0.04614899063110352, 0.046277183532714844, 0.04658982467651367, 0.04651827239990235, 0.04622985458374024, 0.046491649627685545, 0.04641177749633789, 0.04621279907226562, 0.046808895111083985, 0.04669411087036133, 0.046246688842773435, 0.046145023345947264, 0.04850543975830078, 0.04600323104858398, 0.045709983825683594, 0.04560723114013672, 0.04550041580200195, 0.04542259216308594, 0.04567977523803711, 0.045953887939453125, 0.04592230224609375, 0.04589158248901367, 0.04575347137451172, 0.0456703987121582, 0.045797279357910156, 0.045747169494628905, 0.04624319839477539, 0.04597209548950195, 0.04610867309570312, 0.045872894287109375, 0.04603315353393555, 0.045956382751464846, 0.046508865356445314, 0.046892959594726565, 0.046358528137207033, 0.04583391952514648, 0.04598969650268555, 0.04617267227172851, 0.045879295349121094, 0.046083328247070315, 0.04652518463134766, 0.04623932647705078, 0.045975967407226564, 0.04597555160522461, 0.04612681579589844, 0.045959457397460934, 0.04617987060546875, 0.04641839981079102, 0.046399486541748046, 0.046230560302734376, 0.04623580932617188, 0.04614217758178711, 0.046336097717285155, 0.046567073822021486, 0.046614879608154296, 0.046431774139404296, 
0.04627299118041992, 0.046419647216796874, 0.04644646453857422, 0.04598214340209961, 0.046516223907470705, 0.046499839782714845, 0.04628041458129883, 0.0463150405883789, 0.05188479995727539, 0.04571305465698242, 0.04636819076538086, 0.04635443115234375, 0.046519073486328125, 0.04638528060913086, 0.04653987121582031, 0.046615455627441404, 0.04645865631103516, 0.04684553527832031, 0.046613121032714845, 0.04818534469604492, 0.04624492645263672, 0.0456693115234375, 0.04551590347290039, 0.04536819076538086, 0.04576243209838867, 0.04550259017944336, 0.04558195114135742, 0.045682113647460935, 0.04582700729370117, 0.04604655838012695, 0.04581846237182617, 0.04579129409790039, 0.04571136093139649, 0.046004222869873046, 0.04582191848754883, 0.04615766525268555, 0.04613542556762695, 0.045959232330322265, 0.046239585876464845, 0.04652457427978516, 0.04637491226196289, 0.04633744049072266, 0.04647945785522461, 0.046184833526611326, 0.045987968444824216, 0.04603219223022461, 0.04607027053833008, 0.04582009506225586, 0.04614963150024414, 0.04621311950683594, 0.046276607513427735, 0.045866943359375, 0.046139488220214846, 0.04600393676757813, 0.04598313522338867, 0.045953887939453125, 0.046519905090332034, 0.046537120819091796, 0.04618585586547851, 0.04653731155395508, 0.04680502319335938, 0.04643840026855469, 0.0466756477355957, 0.04676025772094727, 0.04656742477416992, 0.04628396987915039, 0.047683582305908204, 0.04593747329711914, 0.04617599868774414, 0.04625958251953125, 0.04656729507446289, 0.046285118103027344, 0.04638175964355469, 0.04639852905273437, 0.04620956802368164, 0.04618406295776367, 0.04642076873779297, 0.04663881683349609, 0.0464664306640625, 0.0463733139038086, 0.04677180862426758, 0.04661747360229492, 0.04902617645263672, 0.04616486358642578, 0.045690494537353514, 0.04564982223510742, 0.04561967849731445, 0.04573183822631836, 0.04566150283813476, 0.045464256286621096, 0.04576233673095703, 0.045883617401123046, 0.04585062408447266, 0.04582809448242187, 0.0457625617980957, 0.046302974700927736, 0.0464447021484375, 0.04615996932983398, 0.04606060791015625, 0.047233985900878905, 0.046069759368896485, 0.04611686325073242, 0.04649369430541992, 0.046540542602539064, 0.04652671813964844, 0.046150657653808595, 0.04593900680541992, 0.04613190460205078, 0.046225406646728515, 0.04591206359863281, 0.045956863403320315, 0.04620518493652344, 0.04619059371948242, 0.045890720367431644, 0.046147903442382815, 0.04631363296508789, 0.046219230651855465, 0.04617436981201172, 0.04652671813964844, 0.04626227188110352, 0.046284801483154295, 0.049309696197509766, 0.04605542373657227, 0.046007423400878905, 0.047815521240234374, 0.04632783889770508, 0.046266368865966793, 0.046378719329833985, 0.04641952133178711, 0.04609302520751953, 0.04627251052856445, 0.04637615966796875, 0.04648969650268555, 0.0461739501953125, 0.04657977676391602, 0.04652889633178711, 0.04643686294555664, 0.04636188888549805, 0.04652515029907227, 0.04649897766113281, 0.04681609725952148, 0.04698448181152344, 0.04694489669799805, 0.0486440315246582, 0.0464796142578125]",tokens/s,21.665214133538704,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code 
{isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8807.899136,10152.18176,0.0,9749.659648,9714.717184,s,1,14.7033388671875,14.7033388671875,0.0,14.7033388671875,14.7033388671875,14.7033388671875,14.7033388671875,[14.7033388671875],,kWh,0.00021300866355418292,2.348912013070779e-05,6.296005036800223e-05,0.00029945783405289293,,MB,1883.185152,10580.000768,0.0,10162.798592,10037.824,s,10,4.624969512939454,0.46249695129394536,0.0008901268197805085,0.4622973480224609,0.4633814025878906,0.46371123962402344,0.4639751092529297,"[0.46327786254882813, 0.46115023803710936, 0.46257376098632813, 0.4632738647460938, 0.4618979797363281, 0.46404107666015626, 0.46330810546875, 0.46158438110351563, 0.46202093505859376, 0.46184130859375]",tokens/s,553.5171621862135,kWh,1.3503292427651903e-05,1.4891767433209699e-06,8.928075324272922e-06,2.3920544495245795e-05,tokens/kWh,10702097.523360305,MB,1892.073472,10791.81312,0.0,10374.610944,10299.609088,s,10,34.453828369140616,3.4453828369140624,0.0023691816873914993,3.445557373046875,3.448095703125,3.4484241943359377,3.4486869873046877,"[3.442867431640625, 3.4428173828125, 3.44151513671875, 3.448022705078125, 3.445501220703125, 3.448752685546875, 3.445613525390625, 3.447486328125, 3.447258056640625, 
3.443993896484375]",tokens/s,18.285340985917088,kWh,0.00010048558879068236,1.1082811911940027e-05,6.691595757312707e-05,0.00017848435827574945,tokens/kWh,352972.10696002917,,s,630,34.450136367797896,0.054682756139361674,0.0003925622648369922,0.05466936111450195,0.05514907455444336,0.055284803199768064,0.055477677421569824,"[0.054749183654785157, 0.05431296157836914, 0.05388505554199219, 0.053888576507568356, 0.054233409881591796, 0.05413824081420898, 0.05423081588745117, 0.05438915252685547, 0.05436867141723633, 0.05407132720947266, 0.05438083267211914, 0.05449903869628906, 0.05411430358886719, 0.05410201644897461, 0.054513473510742184, 0.05451948928833008, 0.05456880187988281, 0.054680225372314456, 0.05420441436767578, 0.05443123245239258, 0.05442934417724609, 0.05425964736938477, 0.05416207885742187, 0.05410224151611328, 0.05443552017211914, 0.05452016067504883, 0.05458745574951172, 0.05467903900146484, 0.0544939193725586, 0.05451891326904297, 0.05433750534057617, 0.05458419036865234, 0.05657785415649414, 0.05419820785522461, 0.05464246368408203, 0.05472687911987305, 0.05466521453857422, 0.05460358428955078, 0.054644927978515626, 0.05478400039672852, 0.05490655899047851, 0.054704288482666015, 0.054618431091308595, 0.054709247589111325, 0.054817886352539064, 0.05484848022460938, 0.05504489517211914, 0.054924766540527345, 0.054991390228271486, 0.05507683181762695, 0.054844863891601564, 0.05492780685424805, 0.05471459197998047, 0.05493727874755859, 0.05505580902099609, 0.055314369201660156, 0.055015617370605466, 0.055049121856689455, 0.05537897491455078, 0.05523699188232422, 0.05503014373779297, 0.054910911560058596, 0.055200000762939454, 0.054613311767578124, 0.05439350509643555, 0.05401513671875, 0.053922561645507815, 0.05393212890625, 0.054145023345947264, 0.05402550506591797, 0.0543034896850586, 0.05416547012329102, 0.05412825775146484, 0.054011455535888674, 0.05429724884033203, 0.05443161773681641, 0.05411590576171875, 0.05424361419677735, 0.054346176147460935, 0.05471846389770508, 0.054443649291992184, 0.0543768310546875, 0.054316574096679685, 0.05486217498779297, 0.05460902404785156, 0.054520832061767575, 0.05433718490600586, 0.05448668670654297, 0.05446844863891601, 0.05447897720336914, 0.054397472381591795, 0.054282176971435545, 0.054818401336669924, 0.054601600646972656, 0.05446057510375977, 0.05455116653442383, 0.05468982315063477, 0.05462217712402344, 0.05454000091552735, 0.05440335845947265, 0.054521663665771485, 0.05483129501342773, 0.055041057586669925, 0.05485385513305664, 0.05468646240234375, 0.054836414337158204, 0.055026302337646486, 0.05501142501831055, 0.05484553527832031, 0.05485772705078125, 0.05487206268310547, 0.055009311676025394, 0.054962207794189456, 0.05504819107055664, 0.05497644805908203, 0.05490483093261719, 0.05507276916503906, 0.05511103820800781, 0.055072830200195315, 0.055126590728759764, 0.05540454483032226, 0.055169025421142576, 0.055209598541259765, 0.05536175918579102, 0.05547622299194336, 0.05502377700805664, 0.05459699249267578, 0.05414771270751953, 0.0543001594543457, 0.054516223907470705, 0.05418960189819336, 0.05466892623901367, 0.054080352783203126, 0.054087711334228517, 0.054116321563720704, 0.05411401748657227, 0.054248832702636716, 0.054344032287597654, 0.05418656158447266, 0.054476062774658204, 0.054407806396484376, 0.05432876968383789, 0.05430953598022461, 0.05450320053100586, 0.05453859329223633, 0.05455449676513672, 0.05495507049560547, 0.05445523071289062, 0.055003135681152344, 0.054496513366699216, 0.054629375457763675, 
0.054437633514404296, 0.0545689582824707, 0.05461196899414063, 0.05467939376831055, 0.05451696014404297, 0.05438518524169922, 0.054686111450195314, 0.05435801696777344, 0.05449932861328125, 0.05445663833618164, 0.054539966583251956, 0.05468364715576172, 0.054765312194824216, 0.054876415252685544, 0.05464057540893555, 0.05490697479248047, 0.05474505615234375, 0.05483327865600586, 0.054534015655517576, 0.05447484970092773, 0.05460163116455078, 0.054642913818359375, 0.05471619033813477, 0.054822784423828125, 0.05498659133911133, 0.05476367950439453, 0.05483113479614258, 0.05508940887451172, 0.055141822814941406, 0.05529232025146484, 0.05521820831298828, 0.05494355010986328, 0.05491062545776367, 0.05504000091552735, 0.0549711685180664, 0.05489823913574219, 0.05494607925415039, 0.05491011047363281, 0.057283935546875, 0.05477443313598633, 0.054075393676757816, 0.054236351013183595, 0.05424812698364258, 0.05440918350219726, 0.05400371170043945, 0.054061214447021486, 0.05418188858032227, 0.054347232818603514, 0.05409571075439453, 0.05402489471435547, 0.054295936584472654, 0.0543526725769043, 0.05417763137817383, 0.054617984771728516, 0.05466739273071289, 0.05446656036376953, 0.054502559661865235, 0.05450409698486328, 0.05433555221557617, 0.05427212905883789, 0.0546201286315918, 0.05486595153808594, 0.05429411315917969, 0.054505630493164064, 0.05470793533325195, 0.05463004684448242, 0.05464688110351563, 0.05479443359375, 0.054698078155517575, 0.05446297454833984, 0.05466883087158203, 0.05474124908447266, 0.05504022216796875, 0.05496585464477539, 0.054761409759521484, 0.05498518371582031, 0.05477788925170898, 0.05503359985351562, 0.05504764938354492, 0.05489126586914062, 0.054781951904296876, 0.05494358444213867, 0.05505449676513672, 0.054824542999267575, 0.05476742553710937, 0.054889057159423826, 0.05497478485107422, 0.05482463836669922, 0.05493350219726562, 0.05500905609130859, 0.054970558166503904, 0.05506051254272461, 0.05497446441650391, 0.0551956787109375, 0.05497209548950195, 0.0552279052734375, 0.05520790481567383, 0.05480044937133789, 0.05479916763305664, 0.05506860733032227, 0.05531238555908203, 0.05454643249511719, 0.054106113433837894, 0.05406671905517578, 0.054061534881591794, 0.05392326354980469, 0.05418985748291016, 0.05429676818847656, 0.054273727416992185, 0.053916576385498044, 0.053960544586181644, 0.054053024291992186, 0.05420470428466797, 0.0540013427734375, 0.05424979019165039, 0.05419385528564453, 0.05423721694946289, 0.05448646545410156, 0.05432156753540039, 0.05447603225708008, 0.05455923080444336, 0.05464310455322265, 0.054814720153808595, 0.05439459228515625, 0.05439516830444336, 0.05471641540527344, 0.05465087890625, 0.054542335510253906, 0.05450137710571289, 0.05470159912109375, 0.054591552734375, 0.054626846313476564, 0.05461747360229492, 0.054636608123779296, 0.05457539367675781, 0.05484969711303711, 0.05496352005004883, 0.05480438232421875, 0.054774559020996094, 0.05500928115844726, 0.05493971252441406, 0.054879905700683594, 0.05487830352783203, 0.054972766876220706, 0.054941505432128904, 0.05476339340209961, 0.05527561569213867, 0.05533446502685547, 0.055060958862304686, 0.054908958435058594, 0.054941505432128904, 0.05491139221191406, 0.054869087219238284, 0.055149055480957034, 0.055312576293945315, 0.05531584167480469, 0.054941505432128904, 0.05529267120361328, 0.055377983093261716, 0.05506470489501953, 0.05525900650024414, 0.05544780731201172, 0.055154399871826174, 0.05512195205688476, 0.0547295036315918, 0.05420441436767578, 0.05415753555297852, 0.0540546875, 
0.05419827270507813, 0.055907646179199216, 0.05405766296386719, 0.054356254577636716, 0.05448044967651367, 0.05402230453491211, 0.05420431900024414, 0.0543026237487793, 0.054262111663818356, 0.05421884918212891, 0.05460534286499023, 0.05430467224121094, 0.05450579071044922, 0.05475849533081055, 0.05470505523681641, 0.054429855346679684, 0.054497119903564456, 0.05452777481079102, 0.05469443130493164, 0.05452339172363281, 0.05464083099365234, 0.05452799987792969, 0.05446656036376953, 0.054370494842529295, 0.05469164657592773, 0.054740993499755856, 0.05446246337890625, 0.05483103942871094, 0.05471382522583008, 0.05463622283935547, 0.0548054084777832, 0.05465292739868164, 0.05487411117553711, 0.05502975845336914, 0.05492940902709961, 0.054669281005859376, 0.0558177261352539, 0.05519993591308594, 0.05488880157470703, 0.05501440048217773, 0.054795135498046876, 0.05479436874389648, 0.05476105499267578, 0.054906368255615234, 0.05499382400512695, 0.054956127166748046, 0.05477088165283203, 0.055012065887451174, 0.055045726776123044, 0.05514924621582031, 0.055004894256591795, 0.055166046142578126, 0.05525187301635742, 0.055109630584716796, 0.0550645751953125, 0.05504800033569336, 0.05534124755859375, 0.05525708770751953, 0.0552573127746582, 0.054730751037597655, 0.05428192138671875, 0.05421500778198242, 0.05430636978149414, 0.054206878662109374, 0.05429862213134766, 0.054187904357910155, 0.05414857482910156, 0.05398931121826172, 0.05414985656738281, 0.054284286499023435, 0.054060161590576174, 0.054182785034179684, 0.054220096588134765, 0.054239936828613285, 0.05415043258666992, 0.05441974258422851, 0.054538177490234374, 0.054485343933105466, 0.0542251205444336, 0.054579135894775394, 0.05433699035644531, 0.05450601577758789, 0.0545054702758789, 0.05436620712280273, 0.05459276962280273, 0.05461840057373047, 0.05467951965332031, 0.05464640045166016, 0.05456780624389648, 0.05461382293701172, 0.05443193435668945, 0.05464188766479492, 0.05463119888305664, 0.054593505859375, 0.05460995101928711, 0.05483724975585937, 0.05511167907714844, 0.05491664123535156, 0.05500156784057617, 0.05474508666992187, 0.05482700729370117, 0.054921215057373046, 0.055225921630859376, 0.05512422561645508, 0.05482688140869141, 0.05498246383666992, 0.05506032180786133, 0.055026432037353516, 0.05496207809448242, 0.054945793151855465, 0.05506662368774414, 0.05505382537841797, 0.05539619064331055, 0.05514102554321289, 0.05510095977783203, 0.05499951934814453, 0.055109630584716796, 0.055748001098632816, 0.05507132720947266, 0.05531033706665039, 0.05533900833129883, 0.055173057556152344, 0.05503023910522461, 0.054284320831298825, 0.05409004974365234, 0.05400175857543945, 0.053912929534912106, 0.05420089721679688, 0.05442559814453125, 0.054115615844726565, 0.05429116821289062, 0.05409996795654297, 0.05401744079589844, 0.054096481323242185, 0.05430185699462891, 0.054203231811523436, 0.05456281661987305, 0.05459747314453125, 0.0542127685546875, 0.054617599487304686, 0.054710369110107425, 0.05477228927612305, 0.054576862335205076, 0.054624385833740234, 0.05457875061035156, 0.05450166320800781, 0.054605022430419925, 0.05460063934326172, 0.05472262573242188, 0.05476688003540039, 0.054651679992675783, 0.05485894393920898, 0.05462108612060547, 0.054632095336914065, 0.0553177261352539, 0.05450640106201172, 0.05446246337890625, 0.05484543991088867, 0.05499881744384766, 0.05479983901977539, 0.054687553405761716, 0.05481286239624023, 0.05515750503540039, 0.055053695678710934, 0.05494643020629883, 0.05488569641113281, 0.0546965103149414, 
0.054967681884765626, 0.05502051162719727, 0.05510102462768555, 0.054917312622070315, 0.055184894561767575, 0.05512790298461914, 0.05488620758056641, 0.05501007843017578, 0.05508512115478516, 0.05502566528320312, 0.05512192153930664, 0.05503788757324219, 0.055220287322998045, 0.055261119842529294, 0.05539993667602539, 0.055261856079101564, 0.05498255920410156, 0.055054561614990234, 0.05475804901123047, 0.054111774444580076, 0.05411660766601562, 0.053843582153320316, 0.05441596984863281, 0.054386688232421876, 0.053954719543457035, 0.05414281463623047, 0.05444403076171875, 0.05424870300292969, 0.054156063079833984, 0.054452190399169924, 0.054787521362304685, 0.054478431701660154, 0.054219711303710935, 0.05414915084838867, 0.054468608856201174, 0.05462019348144531, 0.05465494537353516, 0.05450956726074219, 0.05429817581176758, 0.054583744049072264, 0.05438409423828125, 0.05447734451293945, 0.05464678573608398, 0.05457628631591797, 0.0545513916015625, 0.05447225570678711, 0.054544830322265626, 0.05454848098754883, 0.05497446441650391, 0.05496137619018555, 0.054629249572753905, 0.0545871696472168, 0.0545445442199707, 0.05491900634765625, 0.05475532913208008, 0.05466944122314453, 0.054840736389160157, 0.05495049667358398, 0.055045982360839844, 0.05493324661254883, 0.05511209487915039, 0.05490028762817383, 0.05468403244018555, 0.054859840393066406, 0.0547039680480957, 0.055021183013916015, 0.055050785064697266, 0.055119873046875, 0.055209983825683595, 0.05504204940795898, 0.05505827331542969, 0.055132320404052734, 0.05501958465576172, 0.05534304046630859, 0.05527555084228516, 0.055311840057373045, 0.05537424087524414, 0.055500064849853516, 0.05541151809692383, 0.05484134292602539, 0.05514636611938477, 0.054790145874023435, 0.0542371826171875, 0.05392809677124023, 0.05418806457519531, 0.054259521484375, 0.054083553314208985, 0.054461952209472655, 0.05432579040527344, 0.05399347305297852, 0.05394841766357422, 0.05418582534790039, 0.05413820648193359, 0.054477630615234376, 0.05425971221923828, 0.054378017425537106, 0.05446031951904297, 0.05440166473388672, 0.05406611251831055, 0.05458227157592774, 0.05459299087524414, 0.054223167419433595, 0.05461948776245117, 0.05458217620849609, 0.0546283187866211, 0.05445632171630859, 0.05458313751220703, 0.05431926345825195, 0.05438470458984375, 0.054515392303466796, 0.05461382293701172, 0.054501823425292965, 0.05463619232177734, 0.05485388946533203, 0.05474675369262695, 0.05468617630004883, 0.054687744140625, 0.05475942230224609, 0.0548109130859375, 0.05472380828857422, 0.05477337646484375, 0.05486095809936523, 0.0551421127319336, 0.0547512321472168, 0.054724609375, 0.054884384155273434, 0.05476553726196289, 0.05494374465942383, 0.05489030456542969, 0.05512406539916992, 0.05534297561645508, 0.05515657424926758, 0.054935359954833986, 0.05482473754882813, 0.05521907043457031, 0.05526057434082031, 0.054827072143554687, 0.05487651062011719, 0.05522441482543945, 0.05513216018676758, 0.05488230514526367, 0.05521408081054688, 0.055478271484375, 0.05535065460205078]",tokens/s,18.28730061541615,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1176.510464,987.62752,0.0,585.105408,557.135872,s,1,8.3354541015625,8.3354541015625,0.0,8.3354541015625,8.3354541015625,8.3354541015625,8.3354541015625,[8.3354541015625],,kWh,3.2622645854166874e-05,3.5912713417086963e-06,9.474174246001943e-06,4.5688091441877515e-05,,MB,1406.160896,1147.011072,0.0,729.808896,689.092096,s,10,0.35381260681152343,0.03538126068115234,0.00011594546108985009,0.03532651138305664,0.03555142707824707,0.03559608135223388,0.03563180477142334,"[0.03554150390625, 0.03531296157836914, 0.035315486907958986, 0.035440254211425784, 0.035314208984375, 0.035337535858154294, 0.0356407356262207, 0.03535433578491211, 0.03526342391967773, 0.03529216003417969]",tokens/s,7235.468580585984,kWh,1.057187249471604e-06,1.1658781886990102e-07,7.008177909275349e-07,1.87459285926904e-06,tokens/kWh,136562986.8556216,MB,1439.739904,1293.811712,0.0,876.609536,689.094656,s,10,15.23012658691406,1.5230126586914063,0.01601958859472698,1.5158468017578124,1.5526045654296874,1.5529537109375,1.55323302734375,"[1.5056917724609375, 1.5123055419921876, 1.51765087890625, 1.513377685546875, 1.5136868896484375, 1.529890380859375, 1.5143514404296874, 1.5173421630859374, 1.5533028564453124, 1.5525269775390624]",tokens/s,41.365381725802905,kWh,4.357524103094142e-05,4.80567983312932e-06,1.6974140793472676e-05,6.535506165754341e-05,tokens/kWh,963965.1222443368,,s,630,15.224501422882078,0.024165875274415997,0.0006361963746911963,0.02399091148376465,0.024886787796020507,0.025098824882507323,0.026826662883758543,"[0.02395939254760742, 0.02405232048034668, 0.02450003242492676, 0.02394697570800781, 0.023706079483032227, 0.02362553596496582, 0.023595903396606447, 0.023627071380615233, 0.023738336563110352, 0.02372857666015625, 0.023879680633544922, 0.024016767501831054, 0.023666847229003907, 0.0237608642578125, 0.02386092758178711, 0.02416828727722168, 0.02403990364074707, 0.023834623336791993, 0.023648384094238282, 0.023789440155029297, 0.02373017692565918, 0.023696767807006837, 0.02378816032409668, 0.023640064239501952, 0.023714080810546875, 0.023702367782592774, 0.023688064575195313, 0.0239716796875, 0.024438880920410157, 0.024259904861450195, 0.02395622444152832, 0.02369740867614746, 0.02381939125061035, 0.023739263534545897, 0.023758848190307616, 0.0237260799407959, 0.023910400390625, 0.024159263610839844, 0.023857215881347656, 0.023665855407714844, 0.023600704193115236, 0.02364431953430176, 0.02391859245300293, 0.024601856231689454, 0.023825151443481445, 0.023676767349243164, 0.02368060874938965, 0.023616064071655275, 0.023731487274169922, 0.023839487075805663, 0.02371990394592285, 0.023971839904785155, 0.02396384048461914, 0.02393641662597656, 0.023613855361938475, 0.02367692756652832, 0.026093568801879883, 0.02387353515625, 0.02446950340270996, 0.02405072021484375, 0.023806720733642577, 
0.023810272216796876, 0.023920896530151368, 0.02384614372253418, 0.023857791900634764, 0.02383014488220215, 0.023708383560180665, 0.02362656021118164, 0.023659488677978516, 0.02389756774902344, 0.024078304290771485, 0.02412598419189453, 0.024139808654785155, 0.024233631134033203, 0.023896575927734375, 0.02409769630432129, 0.024459487915039064, 0.02443484878540039, 0.024834783554077148, 0.024243167877197266, 0.02409766387939453, 0.02410700798034668, 0.024180736541748047, 0.02403638458251953, 0.02398627281188965, 0.024198015213012694, 0.024049663543701173, 0.023546880722045898, 0.023649280548095702, 0.023592735290527345, 0.02382022476196289, 0.024090911865234373, 0.02394316864013672, 0.023625728607177734, 0.024008512496948242, 0.023854496002197266, 0.023827232360839844, 0.02364825630187988, 0.02371379280090332, 0.023828479766845705, 0.02386534309387207, 0.023721376419067384, 0.023699871063232424, 0.023627647399902342, 0.023616992950439453, 0.023615711212158202, 0.02392848014831543, 0.023601472854614256, 0.02410915184020996, 0.02413577651977539, 0.024146400451660156, 0.02389606475830078, 0.023797632217407227, 0.02362495994567871, 0.02615385627746582, 0.0245166072845459, 0.024222944259643556, 0.024154144287109373, 0.023857919692993165, 0.023713504791259766, 0.02400079917907715, 0.023987615585327148, 0.025573984146118164, 0.02383603286743164, 0.02385590362548828, 0.02381001663208008, 0.023598976135253906, 0.023739967346191406, 0.023784032821655275, 0.023828832626342774, 0.023711679458618164, 0.023666688919067383, 0.02404774475097656, 0.023836544036865234, 0.023654399871826173, 0.023789215087890624, 0.023664480209350587, 0.023787935256958007, 0.02371552085876465, 0.024383424758911133, 0.024080799102783202, 0.0241910400390625, 0.02394316864013672, 0.024856672286987305, 0.02462073516845703, 0.024359136581420898, 0.023799583435058592, 0.0244200325012207, 0.023884319305419923, 0.023772640228271483, 0.023660064697265625, 0.023743616104125977, 0.023975807189941405, 0.02443247985839844, 0.024367231369018555, 0.024010303497314454, 0.02392684745788574, 0.023762815475463866, 0.02412598419189453, 0.023837984085083006, 0.02396873664855957, 0.02389695930480957, 0.0243939208984375, 0.02426041603088379, 0.023990751266479492, 0.025410112380981446, 0.024663520812988282, 0.023945472717285157, 0.02409075164794922, 0.02450169563293457, 0.025066303253173827, 0.02455116844177246, 0.02414182472229004, 0.02410905647277832, 0.02395123291015625, 0.023787872314453125, 0.02366806411743164, 0.023631999969482422, 0.02385686492919922, 0.024088863372802735, 0.024061471939086913, 0.02374304008483887, 0.023783903121948242, 0.02378726387023926, 0.02449843215942383, 0.024653568267822265, 0.02408412742614746, 0.024469600677490235, 0.02486297607421875, 0.02410495948791504, 0.02432640075683594, 0.023887231826782228, 0.023828863143920898, 0.023823488235473634, 0.024127775192260743, 0.024045440673828126, 0.02369580841064453, 0.023886016845703125, 0.023861343383789063, 0.023803232192993164, 0.023851680755615234, 0.023859199523925782, 0.02392198371887207, 0.023927648544311522, 0.024401760101318358, 0.02398195266723633, 0.024420768737792968, 0.02368467140197754, 0.023748640060424805, 0.023695104598999022, 0.02372831916809082, 0.02368079948425293, 0.023558559417724608, 0.023635936737060548, 0.023797855377197266, 0.023859136581420897, 0.0241430721282959, 0.023781503677368164, 0.023860960006713866, 0.024029983520507812, 0.02451584053039551, 0.02755471992492676, 0.024311807632446288, 0.024104448318481447, 0.023957151412963867, 
0.023876352310180662, 0.024072288513183594, 0.024311807632446288, 0.024582143783569335, 0.02429680061340332, 0.024201887130737305, 0.024229888916015626, 0.024049312591552734, 0.023980127334594727, 0.02422166442871094, 0.024158079147338866, 0.023929119110107422, 0.023959232330322267, 0.024182687759399413, 0.02391641616821289, 0.02371855926513672, 0.023584768295288085, 0.023740415573120118, 0.02364841651916504, 0.023668575286865234, 0.023577600479125976, 0.023648832321166994, 0.024449119567871092, 0.02448147201538086, 0.02369603157043457, 0.023654399871826173, 0.02370368003845215, 0.023473695755004884, 0.02364668846130371, 0.02410495948791504, 0.023809375762939452, 0.024545951843261717, 0.023895231246948243, 0.024693567276000975, 0.02404351997375488, 0.02406399917602539, 0.023885183334350586, 0.02452128028869629, 0.02417283248901367, 0.0241231689453125, 0.02429132843017578, 0.024206815719604494, 0.02406451225280762, 0.023758655548095704, 0.023689088821411134, 0.02386729621887207, 0.02371433639526367, 0.02413942337036133, 0.023896127700805663, 0.023820480346679686, 0.023754751205444336, 0.02491542434692383, 0.023841312408447266, 0.024012800216674804, 0.024035327911376952, 0.023984031677246095, 0.02434160041809082, 0.024015424728393554, 0.024146303176879883, 0.024530399322509767, 0.02387798309326172, 0.02376316833496094, 0.02386479949951172, 0.02411939239501953, 0.02589334487915039, 0.024168447494506837, 0.023920127868652344, 0.02430793571472168, 0.023813695907592775, 0.023958240509033203, 0.023846912384033202, 0.02389580726623535, 0.02404377555847168, 0.02363507270812988, 0.02368601608276367, 0.02354742431640625, 0.024400447845458983, 0.02367888069152832, 0.02368435287475586, 0.023759584426879882, 0.023715871810913086, 0.023716960906982422, 0.02386400032043457, 0.023740095138549806, 0.02369331169128418, 0.023613759994506836, 0.02400489616394043, 0.02479302406311035, 0.02407219123840332, 0.024154111862182616, 0.023762336730957033, 0.02389632034301758, 0.024284448623657227, 0.02402992057800293, 0.023873504638671876, 0.023795551300048828, 0.02390025520324707, 0.023851104736328125, 0.023999616622924803, 0.02395635223388672, 0.023976095199584962, 0.024073823928833008, 0.02400668716430664, 0.02488672065734863, 0.02421401596069336, 0.024066335678100587, 0.023916543960571288, 0.023795007705688476, 0.024064607620239258, 0.0237805118560791, 0.023620351791381836, 0.024242368698120118, 0.023927967071533204, 0.02396041679382324, 0.02367897605895996, 0.023848960876464844, 0.023879680633544922, 0.024131359100341795, 0.024168672561645507, 0.02427903938293457, 0.02409062385559082, 0.024118335723876953, 0.023788063049316407, 0.024143423080444336, 0.023727167129516603, 0.02392192077636719, 0.02376896095275879, 0.023764640808105468, 0.023771711349487305, 0.024365503311157225, 0.023834720611572265, 0.023961503982543944, 0.02421945571899414, 0.024481119155883788, 0.024477664947509765, 0.024183616638183594, 0.024190656661987303, 0.024422592163085937, 0.024963455200195314, 0.02505299186706543, 0.025044639587402343, 0.024955232620239257, 0.0249202880859375, 0.02490959930419922, 0.025260032653808592, 0.02492416000366211, 0.024791040420532227, 0.025544704437255858, 0.02504697608947754, 0.025730688095092772, 0.024936895370483398, 0.024909759521484377, 0.02505939292907715, 0.024412256240844726, 0.024647584915161135, 0.02458624076843262, 0.024379135131835938, 0.024236223220825196, 0.02413369560241699, 0.023838720321655273, 0.023729408264160156, 0.02375551986694336, 0.024129535675048826, 0.024033279418945314, 
0.024320255279541014, 0.0240247688293457, 0.023939136505126954, 0.023975391387939454, 0.024052255630493163, 0.023994144439697267, 0.025213151931762694, 0.0243056640625, 0.024467296600341797, 0.02400271987915039, 0.023965215682983397, 0.023751136779785156, 0.02381001663208008, 0.02373823928833008, 0.023920799255371095, 0.02408790397644043, 0.02439017677307129, 0.02446732711791992, 0.024087999343872072, 0.02381420707702637, 0.024135648727416994, 0.023957536697387694, 0.024215999603271483, 0.023994688034057618, 0.023941024780273438, 0.023887136459350585, 0.02417967987060547, 0.023745792388916016, 0.024452991485595703, 0.024824640274047852, 0.024370655059814453, 0.023660032272338868, 0.023647167205810546, 0.023601119995117187, 0.02385296058654785, 0.023969919204711913, 0.023764991760253908, 0.023826431274414063, 0.023701343536376953, 0.02361155128479004, 0.023670783996582033, 0.023553983688354492, 0.023625247955322264, 0.023700000762939454, 0.02364329528808594, 0.023633983612060545, 0.023919391632080077, 0.02365644836425781, 0.02377132797241211, 0.02405766487121582, 0.024392831802368165, 0.02455027198791504, 0.023810047149658203, 0.024026559829711913, 0.026823232650756836, 0.02385305595397949, 0.023777280807495117, 0.02371379280090332, 0.02369536018371582, 0.02411520004272461, 0.023731807708740234, 0.023656864166259766, 0.023707807540893553, 0.026771295547485353, 0.025542560577392577, 0.023984224319458007, 0.023928287506103516, 0.02428982353210449, 0.02399955177307129, 0.02425337600708008, 0.02405171203613281, 0.023679136276245117, 0.02497865676879883, 0.024359167098999022, 0.023977632522583007, 0.02421548843383789, 0.02363267135620117, 0.02384486389160156, 0.023742111206054687, 0.0237346248626709, 0.02363929557800293, 0.02394393539428711, 0.023916543960571288, 0.02364723205566406, 0.023686143875122072, 0.023769088745117187, 0.02369152069091797, 0.023908128738403322, 0.023727231979370118, 0.024257375717163087, 0.024251935958862304, 0.024232448577880858, 0.024079328536987306, 0.024114175796508788, 0.024031295776367187, 0.023805280685424805, 0.02389059257507324, 0.023964927673339843, 0.023956159591674804, 0.02407619285583496, 0.02397398376464844, 0.02441753578186035, 0.024414976119995116, 0.024413951873779295, 0.02382464027404785, 0.023915712356567382, 0.023996448516845702, 0.024257055282592775, 0.02390575981140137, 0.023991071701049804, 0.024070144653320313, 0.0237457275390625, 0.023894880294799806, 0.02383395195007324, 0.02377084732055664, 0.02363465690612793, 0.023686624526977538, 0.023704288482666015, 0.02363091278076172, 0.02377414321899414, 0.02398806381225586, 0.024191360473632812, 0.024204959869384767, 0.024614336013793946, 0.02430636787414551, 0.024098623275756837, 0.024544767379760742, 0.02459529685974121, 0.024248159408569336, 0.024707328796386718, 0.02682806396484375, 0.024629632949829103, 0.02447337532043457, 0.02459283256530762, 0.024606592178344728, 0.0242172794342041, 0.02434079933166504, 0.024551519393920897, 0.02525993537902832, 0.02527449607849121, 0.02498080062866211, 0.0253875846862793, 0.02497327995300293, 0.025034624099731444, 0.025024255752563476, 0.025311456680297852, 0.02539948844909668, 0.024989856719970702, 0.025093439102172852, 0.025532960891723633, 0.02993516731262207, 0.025395008087158204, 0.024827871322631836, 0.02501919937133789, 0.02500739288330078, 0.02470128059387207, 0.02477916717529297, 0.024753984451293946, 0.024647136688232422, 0.02452521514892578, 0.0244083194732666, 0.024434560775756835, 0.024414207458496092, 0.024131584167480468, 0.02410700798034668, 
0.023980031967163085, 0.024053760528564453, 0.024166080474853517, 0.024305728912353514, 0.02450662422180176, 0.024322240829467774, 0.024248287200927733, 0.024466495513916015, 0.024247072219848634, 0.024207136154174805, 0.02406012725830078, 0.024147647857666016, 0.024453279495239257, 0.02437126350402832, 0.02468003273010254, 0.024757856369018554, 0.02481417655944824, 0.024599872589111327, 0.024699935913085936, 0.024932159423828124, 0.024887392044067383, 0.024895647048950195, 0.02492185592651367, 0.025673696517944336, 0.02528463935852051, 0.027379711151123046, 0.027931711196899415, 0.025285568237304688, 0.025230815887451172, 0.02502841567993164, 0.02510323143005371, 0.02498953628540039, 0.02495849609375, 0.02510476875305176, 0.02502460861206055, 0.024797183990478516, 0.024665472030639648, 0.024611040115356444, 0.024691104888916016, 0.02447871971130371, 0.02488012886047363, 0.024370304107666017, 0.02472230339050293, 0.024390720367431642, 0.02420217514038086, 0.023857152938842774, 0.024033279418945314, 0.02390617561340332, 0.024014911651611327, 0.023627552032470703, 0.02378371238708496, 0.02367897605895996, 0.023807552337646483, 0.024183231353759764, 0.024014848709106446, 0.02384486389160156, 0.023990272521972656, 0.02409996795654297, 0.023815040588378907, 0.023733535766601564, 0.02374115180969238, 0.024043392181396485, 0.023876768112182617, 0.023675872802734376, 0.02388915252685547, 0.0237391357421875, 0.02410425567626953, 0.023916671752929688, 0.02795782470703125, 0.023950559616088867, 0.023673311233520507, 0.025019872665405275, 0.02887875175476074, 0.02441721534729004, 0.024216800689697265, 0.0241364803314209]",tokens/s,41.380665448467454,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 58.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 127520 has 14.69 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 193.68 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,7185.829888,7954.366464,0.0,7551.844352,7485.12768,s,1,13.0658056640625,13.0658056640625,0.0,13.0658056640625,13.0658056640625,13.0658056640625,13.0658056640625,[13.0658056640625],,kWh,0.00016651609543334492,1.8360159548670913e-05,4.975531758200091e-05,0.00023463157256401674,,MB,3027.84512,8254.2592,0.0,7837.057024,7735.356416,s,10,3.563182922363281,0.35631829223632816,0.0004353149700745075,0.35641351318359377,0.3567517120361328,0.3567926498413086,0.3568254000854492,"[0.3564080505371094, 0.3552875061035156, 0.356231689453125, 0.3560609130859375, 0.35597732543945315, 0.35641897583007814, 0.3568335876464844, 0.3565511779785156, 0.35674261474609376, 0.3566710815429687]",tokens/s,718.4587644751282,kWh,1.0434369565624971e-05,1.150724941306145e-06,6.8954420242857915e-06,1.8480536531216908e-05,tokens/kWh,13852411.674713586,MB,3042.779136,8275.23072,0.0,7858.028544,7759.281152,s,10,25.967472656250003,2.5967472656250004,0.003950393385552821,2.5971947021484376,2.5994681396484376,2.602893127441406,2.605633117675781,"[2.591390625, 2.5929296875, 2.59425634765625, 2.593861083984375, 2.59702734375, 2.597591064453125, 2.59870703125, 2.598029296875, 2.597362060546875, 2.606318115234375]",tokens/s,24.261121147204438,kWh,7.614747643479139e-05,8.399171939627992e-06,5.0608520645514054e-05,0.00013515516901993347,tokens/kWh,466130.8957462692,,s,630,25.964603820800782,0.04121365685841394,0.0003667683513705873,0.04119153594970703,0.04163794212341308,0.04173655586242676,0.04202617168426514,"[0.04365929412841797, 0.040384159088134766, 0.040630401611328124, 0.04055913543701172, 0.04070800018310547, 0.04075734329223633, 0.04074700927734375, 0.040687614440917966, 0.04060675048828125, 0.04066787338256836, 0.04058086395263672, 0.04074041748046875, 0.040739967346191404, 0.040648448944091795, 0.040675167083740235, 0.04078169631958008, 0.04079849624633789, 0.04121760177612305, 0.04142956924438477, 0.04123641586303711, 0.041094528198242185, 0.04105072021484375, 0.04118233489990234, 0.040933502197265624, 0.04087887954711914, 0.04110281753540039, 0.04121625518798828, 0.04106256103515625, 0.04129743957519531, 0.0413513298034668, 0.041202014923095706, 0.04117071914672851, 0.04105043029785156, 0.04123648071289063, 0.041191646575927734, 0.04106441497802735, 0.04095161437988281, 0.04098463821411133, 0.04091183853149414, 0.04092208099365234, 0.04115353775024414, 0.041229312896728515, 0.04104998397827148, 0.04101337432861328, 0.0409620475769043, 0.04111360168457031, 0.041027904510498044, 0.04109904098510742, 0.04135212707519531, 0.04156105422973633, 0.04149657440185547, 0.04145673751831055, 0.04143196868896484, 0.04129999923706055, 
0.04170339202880859, 0.041457664489746096, 0.04161513519287109, 0.041629920959472655, 0.04150044631958008, 0.04154185485839844, 0.04138323211669922, 0.041633857727050784, 0.04129411315917969, 0.04150435256958008, 0.04087030410766602, 0.0408043212890625, 0.04073270416259766, 0.04073030471801758, 0.04096031951904297, 0.0409804801940918, 0.040908798217773434, 0.040927230834960936, 0.040812416076660155, 0.04083539199829102, 0.04093302536010742, 0.04099497604370117, 0.04092063903808594, 0.04074131011962891, 0.04056268692016601, 0.040671230316162106, 0.04087807846069336, 0.04073062515258789, 0.04080627059936524, 0.04094076919555664, 0.04084624099731445, 0.040769535064697264, 0.040914527893066405, 0.04092764663696289, 0.04105011367797851, 0.041082878112792966, 0.04118527984619141, 0.041560256958007816, 0.04145132827758789, 0.04148019027709961, 0.041417823791503904, 0.04134003067016601, 0.04162335968017578, 0.041654239654541014, 0.041289726257324216, 0.04124982452392578, 0.041237056732177736, 0.041185504913330076, 0.04120390319824219, 0.04109257507324219, 0.04117353439331055, 0.04130799865722656, 0.04137590408325195, 0.04113564682006836, 0.04112841415405273, 0.04097228622436523, 0.04104806518554688, 0.041027584075927735, 0.041111553192138675, 0.04117299270629883, 0.04116889572143555, 0.0414024658203125, 0.04142233657836914, 0.04157247924804688, 0.041570465087890626, 0.041513088226318356, 0.041746177673339845, 0.041586112976074216, 0.041525535583496094, 0.04162393569946289, 0.04161283111572266, 0.041659008026123046, 0.04177027130126953, 0.04081327819824219, 0.040767200469970705, 0.04070329666137695, 0.04074169540405274, 0.04080998229980469, 0.040891040802001954, 0.040871936798095705, 0.04102348709106445, 0.04094915390014649, 0.04083363342285156, 0.04096950531005859, 0.04101542282104492, 0.041273952484130856, 0.040910846710205076, 0.04060160064697266, 0.040796161651611325, 0.041312255859375, 0.04059545516967773, 0.040742942810058594, 0.040691680908203125, 0.04088383865356445, 0.04089484786987305, 0.04074102401733398, 0.04081164932250977, 0.042049983978271484, 0.04095209503173828, 0.041270751953125, 0.041263359069824215, 0.041244510650634766, 0.04107513427734375, 0.04143049621582031, 0.04142544174194336, 0.041248382568359374, 0.041234622955322264, 0.04160940933227539, 0.04148223876953125, 0.04133033752441406, 0.041232734680175784, 0.04122793579101563, 0.041172542572021485, 0.04110825729370117, 0.04104806518554688, 0.041163871765136716, 0.04099983978271484, 0.04111929702758789, 0.04116502380371094, 0.04093267059326172, 0.04103392028808594, 0.041331329345703126, 0.04141884613037109, 0.04141056060791016, 0.04159283065795898, 0.04170547103881836, 0.04138934326171875, 0.041570079803466796, 0.041548480987548826, 0.041609249114990234, 0.041717662811279296, 0.041608673095703125, 0.041592864990234374, 0.04163161468505859, 0.04160195159912109, 0.041605121612548826, 0.04086505508422852, 0.040892574310302736, 0.040739391326904295, 0.040771583557128906, 0.04080844879150391, 0.04079206466674805, 0.040785919189453124, 0.04095747375488281, 0.040976863861083984, 0.040828929901123044, 0.041046016693115236, 0.04103782272338867, 0.04104748916625976, 0.04099545669555664, 0.040791999816894534, 0.04074720001220703, 0.04067216110229492, 0.04071926498413086, 0.04073286437988281, 0.04073382568359375, 0.04081264114379883, 0.04078243255615235, 0.0407501449584961, 0.040817665100097655, 0.04083091354370117, 0.040922847747802735, 0.04094704055786133, 0.041012161254882815, 0.04117721557617188, 0.04159065628051758, 
0.0412589111328125, 0.041201759338378906, 0.04142067337036133, 0.04165337753295899, 0.0414543342590332, 0.04142700958251953, 0.041574592590332034, 0.04148633575439453, 0.041265151977539063, 0.04124444961547852, 0.0412367057800293, 0.04115363311767578, 0.041014175415039066, 0.04100492858886719, 0.04122982406616211, 0.041734302520751956, 0.041053951263427736, 0.04109385681152344, 0.04116889572143555, 0.04131840133666992, 0.04151910400390625, 0.041562145233154296, 0.041637855529785155, 0.04150886535644531, 0.04139116668701172, 0.04145840072631836, 0.0416995849609375, 0.04168057632446289, 0.041820384979248046, 0.04178739166259766, 0.04177081680297851, 0.04155532836914062, 0.0416993293762207, 0.04072739028930664, 0.040796161651611325, 0.04089360046386719, 0.040928096771240235, 0.0409169921875, 0.040756511688232425, 0.040921630859375, 0.04072262573242187, 0.04081024169921875, 0.04078992080688477, 0.04092348861694336, 0.04104806518554688, 0.04103168106079102, 0.041106784820556644, 0.04098892974853516, 0.04089487838745117, 0.04082483291625977, 0.04073241424560547, 0.04086604690551758, 0.04082483291625977, 0.040734912872314455, 0.04083222579956055, 0.040796512603759764, 0.04081894302368164, 0.04098252868652344, 0.04092505645751953, 0.041100608825683595, 0.04126188659667969, 0.041316352844238284, 0.04120774459838867, 0.041406494140625, 0.04135238265991211, 0.04135200119018555, 0.04146588897705078, 0.041702560424804684, 0.04168569564819336, 0.04154403305053711, 0.041568065643310545, 0.041404415130615234, 0.04137068939208984, 0.041385921478271484, 0.04127388763427734, 0.04129337692260742, 0.04131113433837891, 0.041267200469970705, 0.04125900650024414, 0.04132223892211914, 0.04131865692138672, 0.04117500686645508, 0.0415346565246582, 0.041552734375, 0.041420799255371094, 0.04151091384887695, 0.04172560119628906, 0.04153174209594727, 0.04151705551147461, 0.0418120002746582, 0.04169241714477539, 0.04166438293457031, 0.041662593841552735, 0.04175302505493164, 0.04168732833862305, 0.041525279998779294, 0.04071644973754883, 0.0427639045715332, 0.040546688079833984, 0.04062003326416016, 0.040748577117919925, 0.04079244613647461, 0.04070819091796875, 0.04069539260864258, 0.040840927124023436, 0.040970592498779296, 0.040968544006347654, 0.040817760467529295, 0.0410015983581543, 0.04098896026611328, 0.041277183532714846, 0.04131974411010742, 0.04113094329833984, 0.041043968200683595, 0.041240577697753904, 0.04112998580932617, 0.04102556610107422, 0.041045982360839846, 0.04080844879150391, 0.041215614318847654, 0.04114675140380859, 0.04105648040771484, 0.040966175079345704, 0.04105152130126953, 0.04120819091796875, 0.04112793731689453, 0.04117299270629883, 0.041156417846679685, 0.041154369354248044, 0.04122227096557617, 0.041205535888671874, 0.041099742889404295, 0.041316352844238284, 0.041164798736572264, 0.04118310546875, 0.04123660659790039, 0.04152115249633789, 0.04175177764892578, 0.04163177490234375, 0.04143385696411133, 0.041508766174316404, 0.041324638366699216, 0.0410887680053711, 0.04157465744018555, 0.04139212799072266, 0.04129363250732422, 0.04140192031860351, 0.04137823867797852, 0.04143737411499023, 0.041388031005859374, 0.04128124618530273, 0.04119171142578125, 0.041393409729003905, 0.041465919494628904, 0.04166086578369141, 0.04196787261962891, 0.04196556854248047, 0.04184678268432617, 0.041871166229248045, 0.04120943832397461, 0.041054527282714845, 0.040996574401855466, 0.04104841613769531, 0.04087811279296875, 0.041250465393066406, 0.04108707046508789, 0.040960254669189455, 
0.041109504699707033, 0.041078529357910155, 0.04102988815307617, 0.04114150238037109, 0.04137555313110351, 0.04118783950805664, 0.04110790252685547, 0.04078387069702148, 0.040697856903076174, 0.04084067153930664, 0.04091100692749024, 0.040868480682373046, 0.04078566360473633, 0.04080752182006836, 0.04089948654174805, 0.040828929901123044, 0.040828929901123044, 0.04081868743896484, 0.04082681655883789, 0.04089657592773437, 0.041107135772705077, 0.04118764877319336, 0.04100096130371094, 0.041301151275634766, 0.04134998321533203, 0.04145356750488281, 0.0413779182434082, 0.041604991912841796, 0.041705215454101566, 0.04161356735229492, 0.04195305633544922, 0.04177292633056641, 0.04153174209594727, 0.04141017532348633, 0.04125120162963867, 0.04114022445678711, 0.04133257675170898, 0.041279647827148436, 0.041072799682617185, 0.04122009658813477, 0.04122204971313476, 0.041111167907714845, 0.04109958267211914, 0.041527294158935545, 0.041713855743408204, 0.04147385787963867, 0.04151603317260742, 0.04175769424438477, 0.0416255989074707, 0.04167990493774414, 0.04169622421264649, 0.041802913665771484, 0.04176163101196289, 0.04160121536254883, 0.04173839950561523, 0.04086207962036133, 0.04142281723022461, 0.04085356903076172, 0.04094384002685547, 0.040975326538085936, 0.04083804702758789, 0.040736671447753905, 0.04075734329223633, 0.04096979141235352, 0.041021537780761716, 0.041107711791992185, 0.04120387268066406, 0.04124399948120117, 0.04105219268798828, 0.040968673706054684, 0.04099225616455078, 0.04092979049682617, 0.04069132614135742, 0.040737056732177736, 0.04084096145629883, 0.04088662338256836, 0.04074895858764648, 0.040753246307373044, 0.0409169921875, 0.04100710296630859, 0.041102977752685545, 0.041309566497802735, 0.041319423675537106, 0.04159078216552734, 0.04153343963623047, 0.041194496154785154, 0.04111872100830078, 0.04124467086791992, 0.04156595230102539, 0.04166681671142578, 0.04136982345581055, 0.04139811325073242, 0.041619327545166014, 0.041441120147705075, 0.041261280059814456, 0.04125286483764649, 0.0410909423828125, 0.04111167907714844, 0.04101529693603516, 0.04105215835571289, 0.041183456420898434, 0.041540863037109375, 0.04152569580078125, 0.04146185684204102, 0.04139622497558594, 0.041543838500976565, 0.04155081558227539, 0.04158143997192383, 0.04170953750610352, 0.04150185775756836, 0.04142947387695312, 0.04153395080566406, 0.04141455841064453, 0.04156415939331055, 0.041734142303466795, 0.04181196975708008, 0.041761985778808595, 0.04161536026000977, 0.04075872039794922, 0.040849857330322266, 0.0406611213684082, 0.040798175811767576, 0.04100508880615234, 0.040852672576904295, 0.04083363342285156, 0.04085504150390625, 0.0409152946472168, 0.04089689636230469, 0.04093952178955078, 0.04101529693603516, 0.041104862213134766, 0.0409400634765625, 0.04077363204956055, 0.04081180953979492, 0.04086179351806641, 0.04090099334716797, 0.041138111114501955, 0.04108051300048828, 0.04118175888061523, 0.04117510223388672, 0.04106966400146484, 0.040997791290283206, 0.04135523223876953, 0.041130016326904294, 0.04111942291259765, 0.04129606246948242, 0.04141683197021485, 0.041322494506835936, 0.0413040657043457, 0.041338878631591795, 0.041306110382080076, 0.041371105194091796, 0.04122473526000976, 0.04131382369995117, 0.0413836784362793, 0.041264095306396485, 0.040972129821777344, 0.04096940612792969, 0.04115020751953125, 0.04113827133178711, 0.04120256042480469, 0.04146553421020508, 0.04160748672485352, 0.04144079971313477, 0.0412465934753418, 0.04150307083129883, 0.04157171249389648, 
0.0413678092956543, 0.04143299102783203, 0.04148713684082031, 0.04147785568237305, 0.04172822570800781, 0.04167222213745117, 0.04167222213745117, 0.04155481719970703, 0.041592510223388675, 0.041576831817626954, 0.041523200988769535, 0.04178851318359375, 0.04175350570678711, 0.04163404846191406, 0.04076051330566406, 0.0436580810546875, 0.04051145553588867, 0.041133056640625, 0.04156927871704102, 0.04145151901245117, 0.0412608642578125, 0.04272528076171875, 0.0409398078918457, 0.040992191314697266, 0.04126367950439453, 0.041250816345214845, 0.041560222625732425, 0.041344863891601566, 0.04106444931030274, 0.04105011367797851, 0.041027488708496096, 0.041006206512451175, 0.04095840072631836, 0.040976287841796875, 0.04091289520263672, 0.040747840881347655, 0.04080620956420898, 0.04086943817138672, 0.04091334533691406, 0.04084326553344726, 0.040825855255126955, 0.040864192962646484, 0.04092291259765625, 0.04111199951171875, 0.04127942276000977, 0.041527713775634766, 0.04160908889770508, 0.042796257019042966, 0.041450401306152344, 0.04145971298217774, 0.04142272186279297, 0.041527328491210935, 0.04123657608032227, 0.04125711822509766, 0.04139811325073242, 0.041290782928466795, 0.0414156494140625, 0.041191425323486325, 0.04126876831054688, 0.04122671890258789, 0.04129532623291016, 0.04125244903564453, 0.04132140731811523, 0.04143452835083008, 0.04168172836303711, 0.041638721466064454, 0.04155081558227539, 0.041391136169433594, 0.04210172653198242, 0.04195315170288086, 0.04189798355102539, 0.04170940780639648, 0.04184297561645508, 0.04156428909301758, 0.04147148895263672, 0.04161808013916016]",tokens/s,24.263801764434934,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4972.916736,7338.917888,0.0,6943.670272,6539.1744,s,1,11.5805625,11.5805625,0.0,11.5805625,11.5805625,11.5805625,11.5805625,[11.5805625],,kWh,0.00012860236552916527,1.4178698316460877e-05,3.937392038799725e-05,0.00018215498423362339,,MB,4882.82112,7634.61632,0.0,7224.68864,6917.39904,s,10,2.0647067413330076,0.20647067413330075,0.000531730646215658,0.20658805084228515,0.20704199523925781,0.20706782684326172,0.20708849212646485,"[0.20546876525878907, 0.20618710327148437, 0.2056812744140625, 0.20671177673339844, 0.2066510772705078, 0.20633375549316407, 0.2065250244140625, 0.20709365844726563, 0.2070362548828125, 0.20701805114746094]",tokens/s,1239.8855240561782,kWh,6.064707978571061e-06,6.688289937004971e-07,4.0114261116328494e-06,1.0744963083904409e-05,tokens/kWh,23825116.754796427,MB,4887.052288,7636.713472,0.0,7226.785792,6917.4016,s,10,21.867404052734372,2.1867404052734374,0.012244226575833619,2.1830804443359373,2.2044777587890625,2.2071846801757813,2.2093502172851562,"[2.192820556640625, 2.176710205078125, 2.203876220703125, 2.170734130859375, 2.184782470703125, 2.18137841796875, 2.2098916015625, 2.192957763671875, 2.1802666015625, 
2.173986083984375]",tokens/s,28.810004080992993,kWh,6.411299401559425e-05,7.070647166017867e-06,4.264413899056652e-05,0.00011382778017217864,tokens/kWh,553467.7027409713,,s,630,21.86504935073855,0.034706427540854796,0.0005855559582517047,0.03455833435058594,0.035169828033447266,0.035670738410949705,0.03758546409606934,"[0.035383296966552735, 0.03546527862548828, 0.034865119934082034, 0.034340545654296874, 0.0344681282043457, 0.03508019256591797, 0.03487744140625, 0.03448604965209961, 0.034574558258056644, 0.03421184158325195, 0.034609153747558595, 0.034402305603027344, 0.0343900146484375, 0.03457206344604492, 0.03493500900268555, 0.03437564849853516, 0.03462047958374023, 0.03440534210205078, 0.03441401672363281, 0.03501523208618164, 0.03827711868286133, 0.03582566452026367, 0.03512115097045899, 0.03474163055419922, 0.034491008758544925, 0.03439759826660156, 0.03435708618164063, 0.03478598403930664, 0.03515558242797852, 0.035756481170654296, 0.03479347229003906, 0.03462870407104492, 0.03438431930541992, 0.03434134292602539, 0.034705406188964845, 0.03537052917480469, 0.03477689743041992, 0.03476505661010742, 0.034521503448486326, 0.037220352172851565, 0.03622873687744141, 0.03523827362060547, 0.035151264190673825, 0.034589279174804685, 0.034840576171875, 0.03438796615600586, 0.034369537353515625, 0.03425404739379883, 0.0344359359741211, 0.03440428924560547, 0.0343521614074707, 0.03429475021362305, 0.03449446487426758, 0.034680831909179685, 0.034566368103027344, 0.0342606086730957, 0.03461955261230469, 0.03428707122802734, 0.03444380950927734, 0.034484222412109376, 0.03466854476928711, 0.03540486526489258, 0.03423235321044922, 0.03514518356323242, 0.034751007080078125, 0.034376895904541016, 0.034328895568847655, 0.034202110290527346, 0.03420159912109375, 0.034181121826171876, 0.03435724639892578, 0.03453952026367187, 0.034252799987792966, 0.03416985702514649, 0.03419558334350586, 0.0340975341796875, 0.03408127975463867, 0.03436646270751953, 0.034349086761474606, 0.034210784912109375, 0.03429296112060547, 0.034315040588378906, 0.0341130256652832, 0.034143936157226565, 0.03417782211303711, 0.0341341438293457, 0.03432172775268555, 0.034269790649414066, 0.03410259246826172, 0.03460156631469727, 0.034645824432373046, 0.03487772750854492, 0.034444480895996096, 0.034327136993408204, 0.035087711334228514, 0.034548095703125, 0.03440867233276367, 0.03445993423461914, 0.034351104736328124, 0.03418521499633789, 0.034289600372314454, 0.034439231872558596, 0.034326526641845705, 0.034327968597412106, 0.03429548645019531, 0.03474905776977539, 0.03491459274291992, 0.034969600677490234, 0.034523136138916014, 0.03429580688476563, 0.03424665451049805, 0.034178943634033204, 0.0342795524597168, 0.03480083084106445, 0.03489436721801758, 0.034826526641845705, 0.034590721130371094, 0.034614368438720705, 0.0349002571105957, 0.03524774551391602, 0.03514265441894531, 0.034912254333496096, 0.03476844787597656, 0.03858598327636719, 0.03489056015014649, 0.03483395385742188, 0.035860481262207033, 0.034970783233642576, 0.03497865676879883, 0.034830337524414064, 0.034852863311767575, 0.034842464447021486, 0.03505372619628906, 0.034774654388427736, 0.034885089874267576, 0.03499030303955078, 0.036324031829833986, 0.03523539352416992, 0.03790665435791016, 0.034979934692382815, 0.03522496032714844, 0.03500262451171875, 0.03609446334838867, 0.03530137634277344, 0.035108863830566404, 0.03628441619873047, 0.034846145629882815, 0.034873920440673827, 0.03482009506225586, 0.034590721130371094, 0.03460300827026367, 0.03469903945922852, 
0.03451087951660156, 0.034609344482421874, 0.03575603103637695, 0.0371234245300293, 0.03498604965209961, 0.03466819381713867, 0.03481011199951172, 0.03500921630859375, 0.03504489517211914, 0.03557833480834961, 0.035133438110351564, 0.034857185363769534, 0.03485878372192383, 0.034977790832519534, 0.03533148956298828, 0.03495792007446289, 0.034971649169921876, 0.03507814407348633, 0.03519833755493164, 0.034982528686523434, 0.03481510543823242, 0.03504729461669922, 0.03437875366210937, 0.034307262420654294, 0.03418195343017578, 0.03435059356689453, 0.03434288024902344, 0.03417961502075195, 0.03421734237670898, 0.03421043014526367, 0.03466239929199219, 0.03456819152832031, 0.03448953628540039, 0.03433555221557617, 0.034309791564941405, 0.03424812698364258, 0.03462851333618164, 0.03585871887207031, 0.03474198532104492, 0.03442512130737305, 0.03425689697265625, 0.03439334487915039, 0.034706016540527344, 0.034422977447509766, 0.03537267303466797, 0.03451555252075195, 0.034283199310302735, 0.03457855987548828, 0.034547134399414064, 0.03437318420410156, 0.034390399932861325, 0.03418310546875, 0.03422681427001953, 0.034487648010253905, 0.03431046295166015, 0.03424895858764648, 0.03416796875, 0.034243518829345704, 0.03425075149536133, 0.03499008178710938, 0.035166206359863283, 0.03484844970703125, 0.034462017059326173, 0.034342910766601564, 0.0343633918762207, 0.03454489517211914, 0.034530048370361326, 0.03426508712768555, 0.03424460983276367, 0.03423436737060547, 0.03437158584594727, 0.03433062362670898, 0.03426496124267578, 0.034307872772216794, 0.03436576080322266, 0.034359329223632815, 0.03426303863525391, 0.034275230407714845, 0.03422627258300781, 0.03446499252319336, 0.03455875015258789, 0.034264225006103516, 0.03445817565917969, 0.034414878845214845, 0.03435443115234375, 0.03443584060668945, 0.03439311981201172, 0.03420665740966797, 0.034514976501464845, 0.03445673751831055, 0.03426089477539063, 0.03437593460083008, 0.034673118591308595, 0.03444758224487305, 0.03443273544311523, 0.03468521499633789, 0.03430604934692383, 0.03439411163330078, 0.03429929733276367, 0.03437424087524414, 0.03569558334350586, 0.03451644897460938, 0.03454127883911133, 0.03460185623168945, 0.034391841888427734, 0.03443059158325195, 0.034431358337402344, 0.03421184158325195, 0.03422822570800781, 0.03460710525512695, 0.034369537353515625, 0.03435273742675781, 0.03429417419433594, 0.034536705017089844, 0.03442764663696289, 0.034770942687988284, 0.0347852783203125, 0.03446755218505859, 0.03488796615600586, 0.03423231887817383, 0.034557918548583984, 0.03499193572998047, 0.034810142517089845, 0.03436076736450195, 0.0343741455078125, 0.034293758392333985, 0.034525184631347655, 0.03470745468139649, 0.034668254852294925, 0.03450806427001953, 0.034411518096923825, 0.034631679534912106, 0.03495657730102539, 0.03825328063964844, 0.03508838272094727, 0.035054656982421876, 0.03502166366577148, 0.03472127914428711, 0.03472035217285156, 0.034772991180419925, 0.03480166244506836, 0.03455292892456055, 0.03433564758300781, 0.03418102264404297, 0.034428417205810545, 0.034474273681640626, 0.03449055862426758, 0.03452121734619141, 0.03449046325683594, 0.034385887145996094, 0.03457222366333008, 0.03500783920288086, 0.03731737518310547, 0.03470121765136719, 0.03450182342529297, 0.03433292770385742, 0.03451347351074219, 0.03483238220214844, 0.03473628616333008, 0.034369281768798825, 0.03430307388305664, 0.03420876693725586, 0.03427139282226563, 0.03526095962524414, 0.034813438415527344, 0.03427148818969727, 0.034119937896728514, 
0.037425151824951174, 0.03510015869140625, 0.034859073638916015, 0.03441196823120117, 0.0342210578918457, 0.03433657455444336, 0.03422841644287109, 0.0346126708984375, 0.034230144500732425, 0.034143070220947265, 0.034312030792236325, 0.034179073333740234, 0.03425475311279297, 0.03425654220581055, 0.03444710540771485, 0.03420230484008789, 0.03418511962890625, 0.034993473052978515, 0.03419331359863281, 0.034382495880126956, 0.03446185684204101, 0.0343573112487793, 0.03478483200073242, 0.03506227111816406, 0.03484988784790039, 0.035019615173339846, 0.03506089782714844, 0.03451737594604492, 0.03441872024536133, 0.034319072723388674, 0.03426444625854492, 0.03526092910766602, 0.034631233215332034, 0.03420332717895508, 0.03447663879394531, 0.03438713455200195, 0.034433502197265625, 0.03515119934082031, 0.03472803115844727, 0.034501537322998044, 0.03450201416015625, 0.03431897735595703, 0.034576385498046876, 0.0354463996887207, 0.03474060821533203, 0.034516990661621096, 0.03443119812011719, 0.034244384765625, 0.03504127883911133, 0.0344535026550293, 0.03428966522216797, 0.03451903915405274, 0.03449174499511719, 0.03457011032104492, 0.03472054290771484, 0.034715648651123046, 0.034631679534912106, 0.03510067367553711, 0.035460670471191405, 0.03617792129516602, 0.03544588851928711, 0.035445632934570315, 0.03527651214599609, 0.03516854476928711, 0.03502454376220703, 0.03522800064086914, 0.035053569793701174, 0.0350904312133789, 0.03529852676391602, 0.035643775939941405, 0.03508060836791992, 0.03479462432861328, 0.03515071868896484, 0.03520512008666992, 0.03471887969970703, 0.0346673583984375, 0.03489177703857422, 0.034767967224121094, 0.03474319839477539, 0.034988033294677735, 0.03450611114501953, 0.035928638458251956, 0.034592830657958984, 0.03826073455810547, 0.035218814849853515, 0.03497395324707031, 0.0348834228515625, 0.03549443054199219, 0.03580867385864258, 0.035465217590332034, 0.035076095581054685, 0.03491219329833985, 0.03510953521728516, 0.03474764633178711, 0.034599391937255856, 0.03511548614501953, 0.03511865615844727, 0.03622886276245117, 0.03478486251831055, 0.03472025680541992, 0.034793407440185546, 0.036690399169921874, 0.034783233642578126, 0.035119102478027346, 0.034598911285400394, 0.03458047866821289, 0.034729984283447264, 0.03481923294067383, 0.03510099029541015, 0.0346794548034668, 0.03464944076538086, 0.03460051345825195, 0.0350579833984375, 0.03479420852661133, 0.034616737365722655, 0.03467932891845703, 0.03500611114501953, 0.03433232116699219, 0.03457913589477539, 0.034662208557128905, 0.0347421760559082, 0.03463372802734375, 0.03555049514770508, 0.0348436164855957, 0.034705310821533206, 0.03475667190551758, 0.034533409118652346, 0.03448831939697266, 0.03509455871582031, 0.0346333122253418, 0.03463161468505859, 0.03461103820800781, 0.034553886413574215, 0.034652225494384764, 0.03765094375610351, 0.03582361602783203, 0.035243297576904295, 0.034902751922607424, 0.03481190490722656, 0.03511705780029297, 0.034598911285400394, 0.03457024002075195, 0.03466239929199219, 0.0345272331237793, 0.03461939239501953, 0.03454742431640625, 0.03454611206054688, 0.03457443237304687, 0.03480678558349609, 0.03468767929077148, 0.034695232391357425, 0.035844097137451174, 0.034871295928955076, 0.034732032775878906, 0.034789215087890624, 0.03477231979370117, 0.03518137741088867, 0.035127296447753906, 0.03534963226318359, 0.03474956893920898, 0.03492633438110351, 0.03499327850341797, 0.03481484985351563, 0.035098785400390624, 0.03511689758300781, 0.035091552734375, 0.034753406524658206, 
0.03512527847290039, 0.034523136138916014, 0.034484222412109376, 0.03450265502929688, 0.034340801239013674, 0.03436345672607422, 0.0342724494934082, 0.03434364700317383, 0.03437577438354492, 0.03443283081054688, 0.03461344146728516, 0.034514846801757815, 0.0344351692199707, 0.03446988677978516, 0.03461478424072266, 0.034531455993652344, 0.034523521423339844, 0.03461686325073242, 0.035692798614501954, 0.03473372650146484, 0.034605087280273436, 0.03464681625366211, 0.034457599639892575, 0.03476684951782227, 0.034952865600585935, 0.03733462524414063, 0.03452620697021484, 0.03428326416015625, 0.034223583221435545, 0.03419395065307617, 0.03425791931152344, 0.0342083511352539, 0.034216190338134764, 0.03414236831665039, 0.03446579360961914, 0.03442486572265625, 0.034203262329101564, 0.03424905776977539, 0.034917377471923826, 0.03472710418701172, 0.03467977523803711, 0.03486403274536133, 0.03462044906616211, 0.03430249786376953, 0.0341712646484375, 0.034260990142822266, 0.034723838806152346, 0.034602718353271486, 0.034471424102783206, 0.034377761840820316, 0.034406558990478515, 0.03420537567138672, 0.034331390380859375, 0.034216094970703125, 0.03435724639892578, 0.03419443130493164, 0.03419647979736328, 0.03409100723266602, 0.034541568756103515, 0.03470876693725586, 0.03433567810058594, 0.03512092971801758, 0.035901439666748046, 0.03781017684936523, 0.03492643356323242, 0.034694366455078125, 0.03455481719970703, 0.034385921478271485, 0.03443241500854492, 0.03452959823608399, 0.034936481475830075, 0.034527870178222654, 0.034293758392333985, 0.03423027038574219, 0.03423385620117188, 0.034386238098144534, 0.03443471908569336, 0.03435996627807617, 0.03460287857055664, 0.034431262969970705, 0.034362529754638674, 0.035893310546875, 0.034924831390380856, 0.03462732696533203, 0.03475900650024414, 0.03450028610229492, 0.034411903381347655, 0.0344073600769043, 0.03512704086303711, 0.03439231872558594, 0.03427315139770508, 0.03467891311645508, 0.03491420745849609, 0.03474147033691406, 0.03458038330078125, 0.03452387237548828, 0.034383262634277344, 0.03433087921142578, 0.03433942413330078, 0.03460095977783203, 0.034582527160644534, 0.03457228851318359, 0.034617439270019534, 0.03452099227905273, 0.03501049423217773, 0.0344532470703125, 0.03424310302734375, 0.034367488861083983, 0.03439139175415039, 0.03419180679321289, 0.034362846374511716, 0.03429840087890625, 0.03424870300292969, 0.03447795104980469, 0.034479969024658205, 0.034483486175537106, 0.03453440093994141, 0.03418684768676758, 0.034191776275634765, 0.03423846435546875, 0.03419955062866211, 0.03426444625854492, 0.03470195388793945, 0.03451023864746094, 0.034802112579345706, 0.034436927795410154, 0.03456195068359375, 0.03440806579589844, 0.034502494812011716, 0.0343087043762207, 0.03430822372436523, 0.034234622955322265, 0.03435520172119141, 0.0347852783203125, 0.034290687561035156, 0.03451916885375977, 0.03442777633666992, 0.034385921478271485, 0.034371200561523436, 0.034649856567382814, 0.03453152084350586, 0.03454572677612305, 0.034378047943115234, 0.034401599884033206]",tokens/s,28.8131067025797,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,2684.260352,3212.705792,0.0,2944.401408,2910.225408,s,1,9.6064462890625,9.6064462890625,0.0,9.6064462890625,9.6064462890625,9.6064462890625,9.6064462890625,[9.6064462890625],,kWh,6.754144740001115e-05,7.443083564067278e-06,2.0051960486017206e-05,9.503649145009563e-05,,MB,1611.07968,3565.027328,0.0,3147.825152,3105.082368,s,10,0.9440026779174806,0.09440026779174805,0.0003043603008207187,0.09437142181396485,0.09482877197265625,0.09484579391479492,0.09485941146850586,"[0.09401446533203126, 0.09411494445800782, 0.0948628158569336, 0.09404029083251954, 0.09437359619140626, 0.09436924743652343, 0.09477833557128906, 0.094389404296875, 0.09482498931884766, 0.09423458862304687]",tokens/s,2711.8567138469293,kWh,2.801556002500042e-06,3.0895930109446105e-07,1.8568692103619677e-06,4.9673845139564706e-06,tokens/kWh,51536175.48243686,MB,1620.819968,3565.027328,0.0,3147.825152,3105.084928,s,10,10.170398986816405,1.0170398986816407,0.0115282953246402,1.0197572021484373,1.0280236206054687,1.029098907470703,1.0299591369628907,"[1.0107721557617189, 1.02778466796875, 1.0301741943359375, 1.0223877563476562, 1.0118225708007813, 1.025399658203125, 1.0051096801757813, 1.0276883544921875, 1.0171266479492187, 0.99213330078125]",tokens/s,61.94447246530355,kWh,2.9900598412497308e-05,3.297575479274584e-06,1.9296647712438574e-05,5.249482160421045e-05,tokens/kWh,1200118.375008383,,s,630,10.168211554527296,0.016140018340519497,0.00031636181920435976,0.016169535636901856,0.016429669761657717,0.016535739135742186,0.017127364082336425,"[0.015831135749816896, 0.01573843193054199, 0.015859392166137694, 0.015769887924194335, 0.01597100830078125, 0.015707776069641112, 0.015569055557250976, 0.01559552001953125, 0.01569814395904541, 0.01581670379638672, 0.015725600242614747, 0.015688159942626952, 0.01583769607543945, 0.0159454402923584, 0.01574326419830322, 0.015607808113098144, 0.016570367813110352, 0.015714303970336914, 0.01574672031402588, 0.015814208030700685, 0.015745823860168456, 0.01568348789215088, 0.01574307155609131, 0.01636966323852539, 0.016012544631958007, 0.015919872283935547, 0.01611097526550293, 0.01605900764465332, 0.01600307273864746, 0.0161213436126709, 0.01649510383605957, 0.0161298885345459, 0.015887999534606935, 0.01604457664489746, 0.015978495597839357, 0.015926783561706542, 0.016156543731689454, 0.01607926368713379, 0.016271583557128905, 0.016068159103393556, 0.01612972831726074, 0.0163110408782959, 0.016259071350097656, 0.016300031661987305, 0.016124000549316408, 0.01604390335083008, 0.016295551300048828, 0.016331167221069337, 0.01642927932739258, 0.016178272247314454, 0.01616864013671875, 0.01632972717285156, 0.01636672019958496, 0.01674336051940918, 0.016434879302978517, 0.016136447906494142, 0.01627136039733887, 0.016316064834594725, 0.016293792724609374, 0.0162509765625, 
0.01584163188934326, 0.01606662368774414, 0.01617299270629883, 0.016404415130615236, 0.016224159240722656, 0.016545856475830078, 0.01630691146850586, 0.016316415786743164, 0.016103071212768556, 0.016099679946899415, 0.016312320709228514, 0.016236032485961914, 0.016265920639038086, 0.01611244773864746, 0.016172319412231444, 0.01635196876525879, 0.016150688171386717, 0.016497503280639647, 0.01610108757019043, 0.016146720886230467, 0.01651273536682129, 0.016470399856567383, 0.016304031372070312, 0.01617856025695801, 0.016212640762329103, 0.016447168350219726, 0.01647439956665039, 0.016185663223266603, 0.016338783264160155, 0.01626915168762207, 0.01634675216674805, 0.016404127120971678, 0.01631305694580078, 0.016242111206054687, 0.01619993591308594, 0.01637571144104004, 0.016433183670043944, 0.016576223373413086, 0.016467872619628905, 0.016355775833129884, 0.016315872192382813, 0.01629475212097168, 0.016104448318481446, 0.01615667152404785, 0.016524608612060548, 0.01633145523071289, 0.016422111511230467, 0.016479040145874025, 0.01642185592651367, 0.016076032638549804, 0.01595644760131836, 0.016194175720214844, 0.01644304084777832, 0.016410367965698242, 0.016281984329223634, 0.016302175521850586, 0.01638377571105957, 0.01649667167663574, 0.01658790397644043, 0.016362079620361326, 0.016166879653930665, 0.016188831329345704, 0.016407424926757813, 0.016386144638061522, 0.01617296028137207, 0.016232448577880858, 0.016381952285766603, 0.016361183166503906, 0.016447423934936523, 0.016190879821777342, 0.016282560348510743, 0.016422239303588868, 0.016181919097900392, 0.01630240058898926, 0.015963104248046874, 0.01605276870727539, 0.016252639770507813, 0.016242271423339845, 0.016048511505126952, 0.016245248794555665, 0.016105472564697267, 0.01600307273864746, 0.016244735717773438, 0.01617919921875, 0.01612995147705078, 0.016103519439697265, 0.01642406463623047, 0.01624153518676758, 0.01623478317260742, 0.016221887588500978, 0.016188671112060547, 0.01637171173095703, 0.01637046432495117, 0.0163288631439209, 0.016176992416381836, 0.01606380844116211, 0.016288448333740234, 0.016324607849121094, 0.016398048400878905, 0.01861020851135254, 0.01627280044555664, 0.016186208724975587, 0.016439104080200197, 0.016518688201904295, 0.016491167068481444, 0.016256000518798826, 0.016272544860839844, 0.01646886444091797, 0.016345823287963867, 0.01741814422607422, 0.017447423934936524, 0.016278976440429686, 0.016341407775878905, 0.01628489685058594, 0.01636252784729004, 0.016294784545898437, 0.01622105598449707, 0.016110815048217774, 0.016123743057250978, 0.0165098876953125, 0.01659903907775879, 0.016880928039550783, 0.016179264068603514, 0.01618921661376953, 0.01628384017944336, 0.016224960327148437, 0.01620992088317871, 0.016109567642211914, 0.016236543655395508, 0.016451583862304688, 0.016250879287719726, 0.01634272003173828, 0.016397855758666993, 0.01627414321899414, 0.01626947212219238, 0.016045375823974608, 0.016364128112792968, 0.01621811294555664, 0.01615897560119629, 0.016229248046875, 0.016100032806396485, 0.016222335815429687, 0.016195615768432616, 0.01615679931640625, 0.01626323127746582, 0.01620524787902832, 0.016316511154174804, 0.016218431472778322, 0.01629747200012207, 0.016321023941040038, 0.016207136154174805, 0.016227296829223633, 0.01614348793029785, 0.016380224227905273, 0.016140607833862303, 0.016056415557861328, 0.016094560623168944, 0.016245311737060546, 0.016363519668579102, 0.016359424591064452, 0.01706188774108887, 0.017110815048217775, 0.01639241600036621, 0.016205631256103515, 
0.01623484802246094, 0.0162093448638916, 0.016080352783203126, 0.016159360885620117, 0.016123775482177735, 0.016169504165649416, 0.01617417526245117, 0.016136959075927736, 0.0162193603515625, 0.016347999572753905, 0.01614028739929199, 0.016223936080932616, 0.016175424575805664, 0.016158304214477538, 0.016134559631347658, 0.01617919921875, 0.016146432876586913, 0.016115583419799805, 0.01612713623046875, 0.016192895889282227, 0.016057952880859375, 0.016107488632202148, 0.01592732810974121, 0.016318464279174806, 0.016095232009887696, 0.016037887573242187, 0.015956000328063966, 0.0159454402923584, 0.017944448471069335, 0.016228736877441405, 0.016220703125, 0.016138240814208983, 0.016211999893188476, 0.016175327301025392, 0.016075679779052734, 0.01611452865600586, 0.01608252716064453, 0.016205440521240233, 0.016372512817382813, 0.01618739128112793, 0.016215583801269532, 0.016204256057739258, 0.016197120666503906, 0.016208383560180666, 0.01633807945251465, 0.016212736129760742, 0.016244831085205077, 0.01630419158935547, 0.016194719314575196, 0.016169567108154297, 0.016149728775024415, 0.015907039642333986, 0.016048160552978516, 0.016337631225585937, 0.01600921630859375, 0.015890432357788087, 0.0159902400970459, 0.016081119537353517, 0.01609760093688965, 0.015933440208435058, 0.015865856170654297, 0.015980544090270995, 0.015987839698791504, 0.01593139171600342, 0.015754112243652342, 0.0158023681640625, 0.015795712471008302, 0.015731200218200684, 0.015681568145751952, 0.01565113639831543, 0.015770400047302246, 0.015745920181274416, 0.01577779197692871, 0.015804415702819825, 0.016075008392333983, 0.016015039443969727, 0.015800031661987304, 0.01581683158874512, 0.015745247840881348, 0.015931103706359865, 0.01589891242980957, 0.016189504623413085, 0.015991935729980467, 0.015858495712280273, 0.01594940757751465, 0.01606492805480957, 0.015901856422424317, 0.015796735763549806, 0.01595631980895996, 0.016056480407714843, 0.016582496643066408, 0.016349376678466795, 0.016104480743408204, 0.016049184799194337, 0.01607468795776367, 0.0160317440032959, 0.01597439956665039, 0.01623478317260742, 0.016202880859375, 0.01639894485473633, 0.016182559967041016, 0.016124095916748047, 0.01624937629699707, 0.016223936080932616, 0.01615216064453125, 0.01612054443359375, 0.016144159317016602, 0.0162589111328125, 0.016354976654052736, 0.01622006416320801, 0.016095327377319335, 0.016126239776611328, 0.016222719192504884, 0.016242624282836914, 0.016218080520629882, 0.01677302360534668, 0.016195775985717774, 0.016351167678833007, 0.01633791923522949, 0.016445600509643554, 0.016386720657348634, 0.016223552703857422, 0.016171903610229493, 0.016172927856445314, 0.01619161605834961, 0.01637580871582031, 0.0162194881439209, 0.01628432083129883, 0.016277503967285157, 0.016164768218994142, 0.016584800720214843, 0.016688447952270508, 0.016339391708374024, 0.016379871368408204, 0.016353343963623045, 0.017129535675048827, 0.016281984329223634, 0.016227743148803712, 0.01628607940673828, 0.01618729591369629, 0.016225759506225585, 0.01620368003845215, 0.016214752197265626, 0.016312320709228514, 0.016275711059570312, 0.016371456146240235, 0.01643724822998047, 0.016242368698120117, 0.016407167434692383, 0.016201408386230468, 0.01614028739929199, 0.01622345542907715, 0.016189504623413085, 0.016341888427734375, 0.016336223602294923, 0.016035903930664064, 0.01602403259277344, 0.016113792419433594, 0.01614339256286621, 0.015969247817993165, 0.015898719787597656, 0.015913887977600096, 0.015958815574645995, 0.0158787202835083, 
0.01572544002532959, 0.015647040367126466, 0.015806943893432616, 0.015751168251037596, 0.015699968338012696, 0.015697919845581054, 0.015813695907592774, 0.015792832374572754, 0.015800576210021972, 0.015685407638549805, 0.015702176094055175, 0.015808799743652343, 0.01599075222015381, 0.01597772789001465, 0.015911487579345702, 0.015874048233032227, 0.016062463760375977, 0.015992416381835937, 0.015943967819213867, 0.015752511978149412, 0.016255231857299806, 0.0160753288269043, 0.01599513626098633, 0.015951616287231445, 0.01592319965362549, 0.016022880554199218, 0.016116384506225587, 0.015990847587585448, 0.015887807846069336, 0.01607731246948242, 0.01598361587524414, 0.015970368385314942, 0.015985631942749025, 0.0159618558883667, 0.016035999298095703, 0.016052288055419923, 0.016044095993041994, 0.015871935844421387, 0.016223712921142577, 0.015935040473937987, 0.01601020812988281, 0.01614847946166992, 0.016107328414916994, 0.01596230411529541, 0.01598204803466797, 0.015929887771606446, 0.015810303688049317, 0.015890656471252443, 0.01612393569946289, 0.015979968070983886, 0.015980544090270995, 0.01590124797821045, 0.015984928131103516, 0.01701241683959961, 0.01628191947937012, 0.01621366310119629, 0.016044384002685548, 0.016026847839355467, 0.01606118392944336, 0.016087360382080078, 0.016192480087280272, 0.016118688583374022, 0.016152191162109374, 0.016180736541748047, 0.01617584037780762, 0.016160800933837892, 0.016258304595947265, 0.016253120422363283, 0.016108064651489257, 0.016146432876586913, 0.016158720016479493, 0.01609503936767578, 0.016175296783447264, 0.016234495162963866, 0.01617852783203125, 0.016144800186157226, 0.016125280380249022, 0.017122047424316406, 0.016279712677001953, 0.016228160858154296, 0.01630841636657715, 0.016166912078857423, 0.016257280349731444, 0.01631964874267578, 0.016310880661010742, 0.016297983169555663, 0.016209856033325195, 0.016234655380249024, 0.017305599212646485, 0.01627916717529297, 0.01618351936340332, 0.016215232849121092, 0.01616972732543945, 0.018140832901000978, 0.016774784088134764, 0.016427871704101562, 0.016349184036254884, 0.016220159530639648, 0.016252479553222655, 0.016276031494140624, 0.01623027229309082, 0.016310272216796876, 0.01618943977355957, 0.016246688842773437, 0.01623049545288086, 0.016272384643554686, 0.01617193603515625, 0.016283744812011718, 0.01618124771118164, 0.01621196746826172, 0.016219520568847658, 0.016226943969726564, 0.016316415786743164, 0.016450559616088867, 0.016487424850463867, 0.016492544174194337, 0.016713727951049806, 0.016512704849243165, 0.016562496185302734, 0.01630204772949219, 0.01639459228515625, 0.01647529602050781, 0.01651513671875, 0.0167490234375, 0.016494592666625976, 0.016541311264038086, 0.01634867286682129, 0.016358144760131837, 0.01641689682006836, 0.016528928756713867, 0.01655881690979004, 0.01648806381225586, 0.01636944007873535, 0.01627769660949707, 0.016388256072998045, 0.016339263916015624, 0.016275007247924803, 0.016140352249145506, 0.016220224380493163, 0.016187007904052735, 0.016167295455932616, 0.01614371109008789, 0.016029407501220703, 0.016061471939086913, 0.016088768005371092, 0.016167135238647462, 0.016207136154174805, 0.01589964771270752, 0.015951168060302733, 0.016261791229248045, 0.016116512298583983, 0.01585568046569824, 0.0157991361618042, 0.0156725435256958, 0.01608563232421875, 0.01621824073791504, 0.016582048416137696, 0.015858367919921876, 0.015958016395568847, 0.01592147159576416, 0.015855232238769532, 0.015789536476135253, 0.015993535995483397, 0.015877087593078612, 
0.016288415908813476, 0.01605392074584961, 0.01586240005493164, 0.015794400215148927, 0.015832063674926757, 0.015906975746154784, 0.015677120208740233, 0.015616959571838378, 0.015654656410217287, 0.015982463836669923, 0.016054655075073244, 0.015914079666137695, 0.015936415672302248, 0.01583084774017334, 0.015806655883789062, 0.016576543807983398, 0.01678883171081543, 0.016880319595336913, 0.01594976043701172, 0.0158886079788208, 0.015750944137573244, 0.015790080070495604, 0.015734784126281737, 0.015821951866149903, 0.015693920135498047, 0.01595584011077881, 0.015821791648864745, 0.015855551719665528, 0.015906463623046874, 0.015786335945129393, 0.015755040168762207, 0.015621439933776855, 0.01567840003967285, 0.015576543807983399, 0.015622464179992676, 0.015684831619262696, 0.01565932846069336, 0.015616640090942383, 0.01565283203125, 0.01576966381072998, 0.015640576362609862, 0.015757023811340334, 0.015604000091552734, 0.015871999740600586, 0.015730591773986818, 0.01568278408050537, 0.015571840286254883, 0.015573087692260743, 0.015628191947937013, 0.015659008026123047, 0.01560313606262207, 0.015621983528137207, 0.01568841552734375, 0.015699135780334474, 0.01558351993560791, 0.015564352035522462, 0.015610848426818848, 0.015738880157470703, 0.0157575044631958, 0.01565676784515381, 0.015535584449768066, 0.015599391937255859, 0.015617055892944336, 0.015754143714904786, 0.015765664100646972, 0.015669440269470213, 0.015659520149230957, 0.015703776359558105, 0.015607359886169434, 0.015606464385986328, 0.015611328125, 0.015620927810668945, 0.015589119911193848, 0.015640576362609862, 0.01577603244781494, 0.015720191955566405, 0.015720704078674316, 0.015668959617614747]",tokens/s,61.95779824422511,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4628.680704,6708.658176,0.0,6306.136064,6200.409088,s,1,11.1948701171875,11.1948701171875,0.0,11.1948701171875,11.1948701171875,11.1948701171875,11.1948701171875,[11.1948701171875],,kWh,0.00011492607883751967,1.267004279174663e-05,3.635058463599683e-05,0.00016394670626526312,,MB,4724.453376,7736.262656,0.0,7316.963328,6846.674432,s,10,5.565386474609375,0.5565386474609375,0.004543577740426309,0.5558000793457032,0.5599786926269531,0.563558560180664,0.5664224542236328,"[0.5563475341796875, 0.5591761474609375, 0.5585891723632812, 0.551018310546875, 0.5552526245117188, 0.5544142456054687, 0.55166357421875, 0.567138427734375, 0.5591831665039062, 0.552603271484375]",tokens/s,459.98602463267065,kWh,1.618756798903521e-05,1.785219289178254e-06,9.320285233999593e-06,2.7293072512213056e-05,tokens/kWh,9379669.507177895,MB,4734.0544,7740.45696,0.0,7321.157632,6846.676992,s,10,35.3519951171875,3.53519951171875,0.017290708206461954,3.5318414306640626,3.560785302734375,3.564649853515625,3.567741494140625,"[3.536116455078125, 3.5410595703125, 3.522985595703125, 3.52010546875, 3.513117431640625, 3.52756640625, 
3.559926513671875, 3.568514404296875, 3.543132080078125, 3.51947119140625]",tokens/s,17.820776392156304,kWh,0.00010333014504013209,1.139773836272117e-05,6.649719208660037e-05,0.00018122507548945364,tokens/kWh,347634.0116280778,,s,630,35.3501015853882,0.05611127235775902,0.0007698667670877515,0.055886560440063474,0.05700373077392578,0.057534986495971684,0.059320293235778816,"[0.05698102569580078, 0.056110977172851566, 0.05616700744628906, 0.06043587112426758, 0.055855712890625, 0.055643264770507815, 0.05630806350708008, 0.05586716842651367, 0.05551580810546875, 0.055400638580322265, 0.05543926239013672, 0.055785377502441405, 0.05571968078613281, 0.05596124649047852, 0.05799756622314453, 0.057477569580078124, 0.05603680038452148, 0.05579219055175781, 0.05620867156982422, 0.05573295974731445, 0.05570528030395508, 0.056293663024902345, 0.05596982574462891, 0.055861248016357425, 0.056600574493408204, 0.05614614486694336, 0.05580572891235352, 0.055795711517333986, 0.05640713500976562, 0.05579177474975586, 0.05628166580200195, 0.056051902770996094, 0.056199169158935545, 0.05594521713256836, 0.05633779144287109, 0.056318592071533204, 0.05640816116333008, 0.0558540153503418, 0.05566886520385742, 0.0561446418762207, 0.05593215942382813, 0.05588054275512695, 0.05621734237670899, 0.056419776916503905, 0.055778144836425785, 0.05570147323608399, 0.05571583938598633, 0.05573823928833008, 0.05585728073120117, 0.055615455627441406, 0.05566876983642578, 0.05568511962890625, 0.055556095123291016, 0.05556857681274414, 0.056170398712158204, 0.05740841674804688, 0.056339454650878903, 0.05603120040893555, 0.056363040924072266, 0.05585718536376953, 0.05655712127685547, 0.056002975463867184, 0.055852222442626956, 0.056930526733398434, 0.05631436920166016, 0.05630681610107422, 0.05651955032348633, 0.05661056137084961, 0.057135425567626956, 0.056665313720703124, 0.05711740875244141, 0.0568785285949707, 0.056613441467285155, 0.05641775894165039, 0.056250911712646484, 0.056290847778320316, 0.05614031982421875, 0.05568652725219726, 0.05563375854492188, 0.05586608123779297, 0.05600665664672851, 0.05579776000976563, 0.055670783996582034, 0.05555199813842773, 0.05639891052246094, 0.057665889739990234, 0.056584800720214844, 0.055907329559326174, 0.0556960334777832, 0.05573052978515625, 0.05638345718383789, 0.05624630355834961, 0.05633420944213867, 0.0560022087097168, 0.056148448944091794, 0.05603123092651367, 0.05743167877197266, 0.05626713562011719, 0.05609811019897461, 0.05611491012573242, 0.056261600494384764, 0.055994369506835937, 0.0574516487121582, 0.056363040924072266, 0.05686924743652344, 0.056035903930664065, 0.05628303909301758, 0.05549014282226562, 0.05597020721435547, 0.05556828689575195, 0.055691360473632816, 0.05579494476318359, 0.05554662322998047, 0.05561920166015625, 0.05571116638183594, 0.055539806365966796, 0.05579043197631836, 0.05577014541625976, 0.055916671752929685, 0.05587849426269531, 0.05582140731811523, 0.05621644973754883, 0.05676995086669922, 0.05642086410522461, 0.05606412887573242, 0.05656371307373047, 0.057057151794433596, 0.05608291244506836, 0.05603062438964844, 0.05592972946166992, 0.056410049438476564, 0.05594732666015625, 0.05584281539916992, 0.05647750473022461, 0.05606623840332031, 0.05576700973510742, 0.055783454895019534, 0.055588863372802735, 0.056001918792724606, 0.055890464782714845, 0.055776481628417966, 0.05604195022583008, 0.05593494415283203, 0.05580844879150391, 0.0561657600402832, 0.05653881454467773, 0.055933345794677736, 0.05567542266845703, 0.05624825668334961, 
0.058873054504394534, 0.05604204940795898, 0.05610438537597656, 0.05571593475341797, 0.055522048950195316, 0.05543929672241211, 0.05549881744384766, 0.05532672119140625, 0.0554859504699707, 0.05583718490600586, 0.055728126525878906, 0.05552953720092774, 0.05571311950683594, 0.05558703994750976, 0.05544489669799805, 0.055419872283935544, 0.05552041625976562, 0.05588435363769531, 0.05594972610473633, 0.055645984649658205, 0.055556190490722655, 0.05548646545410156, 0.055441184997558596, 0.05561110305786133, 0.05886003112792969, 0.055752704620361325, 0.055662593841552734, 0.05571379089355469, 0.055821601867675784, 0.0556071662902832, 0.055757343292236326, 0.05560140609741211, 0.05565359878540039, 0.05570431900024414, 0.055403648376464845, 0.05598880004882813, 0.055961215972900394, 0.05612828826904297, 0.05559910583496094, 0.05621920013427734, 0.05606150436401367, 0.0557756462097168, 0.05525708770751953, 0.05577318572998047, 0.05588172912597656, 0.05596160125732422, 0.05588787078857422, 0.055791038513183594, 0.05586928176879883, 0.055820606231689454, 0.05554617691040039, 0.0554958381652832, 0.055445758819580075, 0.05659939193725586, 0.05555545425415039, 0.058292190551757814, 0.05619148635864258, 0.055537662506103515, 0.05537996673583984, 0.05530361557006836, 0.055250720977783205, 0.05549251174926758, 0.05585971069335938, 0.05591007995605469, 0.05628998565673828, 0.055836673736572265, 0.05558211135864258, 0.05540105438232422, 0.05529516983032227, 0.055261409759521485, 0.05537033462524414, 0.0554879035949707, 0.05528630447387695, 0.055355457305908205, 0.05575619125366211, 0.05550124740600586, 0.05546121597290039, 0.05537216186523437, 0.055382400512695315, 0.05554550552368164, 0.05795993423461914, 0.055772064208984375, 0.05565763092041016, 0.055701663970947266, 0.055421630859375, 0.05556838226318359, 0.05869158554077149, 0.056024288177490236, 0.055634719848632816, 0.055946590423583985, 0.05539292907714844, 0.05566016006469727, 0.055859584808349606, 0.05559296035766602, 0.05789286422729492, 0.05646092987060547, 0.055718273162841794, 0.05610841751098633, 0.05669468688964844, 0.056079105377197264, 0.05584003067016602, 0.05589823913574219, 0.05622841644287109, 0.05667020797729492, 0.05571891021728516, 0.05556243133544922, 0.05635497665405274, 0.05580857467651367, 0.05586748886108398, 0.055993854522705076, 0.056025054931640624, 0.05552316665649414, 0.05577926254272461, 0.05564902496337891, 0.0553963508605957, 0.05610870361328125, 0.05580630493164063, 0.05570355224609375, 0.0555233268737793, 0.05554985427856445, 0.05552076721191406, 0.05543920135498047, 0.05542601776123047, 0.055502304077148436, 0.05610502243041992, 0.055908607482910155, 0.05576294326782227, 0.055994369506835937, 0.05609088134765625, 0.05559884643554688, 0.055662593841552734, 0.05563497543334961, 0.05574041748046875, 0.055899105072021484, 0.05569462585449219, 0.055492446899414065, 0.05532966232299805, 0.05553782272338867, 0.055508831024169925, 0.05546393585205078, 0.0554571533203125, 0.05546876907348633, 0.0558853759765625, 0.055646560668945313, 0.05559910583496094, 0.05556044769287109, 0.055608062744140624, 0.05600281524658203, 0.0557309455871582, 0.05572380828857422, 0.05565254211425781, 0.05672719955444336, 0.05534348678588867, 0.05579267120361328, 0.05558502578735352, 0.05541299057006836, 0.05530019378662109, 0.055265663146972656, 0.05519564819335938, 0.055054046630859374, 0.05950809478759766, 0.056023200988769534, 0.0559986572265625, 0.055494369506835936, 0.05547724914550781, 0.05607107162475586, 0.056068096160888675, 
0.05581619262695312, 0.05576294326782227, 0.05611494445800781, 0.05566281509399414, 0.05576236724853516, 0.05542768096923828, 0.05752799987792969, 0.05611052703857422, 0.05572288131713867, 0.055521278381347655, 0.0569273910522461, 0.0553603515625, 0.055313503265380856, 0.05573513412475586, 0.05560940933227539, 0.05563388824462891, 0.055404575347900394, 0.05525219345092774, 0.05552822494506836, 0.055336769104003904, 0.056432830810546876, 0.05597792053222656, 0.05561964797973633, 0.055629825592041014, 0.05613699340820313, 0.055881919860839846, 0.05577987289428711, 0.05554995346069336, 0.05556598281860352, 0.055519039154052735, 0.05528828811645508, 0.05551670455932617, 0.05541049575805664, 0.055448287963867186, 0.05530828857421875, 0.055258689880371095, 0.05550742340087891, 0.056589664459228514, 0.05651724624633789, 0.05640531158447266, 0.05603091049194336, 0.05579487991333008, 0.05592659378051758, 0.05584016036987305, 0.05642300796508789, 0.05583599853515625, 0.055995040893554685, 0.05940342330932617, 0.056552288055419925, 0.056501953125, 0.05674220657348633, 0.056097793579101565, 0.05621247863769531, 0.05638547134399414, 0.05611731338500976, 0.05640192031860351, 0.05700198364257812, 0.057382911682128904, 0.056371200561523435, 0.05591654586791992, 0.055934974670410156, 0.055572479248046876, 0.056318145751953125, 0.05589993667602539, 0.05632479858398438, 0.05599846267700195, 0.055554046630859374, 0.05553561782836914, 0.05564416122436523, 0.05551513671875, 0.0554920654296875, 0.05711628723144531, 0.05545273590087891, 0.0555579833984375, 0.05548031997680664, 0.055537601470947266, 0.05612284851074219, 0.05751049423217774, 0.05576700973510742, 0.05579574584960938, 0.05549260711669922, 0.05589811325073242, 0.05616809463500977, 0.05582220840454102, 0.05552585601806641, 0.055932926177978515, 0.05551513671875, 0.05553728103637695, 0.05703718566894531, 0.056157600402832034, 0.05579203033447266, 0.05564144134521484, 0.055933792114257815, 0.055862785339355465, 0.055777793884277345, 0.05583871841430664, 0.05555817413330078, 0.05569891357421875, 0.05579008102416992, 0.055795326232910156, 0.056379776000976566, 0.05638956832885742, 0.05621091079711914, 0.05626531219482422, 0.05657395172119141, 0.056425952911376955, 0.05708998489379883, 0.05694076919555664, 0.05722355270385742, 0.05684428787231445, 0.057109504699707034, 0.05745904159545898, 0.05736924743652344, 0.05641830444335937, 0.056415264129638674, 0.05751456069946289, 0.05846672058105469, 0.05951897430419922, 0.0592400016784668, 0.0579730224609375, 0.05802204895019531, 0.058529918670654296, 0.057640830993652345, 0.05892300796508789, 0.05935308837890625, 0.05829228973388672, 0.05801737594604492, 0.05754070281982422, 0.05740579223632813, 0.05816115188598633, 0.05777612686157227, 0.05737279891967773, 0.05701945495605469, 0.05627151870727539, 0.05731139373779297, 0.0566715202331543, 0.05656383895874023, 0.056815616607666014, 0.057119327545166014, 0.057339134216308596, 0.05651123046875, 0.06024192047119141, 0.05668044662475586, 0.05721449661254883, 0.05625408172607422, 0.05623689651489258, 0.056264705657958984, 0.05617049789428711, 0.056110942840576175, 0.058931041717529296, 0.05724544143676758, 0.05680319976806641, 0.05645177459716797, 0.056543231964111325, 0.056395774841308595, 0.05678079986572265, 0.05636505508422852, 0.056610816955566405, 0.05663100814819336, 0.056334880828857424, 0.056340225219726564, 0.057353408813476565, 0.05678163146972656, 0.05724364852905273, 0.05810550308227539, 0.05623056030273438, 0.056472736358642577, 0.056197120666503904, 
0.056014881134033204, 0.05573068618774414, 0.056114238739013673, 0.05613644790649414, 0.055836639404296874, 0.0557138557434082, 0.05563785552978515, 0.05563833618164062, 0.05570355224609375, 0.05606195068359375, 0.05588172912597656, 0.05653654479980469, 0.05603353500366211, 0.05552790451049805, 0.05556409454345703, 0.055488510131835936, 0.05568646240234375, 0.05615820693969727, 0.05599843215942383, 0.05568092727661133, 0.05698169708251953, 0.055898975372314454, 0.05584796905517578, 0.05580358505249024, 0.055734527587890624, 0.05598575973510742, 0.057618846893310545, 0.05655683135986328, 0.05605779266357422, 0.05617251205444336, 0.055788352966308595, 0.05599958419799805, 0.05582460784912109, 0.055870143890380856, 0.055654399871826174, 0.05593423843383789, 0.05573091125488281, 0.05627481460571289, 0.05590620803833008, 0.05571811294555664, 0.05573836898803711, 0.05551308822631836, 0.055597057342529295, 0.055859104156494144, 0.05607635116577148, 0.05589961624145508, 0.055871456146240235, 0.055699264526367184, 0.05627983856201172, 0.05590220642089844, 0.05565030288696289, 0.05594070434570313, 0.055969982147216796, 0.05563619232177734, 0.05585715103149414, 0.05613363265991211, 0.05623420715332031, 0.05622528076171875, 0.05650473785400391, 0.056744991302490236, 0.05700006484985352, 0.056782974243164065, 0.0571602897644043, 0.05694464111328125, 0.05684796905517578, 0.05817734527587891, 0.05877206420898438, 0.057020191192626954, 0.05674147033691406, 0.05595199966430664, 0.05665731048583984, 0.05663190460205078, 0.056513919830322265, 0.05685107040405273, 0.056342529296875, 0.055999774932861325, 0.055769824981689455, 0.057071617126464844, 0.05557657623291016, 0.05568307113647461, 0.05571596908569336, 0.05622288131713867, 0.05582716751098633, 0.05651507186889648, 0.055887744903564456, 0.05566067123413086, 0.05688924789428711, 0.055983680725097656, 0.055683712005615234, 0.05595536041259765, 0.05563391876220703, 0.05584896087646484, 0.05558867263793945, 0.05632825469970703, 0.055911937713623044, 0.05586598587036133, 0.05617049789428711, 0.0556003532409668, 0.05607503890991211, 0.056174335479736326, 0.05583251190185547, 0.055626144409179686, 0.055697185516357425, 0.0555579833984375, 0.05556047821044922, 0.05556598281860352, 0.05562198257446289, 0.0557916145324707, 0.05581727981567383, 0.05591308975219727, 0.05581353759765625, 0.05592156982421875, 0.05598003387451172, 0.05590739059448242, 0.05560931015014649, 0.05568815994262695, 0.056057857513427733, 0.05602467346191406, 0.0557850570678711, 0.05560531234741211, 0.05554048156738281, 0.05548835372924805, 0.05575846481323242, 0.05545014572143555, 0.05542646408081055, 0.05555449676513672, 0.055269214630126955, 0.055484737396240234, 0.05544140625, 0.05550694274902344, 0.055711742401123046, 0.05564163208007813, 0.06078819274902344, 0.05590323257446289, 0.05586329650878906, 0.05593088150024414, 0.05584691238403321, 0.05551103973388672, 0.05560950469970703, 0.055478046417236325, 0.0559859504699707, 0.05575084686279297, 0.055771232604980465, 0.05575196838378906, 0.05569814300537109, 0.055965694427490234]",tokens/s,17.821730963862574,, 
4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11138.854912,12227.3792,0.0,11848.9088,11814.752256,s,1,16.331890625,16.331890625,0.0,16.331890625,16.331890625,16.331890625,16.331890625,[16.331890625],,kWh,0.000269950213837501,2.977020798288645e-05,8.404562279201722e-05,0.0003837660446124047,,MB,2093.58848,14033.027072,0.0,13625.196544,13298.00192,s,10,23.36663818359375,2.336663818359375,0.0021511089569970668,2.337625244140625,2.338669384765625,2.3388350585937503,2.33896759765625,"[2.338632568359375, 2.338152099609375, 2.339000732421875, 2.338243408203125, 2.337871337890625, 2.33632177734375, 2.337379150390625, 2.3351865234375, 2.33296484375, 2.3328857421875]",tokens/s,109.55790815460284,kWh,6.795378911708592e-05,7.494725000763435e-06,4.506350827299943e-05,0.00012051202239084878,tokens/kWh,2124269.387578045,MB,2099.965952,14184.022016,0.0,13776.191488,13689.859584,s,10,1365.8407656250001,136.5840765625,0.14099416845937957,136.63813281249998,136.729778125,136.73538906250002,136.73987781250003,"[136.741, 136.71475, 136.72853125, 136.681078125, 136.67503125, 136.601234375, 136.51703125, 136.45, 136.38490625, 136.347203125]",tokens/s,0.46125435398885317,kWh,0.0039742203255445805,0.00043838653352902454,0.0026434413647514004,0.007056048223825005,tokens/kWh,8928.510407182053,,s,630,1365.835356933595,2.167992630053323,0.00245803797327439,2.168363525390625,2.170982055664062,2.1715647216796876,2.172534157714844,"[2.169362548828125, 2.169524169921875, 2.169290283203125, 2.169293212890625, 2.17150390625, 2.17208203125, 2.170980224609375, 2.17135791015625, 2.16968408203125, 2.171004150390625, 2.171822998046875, 2.1700087890625, 2.169798583984375, 2.170569580078125, 2.1727841796875, 2.170962158203125, 2.171938720703125, 2.170955810546875, 2.1710234375, 2.17062109375, 2.170538818359375, 2.170964111328125, 2.17099853515625, 2.170173828125, 2.16871630859375, 2.170280517578125, 2.170905029296875, 2.170300537109375, 2.169556884765625, 2.1686904296875, 2.17009716796875, 2.168494873046875, 2.171104736328125, 2.1698603515625, 2.170408447265625, 2.169908203125, 2.169794677734375, 2.17245068359375, 2.171545654296875, 2.17277783203125, 2.17004931640625, 2.170314697265625, 2.170281494140625, 2.1694814453125, 2.170204345703125, 2.16982763671875, 2.17168896484375, 2.169891845703125, 2.171984375, 2.170481201171875, 2.170078857421875, 2.17049658203125, 2.17175439453125, 2.170583984375, 2.17089013671875, 2.168748046875, 2.1698232421875, 2.170303955078125, 2.170503662109375, 2.170760498046875, 2.169337646484375, 2.17056982421875, 2.170290283203125, 2.16931982421875, 2.1692529296875, 2.1683291015625, 2.17041455078125, 2.1696865234375, 2.16855517578125, 2.169948486328125, 2.168610107421875, 2.167562255859375, 2.168133544921875, 2.1682509765625, 2.170392333984375, 2.169739501953125, 2.167556396484375, 2.168411865234375, 2.168475341796875, 2.168783447265625, 
2.16992578125, 2.170560546875, 2.169288818359375, 2.16925390625, 2.167908203125, 2.17023095703125, 2.16987451171875, 2.1698251953125, 2.1699169921875, 2.17007080078125, 2.168732421875, 2.17103369140625, 2.170037841796875, 2.1694931640625, 2.1693408203125, 2.16977001953125, 2.16997216796875, 2.171922607421875, 2.16950390625, 2.169200439453125, 2.170351806640625, 2.170050537109375, 2.17153125, 2.17074072265625, 2.169933349609375, 2.1697294921875, 2.16966552734375, 2.17122216796875, 2.170271484375, 2.171137939453125, 2.170353515625, 2.1701796875, 2.17076318359375, 2.172542724609375, 2.16906591796875, 2.170171142578125, 2.17135302734375, 2.17271728515625, 2.17183642578125, 2.171159912109375, 2.17287255859375, 2.17251318359375, 2.171107421875, 2.17227880859375, 2.172880615234375, 2.170507080078125, 2.17068896484375, 2.17104150390625, 2.17034228515625, 2.171580322265625, 2.171598876953125, 2.1708017578125, 2.17172216796875, 2.1705146484375, 2.16976220703125, 2.168747802734375, 2.168450927734375, 2.1688330078125, 2.1695283203125, 2.16977197265625, 2.169739013671875, 2.170224853515625, 2.170828857421875, 2.169964599609375, 2.17046435546875, 2.16966552734375, 2.171493896484375, 2.1710830078125, 2.1718798828125, 2.169415771484375, 2.16923876953125, 2.169569580078125, 2.169186767578125, 2.169117919921875, 2.170444580078125, 2.170138671875, 2.171592529296875, 2.1686962890625, 2.169596435546875, 2.168596435546875, 2.168890625, 2.17133349609375, 2.169258056640625, 2.169445556640625, 2.17002001953125, 2.169176513671875, 2.171047607421875, 2.1707880859375, 2.16980419921875, 2.17070458984375, 2.171602294921875, 2.171460205078125, 2.171439208984375, 2.171408447265625, 2.1716806640625, 2.17069482421875, 2.170970947265625, 2.1697451171875, 2.1705810546875, 2.168915283203125, 2.169745849609375, 2.16980859375, 2.17103662109375, 2.1712587890625, 2.171217041015625, 2.16958056640625, 2.1689609375, 2.17033935546875, 2.172760009765625, 2.16794189453125, 2.168345947265625, 2.168537109375, 2.169506103515625, 2.16912109375, 2.16852880859375, 2.169100341796875, 2.16913037109375, 2.1684619140625, 2.168895263671875, 2.169536865234375, 2.16985400390625, 2.170060791015625, 2.16954248046875, 2.170759521484375, 2.169196533203125, 2.1683466796875, 2.1684892578125, 2.1680517578125, 2.169093017578125, 2.16945263671875, 2.16905859375, 2.170333984375, 2.168133544921875, 2.168465087890625, 2.1704296875, 2.170662841796875, 2.169109619140625, 2.169588623046875, 2.167689208984375, 2.168756103515625, 2.169784423828125, 2.16871533203125, 2.169174072265625, 2.169288818359375, 2.169388427734375, 2.171112060546875, 2.1708349609375, 2.16998095703125, 2.169499755859375, 2.170353759765625, 2.17191015625, 2.1708798828125, 2.171993896484375, 2.17151513671875, 2.17060546875, 2.168620849609375, 2.169801025390625, 2.170787841796875, 2.169780029296875, 2.16985986328125, 2.169104736328125, 2.1704267578125, 2.169712890625, 2.170366455078125, 2.16974951171875, 2.16888330078125, 2.169206787109375, 2.170090576171875, 2.168535888671875, 2.169146484375, 2.169555908203125, 2.169720947265625, 2.168891357421875, 2.16675927734375, 2.16760546875, 2.16756640625, 2.168850341796875, 2.168493408203125, 2.168814208984375, 2.168524658203125, 2.168668212890625, 2.167869140625, 2.169588134765625, 2.167732177734375, 2.167242431640625, 2.168169921875, 2.16750537109375, 2.167734619140625, 2.169392333984375, 2.1687666015625, 2.16872802734375, 2.167601318359375, 2.16911669921875, 2.1690546875, 2.16907958984375, 2.16874853515625, 2.167593017578125, 2.169420166015625, 
2.16717626953125, 2.168871337890625, 2.16750537109375, 2.168604736328125, 2.16865966796875, 2.1699580078125, 2.169855712890625, 2.17080419921875, 2.16951708984375, 2.169409423828125, 2.169374755859375, 2.17058935546875, 2.1702998046875, 2.17072021484375, 2.1687197265625, 2.170218505859375, 2.168195068359375, 2.170111572265625, 2.170776123046875, 2.170766845703125, 2.170847900390625, 2.171146240234375, 2.170884033203125, 2.17183544921875, 2.171052978515625, 2.168711181640625, 2.17054833984375, 2.169819091796875, 2.170898193359375, 2.1709560546875, 2.17090869140625, 2.17165576171875, 2.17193115234375, 2.170635498046875, 2.17216796875, 2.171198486328125, 2.171590576171875, 2.166392822265625, 2.1678125, 2.167912353515625, 2.16743115234375, 2.167509033203125, 2.16749072265625, 2.168342529296875, 2.167932373046875, 2.167357666015625, 2.16689013671875, 2.166894775390625, 2.1672587890625, 2.16954345703125, 2.168363037109375, 2.166921142578125, 2.167406005859375, 2.167665283203125, 2.16841015625, 2.168255859375, 2.16693408203125, 2.16949755859375, 2.167058349609375, 2.1681904296875, 2.16927294921875, 2.168059814453125, 2.167815673828125, 2.168355224609375, 2.168860595703125, 2.1682236328125, 2.169315185546875, 2.16764208984375, 2.167444580078125, 2.168672119140625, 2.1676962890625, 2.171187255859375, 2.1680087890625, 2.1676337890625, 2.167380126953125, 2.1674638671875, 2.168512451171875, 2.167825927734375, 2.16818115234375, 2.167401611328125, 2.1679296875, 2.168057373046875, 2.169559326171875, 2.16978173828125, 2.168203857421875, 2.168690673828125, 2.167989501953125, 2.170017578125, 2.16817041015625, 2.168364013671875, 2.168296875, 2.16830615234375, 2.16907568359375, 2.169552978515625, 2.168143798828125, 2.169496826171875, 2.168421142578125, 2.169944091796875, 2.170005615234375, 2.170220458984375, 2.170427490234375, 2.16810498046875, 2.16730615234375, 2.167529541015625, 2.16633349609375, 2.16644921875, 2.166615966796875, 2.16589404296875, 2.16842626953125, 2.166503173828125, 2.165834228515625, 2.166220703125, 2.16660986328125, 2.16643994140625, 2.168268798828125, 2.1664296875, 2.16646240234375, 2.166199951171875, 2.167081298828125, 2.167373779296875, 2.165665771484375, 2.16609375, 2.1672197265625, 2.16670458984375, 2.16618798828125, 2.166564208984375, 2.1669462890625, 2.16717919921875, 2.167109619140625, 2.165676025390625, 2.16823291015625, 2.166917724609375, 2.16533154296875, 2.165361572265625, 2.16673486328125, 2.167796875, 2.167021728515625, 2.16681884765625, 2.16614794921875, 2.167887939453125, 2.1662373046875, 2.168397216796875, 2.16859716796875, 2.166555908203125, 2.167212646484375, 2.168458251953125, 2.167673828125, 2.166921142578125, 2.16628759765625, 2.166884765625, 2.167589111328125, 2.166208740234375, 2.16766796875, 2.16665185546875, 2.166697021484375, 2.1662607421875, 2.16687548828125, 2.16793505859375, 2.16679248046875, 2.1663173828125, 2.1664091796875, 2.166824951171875, 2.166919189453125, 2.165501953125, 2.1651494140625, 2.16407666015625, 2.164482177734375, 2.167457763671875, 2.165520263671875, 2.166302734375, 2.165307373046875, 2.165833740234375, 2.166099609375, 2.166630859375, 2.1645966796875, 2.16576416015625, 2.16566796875, 2.165785888671875, 2.1655, 2.165670654296875, 2.165544921875, 2.1652744140625, 2.16552783203125, 2.16616552734375, 2.16677685546875, 2.166128662109375, 2.165551025390625, 2.16499609375, 2.16361083984375, 2.167022216796875, 2.1652890625, 2.165182373046875, 2.165781494140625, 2.164755126953125, 2.1667412109375, 2.165505615234375, 2.166676025390625, 2.1669765625, 
2.16542626953125, 2.166435791015625, 2.16665087890625, 2.16688037109375, 2.166635986328125, 2.165787109375, 2.166326904296875, 2.167841064453125, 2.16712109375, 2.165900146484375, 2.166140869140625, 2.165682373046875, 2.16465771484375, 2.165961181640625, 2.16589306640625, 2.167142333984375, 2.1649755859375, 2.16568212890625, 2.16656884765625, 2.167205810546875, 2.165231689453125, 2.166363525390625, 2.16429736328125, 2.16599853515625, 2.16683251953125, 2.16555908203125, 2.1658466796875, 2.16557763671875, 2.164621337890625, 2.164137451171875, 2.164543212890625, 2.164142333984375, 2.16470361328125, 2.165547119140625, 2.16657275390625, 2.1632021484375, 2.16575390625, 2.16450244140625, 2.163293212890625, 2.16456298828125, 2.164375244140625, 2.163614013671875, 2.163464111328125, 2.164674560546875, 2.164279296875, 2.16469287109375, 2.16445947265625, 2.164264892578125, 2.1646865234375, 2.16453759765625, 2.166032470703125, 2.163771240234375, 2.16451806640625, 2.163108642578125, 2.1642138671875, 2.1642236328125, 2.16492626953125, 2.165387451171875, 2.163768798828125, 2.164283203125, 2.164267822265625, 2.166134765625, 2.165104736328125, 2.1657763671875, 2.16464990234375, 2.165478515625, 2.16481005859375, 2.16541455078125, 2.165032470703125, 2.165539306640625, 2.164387939453125, 2.164507568359375, 2.166477783203125, 2.1652705078125, 2.16556298828125, 2.165947998046875, 2.165300048828125, 2.16605224609375, 2.165786865234375, 2.16467822265625, 2.16627490234375, 2.164760498046875, 2.16550927734375, 2.16473388671875, 2.164972412109375, 2.1643857421875, 2.164823486328125, 2.16431396484375, 2.165788818359375, 2.165119873046875, 2.16464794921875, 2.164505126953125, 2.163439208984375, 2.16489013671875, 2.16233544921875, 2.16322314453125, 2.16285791015625, 2.163093505859375, 2.16430126953125, 2.163749267578125, 2.164066162109375, 2.162819091796875, 2.163954833984375, 2.163837890625, 2.165333984375, 2.16505712890625, 2.162974365234375, 2.165384033203125, 2.163882080078125, 2.164766357421875, 2.16448193359375, 2.163464599609375, 2.163142578125, 2.163967041015625, 2.16417578125, 2.165642333984375, 2.163696533203125, 2.162642822265625, 2.162888671875, 2.16344580078125, 2.165002197265625, 2.164923828125, 2.16365234375, 2.16443359375, 2.163681396484375, 2.1636767578125, 2.1659365234375, 2.1631240234375, 2.1641806640625, 2.165008544921875, 2.16462744140625, 2.165252197265625, 2.165598388671875, 2.1647626953125, 2.164516845703125, 2.16500439453125, 2.164822021484375, 2.166564208984375, 2.165697021484375, 2.163859375, 2.163810302734375, 2.16506494140625, 2.164789794921875, 2.16618408203125, 2.16356201171875, 2.163326416015625, 2.16323486328125, 2.164265869140625, 2.164746337890625, 2.16414208984375, 2.163759033203125, 2.162353515625, 2.165184814453125, 2.1658955078125]",tokens/s,0.4612561805504866,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback 
(most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3550.654464,4495.179776,0.0,4116.709376,3980.386816,s,1,9.758755859375,9.758755859375,0.0,9.758755859375,9.758755859375,9.758755859375,9.758755859375,[9.758755859375],,kWh,8.898960568749884e-05,9.809050610969499e-06,2.695057711600015e-05,0.00012574923341446848,,MB,3549.011968,4826.529792,0.0,4418.699264,4245.89568,s,10,6.605035339355468,0.6605035339355468,0.0006230609261115123,0.6603740234375,0.6612575622558594,0.661468197631836,0.6616367059326171,"[0.6597408447265625, 0.6600413818359375, 0.6596790771484375, 0.660224853515625, 0.6602737426757812, 0.6604743041992187, 0.6611260375976562, 0.6612107543945313, 0.6616788330078125, 0.6605855102539062]",tokens/s,387.5830890330724,kWh,1.9281606503906238e-05,2.1255516435658325e-06,1.287422904937502e-05,3.428138719684709e-05,tokens/kWh,7467609.1294095805,MB,3556.94592,4837.015552,0.0,4429.185024,4245.89824,s,10,385.3308124999999,38.53308125,0.0249248607635426,38.5474375,38.55015078125,38.55081171875,38.55134046875,"[38.47213671875, 38.50425390625, 38.5239609375, 38.53500390625, 38.54686328125, 38.55147265625, 38.54991015625, 38.54801171875, 38.55000390625, 38.5491953125]",tokens/s,1.63495879271269,kWh,0.0011238995098256774,0.0001239736496154392,0.0007469280732364251,0.0019948012326775418,tokens/kWh,31582.093979076613,,s,630,385.32706719970645,0.6116302653963603,0.0005920977871824857,0.6116809997558594,0.6123485229492188,0.6124803436279297,0.6127918884277344,"[0.6100867309570313, 0.610197021484375, 0.6103533325195313, 0.6096705932617188, 0.6108343505859375, 0.6095738525390625, 0.6110203857421875, 0.6089094848632812, 0.6107077026367187, 0.610044921875, 0.609010009765625, 0.6115314331054688, 0.60957080078125, 0.6103983154296875, 0.6107708740234375, 0.610209716796875, 0.6111192016601562, 0.6099435424804688, 0.610407470703125, 0.6109091186523438, 0.6098042602539062, 0.6107578125, 0.6106380004882812, 0.6103272094726563, 0.6102745361328125, 0.6112899169921875, 0.6105045166015625, 0.610654296875, 0.6111459350585937, 0.6102752075195312, 0.610801513671875, 0.6112227783203125, 0.6105524291992187, 0.61095556640625, 0.6106604614257812, 0.611110107421875, 0.610850830078125, 0.6102777709960937, 0.6112688598632813, 0.6107890014648437, 0.610859375, 
0.6106542358398438, 0.610566162109375, 0.6109616088867188, 0.6111590576171875, 0.610697998046875, 0.6113421630859375, 0.6108383178710938, 0.6102860107421875, 0.6111743774414062, 0.6108426513671875, 0.6111047973632813, 0.610873046875, 0.6104456176757812, 0.6114224243164063, 0.6110021362304687, 0.6109299926757813, 0.61133447265625, 0.61034521484375, 0.6125866088867188, 0.6106936645507812, 0.6112772827148437, 0.6109339599609375, 0.6115963134765625, 0.61043017578125, 0.6112429809570312, 0.6107515258789062, 0.61059912109375, 0.6110541381835938, 0.6098611450195313, 0.611631591796875, 0.6110841674804688, 0.610819580078125, 0.61090234375, 0.6101548461914063, 0.6113565673828125, 0.6110004272460937, 0.6110596923828125, 0.6109173583984375, 0.6111201171875, 0.6114240112304687, 0.6107047729492188, 0.6108209228515625, 0.6112788696289062, 0.6108262329101563, 0.6119588012695313, 0.61079345703125, 0.61098388671875, 0.6112564086914063, 0.6110984497070312, 0.61134423828125, 0.6106360473632813, 0.6111801147460938, 0.6110778198242187, 0.6114282836914062, 0.6116187744140625, 0.6108777465820312, 0.6109251098632813, 0.6113710327148437, 0.61138330078125, 0.6113792114257812, 0.6106234741210937, 0.611322998046875, 0.6113616943359375, 0.61153076171875, 0.6110494995117187, 0.6110392456054687, 0.6116618041992188, 0.6112166748046876, 0.6115838012695313, 0.6112612915039063, 0.6110740356445312, 0.6114365234375, 0.611292724609375, 0.6114259643554687, 0.6112550659179687, 0.6111948852539062, 0.6115687255859374, 0.61121630859375, 0.6115549926757813, 0.6113662719726562, 0.6114887084960937, 0.6114703979492188, 0.6114799194335937, 0.6118580932617188, 0.61162548828125, 0.6115798950195312, 0.611135498046875, 0.6108765258789063, 0.6112225952148438, 0.6110739135742187, 0.611142822265625, 0.6112203369140625, 0.610957275390625, 0.6111561279296875, 0.6113258056640625, 0.611061767578125, 0.6114877319335937, 0.6107731323242187, 0.6111189575195313, 0.6112454833984375, 0.61106640625, 0.6115083618164062, 0.611123046875, 0.6112849731445312, 0.6114754638671875, 0.6112337646484375, 0.6113956298828125, 0.6112267456054687, 0.611590087890625, 0.611854248046875, 0.6115601806640625, 0.6119142456054687, 0.6109790649414063, 0.6119214477539062, 0.612299560546875, 0.611412109375, 0.6113074340820313, 0.611765869140625, 0.6118466186523438, 0.611181884765625, 0.611301513671875, 0.6116542358398438, 0.6109094848632812, 0.6116719360351562, 0.6122107543945312, 0.611000244140625, 0.611877685546875, 0.6112620239257812, 0.61207763671875, 0.6121326293945313, 0.6109374389648438, 0.6120220947265625, 0.6113857421875, 0.6120875244140624, 0.6114581909179687, 0.6120641479492187, 0.6115491943359375, 0.6118441162109375, 0.6118250732421875, 0.611924560546875, 0.6112235717773438, 0.6115874633789062, 0.6117986450195313, 0.6119608154296875, 0.611535400390625, 0.6116593017578125, 0.6118470458984375, 0.6114503173828125, 0.612738037109375, 0.6118358764648437, 0.6109839477539063, 0.6119075927734375, 0.611090576171875, 0.6118807983398438, 0.611280029296875, 0.6113063354492188, 0.6113076782226563, 0.6112477416992188, 0.6117296142578125, 0.6115123291015625, 0.6111682739257812, 0.611694580078125, 0.6109653930664063, 0.6115628662109375, 0.6117786254882812, 0.6112447509765625, 0.611982666015625, 0.611343017578125, 0.6116022338867187, 0.6114140014648437, 0.6116100463867188, 0.6115374145507813, 0.6111929931640625, 0.61177880859375, 0.6119177856445313, 0.611346435546875, 0.6115819702148437, 0.6109470825195312, 0.6124107666015625, 0.6109736938476562, 0.6119307250976562, 
0.6117291870117187, 0.6115064086914063, 0.61217578125, 0.6114890747070313, 0.61170166015625, 0.6121551513671875, 0.6119916381835937, 0.6127471923828125, 0.6110349731445313, 0.6121996459960938, 0.6112449951171876, 0.6121649169921874, 0.612045166015625, 0.6112271118164062, 0.6116300048828125, 0.6117802734375, 0.6122683715820313, 0.6113378295898437, 0.6117656860351562, 0.6114736328125, 0.611730224609375, 0.6118562622070313, 0.611344482421875, 0.6121122436523437, 0.6114164428710938, 0.6118440551757812, 0.6117517700195313, 0.6120194091796874, 0.6115458374023437, 0.6125361938476562, 0.612640380859375, 0.6116910400390625, 0.6108280029296875, 0.6117782592773438, 0.6113224487304687, 0.612083740234375, 0.6109819946289062, 0.6113761596679688, 0.6123519897460937, 0.6109807739257812, 0.6120018920898438, 0.61100634765625, 0.6117019653320312, 0.6111486206054687, 0.611885009765625, 0.6125772705078125, 0.6108948364257812, 0.6113424682617188, 0.61180810546875, 0.6117007446289062, 0.6113034057617187, 0.6120345458984375, 0.6122250366210937, 0.611577880859375, 0.6113272705078125, 0.6122667236328125, 0.6115052490234375, 0.6119617309570312, 0.6121383056640625, 0.6112713012695312, 0.6125997924804687, 0.6112788696289062, 0.612013671875, 0.6117134399414063, 0.6122352905273437, 0.6120238647460937, 0.6116519775390625, 0.6117332763671876, 0.6118280029296875, 0.6121653442382813, 0.6120798950195312, 0.6116488647460937, 0.6122659301757812, 0.6116287841796875, 0.6124349365234375, 0.6120017700195313, 0.61205908203125, 0.6118953247070312, 0.6121962280273437, 0.6119605712890624, 0.6128274536132813, 0.6122230834960938, 0.6122086181640625, 0.6117847290039062, 0.6124800415039062, 0.6116911010742188, 0.6117849731445313, 0.6125952758789063, 0.6111460571289062, 0.6127905883789062, 0.6115914306640625, 0.6121683349609375, 0.61209716796875, 0.61191796875, 0.612640625, 0.6110637817382812, 0.6115594482421876, 0.612121826171875, 0.6112201538085937, 0.6117783203125, 0.6113529663085937, 0.611819091796875, 0.6113018798828125, 0.6122822265625, 0.6118338623046875, 0.61203662109375, 0.61149951171875, 0.6117914428710938, 0.611531982421875, 0.6117403564453125, 0.6119852294921875, 0.6121980590820313, 0.6109639892578125, 0.6124329833984375, 0.6110625, 0.6117787475585937, 0.611724365234375, 0.6115828857421876, 0.612011962890625, 0.6118659057617187, 0.6120718383789062, 0.6121231689453125, 0.6124175415039063, 0.6121798095703125, 0.6114788818359375, 0.61227294921875, 0.6122002563476563, 0.6121082763671875, 0.6117191772460937, 0.6123735961914063, 0.611915771484375, 0.6119144897460937, 0.6118585815429688, 0.6122119140625, 0.6117993774414062, 0.6126126098632813, 0.6121094360351562, 0.61227099609375, 0.6115380249023438, 0.6126572875976563, 0.6113551025390624, 0.61220458984375, 0.6124115600585938, 0.6120941162109375, 0.6118339233398438, 0.6122511596679687, 0.6117440795898438, 0.6119541015625, 0.6115374145507813, 0.6120458984375, 0.6121747436523437, 0.612713623046875, 0.6117813110351562, 0.6124095458984375, 0.6112662963867187, 0.61240771484375, 0.6112577514648437, 0.6117216796875, 0.61115185546875, 0.6116004028320312, 0.6115552978515625, 0.6116796875, 0.6114078979492188, 0.612432373046875, 0.6116531372070313, 0.6120514526367188, 0.612039794921875, 0.611510498046875, 0.6129464111328125, 0.6108401489257812, 0.612366943359375, 0.611108642578125, 0.612112548828125, 0.6117539672851563, 0.6120386352539062, 0.6122014770507812, 0.610892822265625, 0.6124089965820313, 0.6112544555664062, 0.612296875, 0.6117885131835937, 0.6124813232421875, 0.6114488525390624, 
0.6121001586914062, 0.6117293701171875, 0.6118292846679687, 0.6114818725585938, 0.6121261596679688, 0.6116583862304688, 0.611817626953125, 0.6119771728515625, 0.6119376220703125, 0.6120693359375, 0.6117487182617187, 0.6118214111328125, 0.6121790771484374, 0.6123486328125, 0.6122989501953126, 0.6123724975585938, 0.6121533203125, 0.6122147827148438, 0.6122823486328125, 0.6118863525390625, 0.6121823120117188, 0.6119112548828125, 0.6119692993164062, 0.6123930053710938, 0.6113568115234375, 0.6123175659179687, 0.6118001098632813, 0.6119844970703125, 0.6119932250976563, 0.6120860595703125, 0.6121143188476562, 0.6115952758789063, 0.6121318359375, 0.6116773681640625, 0.6123018188476562, 0.6116881103515625, 0.6114631958007812, 0.6121328735351562, 0.6111272583007813, 0.6119588623046875, 0.61170263671875, 0.6119564208984375, 0.6112684326171876, 0.6118877563476562, 0.6115038452148438, 0.6114746704101562, 0.6119205932617188, 0.6120281372070312, 0.6116987915039063, 0.6114064331054687, 0.6124113159179687, 0.6112781372070313, 0.6120066528320313, 0.611626953125, 0.6123621215820313, 0.61093798828125, 0.6119782104492187, 0.6115983276367187, 0.6121513061523437, 0.61161474609375, 0.6117186889648437, 0.6113179321289063, 0.611909912109375, 0.6120181884765625, 0.6120342407226562, 0.6114204711914063, 0.612114013671875, 0.6120016479492187, 0.612050537109375, 0.6117672729492187, 0.6117969360351563, 0.61220361328125, 0.6111314086914063, 0.6126417846679687, 0.611280029296875, 0.6126109008789062, 0.6116351318359375, 0.6120182495117188, 0.6121041259765625, 0.6115387573242187, 0.6120286254882813, 0.6117396240234375, 0.6120509643554688, 0.61136279296875, 0.6125336303710938, 0.61175439453125, 0.6116763305664062, 0.6122394409179688, 0.61179443359375, 0.6122379760742187, 0.6120364379882812, 0.6118072509765625, 0.6126284790039063, 0.6119752807617187, 0.6128693237304688, 0.6118898315429687, 0.6119874267578125, 0.6124451904296875, 0.6117816162109375, 0.6116331176757812, 0.6122645874023438, 0.6111027221679688, 0.6122823486328125, 0.61180419921875, 0.6120498657226563, 0.6111559448242188, 0.611862548828125, 0.6112052001953125, 0.6119134521484375, 0.61221826171875, 0.6110249633789062, 0.6118604736328125, 0.6112611694335938, 0.6116834106445312, 0.6120723876953125, 0.6109813842773437, 0.6128661499023438, 0.6112119140625, 0.612406494140625, 0.6119605712890624, 0.6113814086914062, 0.6115191040039063, 0.6121307983398437, 0.6115082397460937, 0.6124724731445312, 0.611697021484375, 0.6117130126953125, 0.6124393310546875, 0.6120467529296875, 0.6117588500976563, 0.6121447143554688, 0.6123012084960937, 0.611454345703125, 0.6122400512695313, 0.6113724365234375, 0.6123485107421875, 0.6113218383789063, 0.6121974487304688, 0.6117467041015625, 0.612235107421875, 0.61198291015625, 0.6116659545898437, 0.6121889038085937, 0.6113687744140625, 0.612431884765625, 0.6117457885742188, 0.612025634765625, 0.612070068359375, 0.6119446411132813, 0.61252490234375, 0.6114900512695313, 0.612707275390625, 0.6114993896484375, 0.6124589233398438, 0.6123704223632812, 0.612073486328125, 0.6117560424804688, 0.6119668579101563, 0.6117354736328126, 0.6124805908203125, 0.61143701171875, 0.6127924194335937, 0.6124965209960938, 0.6111691284179688, 0.61187890625, 0.6114877319335937, 0.6118578491210938, 0.6114903564453125, 0.61153076171875, 0.6115978393554687, 0.61136962890625, 0.6120692138671875, 0.6116220092773438, 0.6122158813476563, 0.6112848510742187, 0.6118174438476562, 0.6116864013671875, 0.6121880493164062, 0.611337646484375, 0.6113819580078125, 0.6126708374023437, 
0.6112508544921875, 0.6123948974609374, 0.61131787109375, 0.6128312377929688, 0.6115405883789062, 0.6118854370117187, 0.6120525512695313, 0.6114081420898437, 0.6123910522460938, 0.6116823120117187, 0.6123521118164063, 0.6113831787109375, 0.6121242065429687, 0.611533447265625, 0.6117701416015625, 0.6116065063476562, 0.6117473754882813, 0.6116456909179687, 0.6114653930664062, 0.61201611328125, 0.6121922607421875, 0.6118807373046875, 0.612284912109375, 0.6114402465820312, 0.612060791015625, 0.6114349975585938, 0.6126071166992187, 0.6115418701171875, 0.61243798828125, 0.6115164184570312, 0.61290673828125, 0.611835693359375, 0.61220703125, 0.6122926025390625, 0.6111846313476562, 0.6127697143554688, 0.6120305786132813, 0.6122406616210937, 0.6120680541992187, 0.6117254638671875, 0.6124521484375, 0.6115799560546875, 0.6124517822265625, 0.6120985717773437]",tokens/s,1.6349746841778028,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,5182.85312,5584.584704,0.0,5182.062592,5181.108736,s,1,11.2801416015625,11.2801416015625,0.0,11.2801416015625,11.2801416015625,11.2801416015625,11.2801416015625,[11.2801416015625],,kWh,0.0001153400779125036,1.2715570198330854e-05,3.361030466599718e-05,0.00016166595277683162,,MB,5159.092224,5739.773952,0.0,5322.571776,5283.621376,s,10,2.5152559204101563,0.25152559204101566,0.0006134836204615277,0.25144146728515626,0.2524150741577148,0.2524170860290527,0.25241869552612306,"[0.2505451202392578, 0.2510733184814453, 0.2507513580322266, 0.25143788146972657, 0.25144505310058596, 0.2517642822265625, 0.25241909790039063, 0.25133126831054686, 0.2524146270751953, 0.25207391357421877]",tokens/s,1017.789076342795,kWh,7.422230555104174e-06,8.18522820740633e-07,4.954385907950109e-06,1.3195139283794918e-05,tokens/kWh,19401083.572827168,MB,5167.259648,5756.551168,0.0,5339.348992,5283.623936,s,10,28.361325927734374,2.8361325927734375,0.018969364521891238,2.8370953369140626,2.8564384033203125,2.8598361450195315,2.8625543383789065,"[2.810791015625, 2.82039013671875, 2.80667431640625, 2.825640625, 2.83059521484375, 2.85177490234375, 2.843595458984375, 2.86323388671875, 2.855683349609375, 2.852947021484375]",tokens/s,22.21334790923603,kWh,8.2129030527396e-05,9.058927448725363e-06,5.234896549024954e-05,0.0001435369234663709,tokens/kWh,438911.45552356914,,s,630,28.3581591758728,0.04501295107281397,0.0005839222223718378,0.044969408035278324,0.04556121368408203,0.045784611892700194,0.04739607604980469,"[0.0451861457824707, 0.045018016815185545, 0.04541193771362305, 0.04488179016113281, 0.04458345413208008, 0.044635711669921876, 0.04494198226928711, 0.04477315139770508, 0.044635456085205076, 0.04441977691650391, 0.044478462219238284, 0.04455014419555664, 0.044494400024414064, 0.04445228958129883, 0.04458720016479492, 0.04489628982543945, 0.04466284942626953, 0.04442876815795899, 0.04440089416503906, 0.04435126495361328, 
0.04469168090820313, 0.044497951507568356, 0.04462704086303711, 0.04464627075195313, 0.04477337646484375, 0.04461481475830078, 0.0445305290222168, 0.044459968566894534, 0.044607551574707034, 0.044677120208740234, 0.04444367980957031, 0.04468454360961914, 0.0444950065612793, 0.04457113647460938, 0.04498179244995117, 0.04451996612548828, 0.04415427017211914, 0.04430089569091797, 0.04466387176513672, 0.044526241302490235, 0.04441116714477539, 0.04445798492431641, 0.04422655868530274, 0.044199935913085936, 0.04416214370727539, 0.044657569885253906, 0.04470988845825195, 0.04449280166625977, 0.044711936950683595, 0.04447983932495117, 0.04467363357543945, 0.044877311706542966, 0.044612064361572265, 0.04449248123168945, 0.044493217468261716, 0.04468751907348633, 0.04476707077026367, 0.04462182235717774, 0.04470988845825195, 0.0445665283203125, 0.04469760131835938, 0.04483900833129883, 0.04472016143798828, 0.0451624641418457, 0.04474687957763672, 0.04461203384399414, 0.04450099182128906, 0.044916736602783204, 0.04592230224609375, 0.04470723342895508, 0.045160385131835935, 0.045058624267578125, 0.04490864181518555, 0.04563363265991211, 0.044687488555908206, 0.044412704467773435, 0.04620083236694336, 0.04498153686523437, 0.04480873489379883, 0.04472441482543945, 0.04470374298095703, 0.04469164657592774, 0.04434310531616211, 0.044240768432617185, 0.044822689056396484, 0.04432268905639648, 0.04426147079467774, 0.04422406387329102, 0.044003776550292965, 0.04409139251708984, 0.04454934310913086, 0.04431967926025391, 0.04435337448120117, 0.044625694274902344, 0.04473436737060547, 0.04497436904907227, 0.04451724624633789, 0.044585025787353516, 0.044799423217773436, 0.04486956787109375, 0.044659423828125, 0.04467113494873047, 0.04461100769042969, 0.04532675170898438, 0.04475494384765625, 0.044701694488525394, 0.04436377716064453, 0.04439244842529297, 0.044668800354003904, 0.04523408126831055, 0.04447049713134766, 0.04454633712768555, 0.04460726547241211, 0.044771263122558594, 0.04467686462402344, 0.044669185638427734, 0.04463846588134766, 0.044295425415039065, 0.044854015350341794, 0.04444291305541992, 0.044730464935302736, 0.04509142303466797, 0.047123584747314456, 0.045125598907470706, 0.044714305877685545, 0.044791839599609376, 0.04482003021240234, 0.04424454498291015, 0.04442940902709961, 0.044208927154541014, 0.04475081634521484, 0.04430031967163086, 0.04445907211303711, 0.044182464599609374, 0.044611583709716796, 0.04501465606689453, 0.04419641494750977, 0.044437503814697264, 0.044257022857666015, 0.044328990936279296, 0.04443334579467773, 0.044240478515625, 0.044773887634277344, 0.044646625518798826, 0.044469951629638675, 0.04438230514526367, 0.04648556900024414, 0.04488819122314453, 0.04463391876220703, 0.04515030288696289, 0.04483881759643555, 0.044910209655761715, 0.046985790252685546, 0.044642112731933595, 0.04428505706787109, 0.044249664306640624, 0.04400774383544922, 0.04484019088745117, 0.044316638946533204, 0.04436867141723633, 0.04447654342651367, 0.04428787231445312, 0.044305599212646485, 0.04405478286743164, 0.04409360122680664, 0.04421263885498047, 0.04446828842163086, 0.04433500671386719, 0.04437404632568359, 0.04449894332885742, 0.04415078353881836, 0.044259071350097656, 0.044601024627685545, 0.04434560012817383, 0.04435385513305664, 0.04432896041870117, 0.04432044982910156, 0.04433337783813476, 0.04415683364868164, 0.044101119995117184, 0.04803644943237305, 0.04430847930908203, 0.04421836853027344, 0.04408115386962891, 0.044455936431884766, 0.04460688018798828, 0.04413631820678711, 
0.044297183990478516, 0.04440796661376953, 0.045266750335693356, 0.04471231842041016, 0.04524031829833984, 0.04482867050170898, 0.0446196174621582, 0.044765342712402345, 0.044609535217285154, 0.04475699234008789, 0.045416065216064457, 0.0446710090637207, 0.044620128631591795, 0.044892288208007815, 0.04461094284057617, 0.04450342559814453, 0.04463324737548828, 0.04461423873901367, 0.0446959342956543, 0.04477318572998047, 0.04450860977172852, 0.04455295944213867, 0.04445167922973633, 0.044631423950195315, 0.044638175964355466, 0.04465558242797851, 0.04449161529541015, 0.04514416122436524, 0.04461865615844727, 0.0448001937866211, 0.044986175537109374, 0.044805919647216794, 0.04465891265869141, 0.04507852935791016, 0.04502048110961914, 0.04488057708740235, 0.04502102279663086, 0.044996383666992185, 0.044869598388671876, 0.04462742233276367, 0.04475795364379883, 0.044830432891845705, 0.04501532745361328, 0.04494960021972656, 0.04518697738647461, 0.04466262435913086, 0.04503535842895508, 0.04495187377929687, 0.04619222259521484, 0.04503798294067383, 0.044944511413574216, 0.0447836799621582, 0.04477347183227539, 0.04516672134399414, 0.04544736099243164, 0.044997024536132815, 0.044853534698486325, 0.04460297775268555, 0.04493529510498047, 0.044677120208740234, 0.04459929656982422, 0.04465996932983399, 0.044826496124267576, 0.04476172637939453, 0.044990718841552736, 0.04525260925292969, 0.04502665710449219, 0.044962558746337894, 0.04495779037475586, 0.044937023162841795, 0.04490854263305664, 0.04478275299072266, 0.04447840118408203, 0.04444784164428711, 0.04406937789916992, 0.04405855941772461, 0.04399472045898437, 0.04427193450927734, 0.04473664093017578, 0.044644702911376954, 0.04471814346313477, 0.04521539306640625, 0.04488425445556641, 0.04474879837036133, 0.044660030364990236, 0.04472902297973633, 0.04485529708862305, 0.044834815979003906, 0.04499193572998047, 0.045115039825439456, 0.044770111083984376, 0.0447957763671875, 0.04478694534301758, 0.04602159881591797, 0.04478540802001953, 0.045062080383300784, 0.04855839920043945, 0.044805503845214846, 0.04486563110351562, 0.04491228866577148, 0.04491312026977539, 0.044687774658203124, 0.044638046264648436, 0.04475305557250977, 0.04496723175048828, 0.04495862579345703, 0.04489113616943359, 0.04497910308837891, 0.044942657470703126, 0.044912353515625, 0.04492092895507813, 0.04489043045043945, 0.04490079879760742, 0.045197311401367186, 0.04501606369018555, 0.04488889694213867, 0.04503753662109375, 0.04497158432006836, 0.044927646636962894, 0.04492287826538086, 0.04494131088256836, 0.044989761352539064, 0.04490719985961914, 0.045106464385986325, 0.04505462265014649, 0.04537139129638672, 0.04505401611328125, 0.04490403366088867, 0.045779102325439455, 0.04530934524536133, 0.04498697662353516, 0.04530899047851562, 0.04504467010498047, 0.045125633239746096, 0.044991905212402344, 0.04470140838623047, 0.04476553726196289, 0.04701779174804688, 0.044748863220214846, 0.04506691360473633, 0.04543625640869141, 0.045134624481201174, 0.044834686279296876, 0.04482640075683594, 0.046123233795166016, 0.04509872055053711, 0.04488016128540039, 0.04498636627197266, 0.04509641647338867, 0.045271263122558594, 0.04500640106201172, 0.04520332717895508, 0.04512857437133789, 0.04519036865234375, 0.04541024017333985, 0.04508758544921875, 0.045088768005371094, 0.04489215850830078, 0.04485529708862305, 0.04514543914794922, 0.04493174362182617, 0.044826881408691406, 0.044754688262939456, 0.044935169219970705, 0.04493856048583984, 0.04475769424438476, 0.0452567024230957, 
0.044875774383544925, 0.04484067153930664, 0.044959102630615234, 0.04515049743652344, 0.04963350296020508, 0.04539993667602539, 0.04519363021850586, 0.04522406387329102, 0.04520345687866211, 0.045764606475830076, 0.045039615631103515, 0.04553113555908203, 0.04512153625488281, 0.0450334701538086, 0.0452751350402832, 0.04541996765136719, 0.04516719818115234, 0.04555587387084961, 0.04564476776123047, 0.04566412734985351, 0.04546579360961914, 0.04567267227172851, 0.045382175445556644, 0.04529155349731445, 0.04640892791748047, 0.04569363021850586, 0.04766857528686524, 0.04542892837524414, 0.04546166229248047, 0.045335006713867185, 0.0454466552734375, 0.045496864318847655, 0.045270912170410155, 0.045099136352539065, 0.044918785095214846, 0.04573606491088867, 0.04484844970703125, 0.045066814422607425, 0.04503257751464844, 0.045053855895996094, 0.04523311996459961, 0.04535456085205078, 0.04515843200683594, 0.045117855072021484, 0.045156352996826174, 0.04501708984375, 0.045658016204833986, 0.04522608184814453, 0.045330432891845705, 0.04514815902709961, 0.045330047607421875, 0.045300289154052736, 0.04512876892089844, 0.044900096893310544, 0.04477030563354492, 0.04516864013671875, 0.044990463256835936, 0.045702751159667966, 0.04481884765625, 0.04486134338378906, 0.04476457595825195, 0.044816638946533205, 0.04478611373901367, 0.044777473449707034, 0.04501440048217773, 0.045085311889648434, 0.045004127502441406, 0.04507920074462891, 0.04531337738037109, 0.04475766372680664, 0.04482252883911133, 0.04452083206176758, 0.044584766387939456, 0.04421635055541992, 0.04408195114135742, 0.04453171157836914, 0.04499276733398438, 0.045233505249023434, 0.04523984146118164, 0.045079425811767576, 0.044984321594238284, 0.0450437126159668, 0.044849056243896485, 0.0456233901977539, 0.04505132675170898, 0.04494803237915039, 0.04477452850341797, 0.04603209686279297, 0.04529436874389649, 0.04483071899414062, 0.044972030639648435, 0.04478060913085938, 0.045247425079345704, 0.04525056076049805, 0.044865535736083983, 0.04499596786499024, 0.04546214294433594, 0.04491231918334961, 0.04502320098876953, 0.04872022247314453, 0.04524031829833984, 0.045006847381591795, 0.04511331176757812, 0.04501641464233398, 0.045513153076171875, 0.045590782165527345, 0.045069534301757815, 0.04514691162109375, 0.04506009674072266, 0.04589932632446289, 0.04497571182250976, 0.045077342987060544, 0.045294944763183596, 0.04523491287231445, 0.04534451293945312, 0.04533881759643555, 0.04566835021972656, 0.04558028793334961, 0.045592575073242186, 0.04518889617919922, 0.04541667175292969, 0.045295616149902344, 0.04525696182250977, 0.04525235366821289, 0.04508467102050781, 0.04547993469238281, 0.045184223175048825, 0.045638656616210936, 0.045378849029541014, 0.04521011352539062, 0.045625343322753906, 0.04589977645874024, 0.04516390228271484, 0.046706527709960935, 0.045980384826660156, 0.045752384185791015, 0.045748222351074216, 0.045768672943115235, 0.04542806243896484, 0.04542319869995117, 0.045631423950195316, 0.04545756912231445, 0.04555926513671875, 0.04558428955078125, 0.0457529296875, 0.04578911972045899, 0.045463649749755856, 0.04535500717163086, 0.04583833694458008, 0.04530790328979492, 0.045641502380371096, 0.04517315292358398, 0.04521551895141602, 0.044988414764404294, 0.045338623046875, 0.04546559906005859, 0.04549836730957031, 0.0451778564453125, 0.045593185424804686, 0.04554297637939453, 0.0451973762512207, 0.04544182586669922, 0.04498799896240235, 0.045289886474609374, 0.04511667251586914, 0.04546579360961914, 0.045951553344726566, 
0.04574720001220703, 0.04574310302734375, 0.0455096321105957, 0.04552975845336914, 0.045418846130371095, 0.045109310150146485, 0.04505184173583984, 0.04602675247192383, 0.04735795211791992, 0.045350910186767575, 0.04511743927001953, 0.04527260971069336, 0.045299999237060545, 0.044992702484130856, 0.04636876678466797, 0.04554547119140625, 0.04537343978881836, 0.04501504135131836, 0.04487369537353516, 0.04489628982543945, 0.04527056121826172, 0.04557875061035156, 0.0452070083618164, 0.04543539047241211, 0.045895263671875, 0.04502345657348633, 0.045037761688232425, 0.04504956817626953, 0.0452322883605957, 0.0452704963684082, 0.04500243377685547, 0.04466310501098633, 0.04469532775878906, 0.04484131240844726, 0.0450437126159668, 0.04470454406738281, 0.04516972732543945, 0.04480684661865234, 0.0446668815612793, 0.045047103881835936, 0.0448903694152832, 0.04741164779663086, 0.04531814575195312, 0.04534211349487305, 0.04536790466308594, 0.04468668746948242, 0.04611385726928711, 0.04510079956054688, 0.04536521530151367, 0.04532368087768555, 0.045275390625, 0.04520499038696289, 0.04505702209472656, 0.04496198272705078, 0.04547155380249023, 0.04515430450439453, 0.045301025390625, 0.044994400024414065, 0.04508777618408203, 0.04506198501586914, 0.045467647552490234, 0.045146110534667966, 0.04503142547607422, 0.04805836868286133, 0.04520550537109375, 0.045553375244140625, 0.044937503814697265, 0.04518473434448242, 0.04524025726318359, 0.04519971084594727, 0.04564377593994141, 0.04507033538818359, 0.04510438537597656, 0.044924671173095704, 0.04531110382080078, 0.045176513671875, 0.045209503173828124, 0.04506639862060547, 0.04532646560668945, 0.04509247970581055, 0.045230464935302736, 0.04581785583496094, 0.045238273620605465, 0.045348926544189455, 0.04529296112060547, 0.04549276733398438, 0.045315807342529296, 0.04553123092651367, 0.045606849670410156, 0.04531600189208984, 0.0452446403503418, 0.04500492858886719, 0.044935169219970705, 0.044918880462646485, 0.04560031890869141, 0.04615622329711914, 0.04523203277587891, 0.04513587188720703, 0.045174785614013675, 0.04501504135131836, 0.04492287826538086, 0.044816383361816405, 0.04504572677612305, 0.04504988861083985, 0.04521561431884766, 0.045428478240966796, 0.044988800048828125, 0.04511967849731445, 0.045010433197021485]",tokens/s,22.215828470841142,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,876.408832,655.294464,0.0,260.046848,258.555392,s,1,7.42957080078125,7.42957080078125,0.0,7.42957080078125,7.42957080078125,7.42957080078125,7.42957080078125,[7.42957080078125],,kWh,1.47450138791631e-05,1.617255663913867e-06,4.5072258280029476e-06,2.0869495371079913e-05,,MB,1354.1376,751.763456,0.0,341.835776,317.950464,s,14,0.18944127941131592,0.013531519957951137,0.00010986742841942603,0.01349833583831787,0.013646256065368654,0.01368941445350647,0.013752180376052857,"[0.013313983917236328, 
0.013539551734924316, 0.013508831977844239, 0.01347913646697998, 0.013476960182189942, 0.01360489559173584, 0.01364412784576416, 0.013487839698791504, 0.013767871856689453, 0.013647168159484864, 0.013454751968383789, 0.013474368095397949, 0.013429311752319336, 0.013612480163574218]",tokens/s,18918.791148039072,kWh,3.933527302672926e-07,4.33739737116998e-08,2.0916496218059023e-07,6.458916661595826e-07,tokens/kWh,396351297.6133512,MB,1394.151424,779.026432,0.0,369.098752,317.953024,s,14,9.84946746826172,0.7035333905901229,0.003768367487343936,0.7027530212402344,0.7062368774414063,0.7098903015136719,0.7145298669433594,"[0.703157470703125, 0.7049987182617188, 0.7024894409179687, 0.7156897583007813, 0.7067675170898438, 0.7030166015625, 0.703255859375, 0.7013170166015625, 0.7020364990234375, 0.7000364379882813, 0.7017915649414063, 0.7006768188476562, 0.7031799926757812, 0.7010537719726563]",tokens/s,89.54798854274092,kWh,2.0345932114177316e-05,2.243730290200346e-06,7.366507765019225e-06,2.9956170169396877e-05,tokens/kWh,2103072.5771601,,s,882,9.842919075965877,0.011159772194972654,0.00023924831474364988,0.01113099193572998,0.01124176025390625,0.01131808614730835,0.01177602387428283,"[0.010941344261169434, 0.011363776206970214, 0.011177696228027344, 0.011175007820129394, 0.011122431755065918, 0.011148544311523437, 0.011139840126037597, 0.011110239982604981, 0.011110560417175293, 0.011093024253845216, 0.011267040252685547, 0.011154656410217284, 0.01115011215209961, 0.011115551948547363, 0.011127488136291504, 0.011277728080749512, 0.011207488059997559, 0.01118012809753418, 0.011095711708068848, 0.011179936408996583, 0.01119375991821289, 0.011166720390319825, 0.011177087783813476, 0.01110431957244873, 0.011184608459472656, 0.011194720268249511, 0.011208703994750976, 0.01113702392578125, 0.011073247909545898, 0.011125023841857911, 0.011114496231079102, 0.011100128173828125, 0.011062751770019532, 0.01110108757019043, 0.011156959533691406, 0.011157600402832032, 0.011135071754455566, 0.011164928436279297, 0.011156576156616211, 0.011145888328552246, 0.011122879981994628, 0.011342687606811524, 0.011103743553161622, 0.011163135528564454, 0.011170720100402832, 0.011165760040283202, 0.011130816459655761, 0.01114476776123047, 0.011173664093017578, 0.011193056106567383, 0.011124640464782716, 0.011087008476257324, 0.011164192199707032, 0.01119491195678711, 0.011165568351745606, 0.011206175804138184, 0.01113491153717041, 0.011127327919006348, 0.011151231765747071, 0.011148991584777833, 0.01112720012664795, 0.011130911827087402, 0.011213120460510253, 0.010954303741455078, 0.011210463523864745, 0.01124176025390625, 0.01118825626373291, 0.011138848304748535, 0.01108182430267334, 0.011115167617797852, 0.011206656455993653, 0.011228896141052247, 0.01124176025390625, 0.01117360019683838, 0.011184415817260741, 0.01120684814453125, 0.011245375633239746, 0.011136096000671386, 0.011216128349304199, 0.01130281639099121, 0.011754591941833497, 0.011418368339538574, 0.011681695938110352, 0.011278240203857422, 0.011237471580505372, 0.01115340805053711, 0.011183199882507324, 0.011211199760437012, 0.011125375747680664, 0.011070464134216309, 0.011231583595275879, 0.011270591735839844, 0.011148351669311524, 0.01110319995880127, 0.011022432327270508, 0.011134048461914063, 0.01113379192352295, 0.011118528366088867, 0.011077535629272462, 0.011219103813171387, 0.011156607627868652, 0.011118911743164062, 0.011112992286682128, 0.011052864074707031, 0.011137344360351563, 0.011151007652282715, 0.011179583549499512, 0.011157535552978515, 
0.011227231979370117, 0.011225855827331543, 0.011244864463806152, 0.011121408462524414, 0.01120847988128662, 0.011055071830749512, 0.011143296241760255, 0.011314656257629395, 0.011210463523864745, 0.011153632164001465, 0.011084063529968262, 0.011165216445922851, 0.011153344154357911, 0.0111080961227417, 0.011110752105712891, 0.011097760200500488, 0.011101152420043945, 0.011098112106323242, 0.011083776473999024, 0.01118575954437256, 0.011216832160949706, 0.011106783866882325, 0.011353631973266602, 0.011212736129760743, 0.011196672439575195, 0.011125023841857911, 0.011074655532836914, 0.011131808280944825, 0.011109503746032715, 0.011152192115783692, 0.011098176002502442, 0.011136832237243653, 0.01120684814453125, 0.011119903564453125, 0.011059776306152343, 0.011032768249511719, 0.011138175964355469, 0.011160575866699218, 0.011132767677307129, 0.01115932846069336, 0.01107590389251709, 0.011148832321166991, 0.01115135955810547, 0.011134943962097167, 0.01102892780303955, 0.01109398365020752, 0.011085503578186036, 0.011180000305175781, 0.01107808017730713, 0.011048928260803222, 0.011134719848632813, 0.011190303802490234, 0.01112723159790039, 0.011104160308837891, 0.01115071964263916, 0.011108415603637695, 0.011082112312316894, 0.011053055763244628, 0.011026432037353515, 0.011161120414733886, 0.011169631958007813, 0.01121555233001709, 0.011244607925415038, 0.011281279563903809, 0.011198464393615723, 0.011200511932373047, 0.011091967582702637, 0.011086848258972168, 0.01109324836730957, 0.011120351791381836, 0.01112713623046875, 0.011166943550109863, 0.011259424209594726, 0.011164416313171386, 0.011220319747924805, 0.011131744384765625, 0.011110400199890137, 0.011106207847595214, 0.01116374397277832, 0.011292672157287598, 0.011222175598144531, 0.010917311668395996, 0.01114412784576416, 0.011158623695373536, 0.011120896339416503, 0.01117692756652832, 0.011144255638122559, 0.011136992454528809, 0.011213472366333008, 0.011282431602478027, 0.011269696235656738, 0.011231679916381836, 0.011124383926391602, 0.011125439643859864, 0.011226335525512695, 0.011428288459777832, 0.01146675205230713, 0.011367808341979981, 0.011214879989624024, 0.011143775939941406, 0.011190272331237794, 0.011162848472595214, 0.011170592308044434, 0.011122688293457032, 0.01109347152709961, 0.011268544197082519, 0.011224703788757324, 0.011176416397094727, 0.011530240058898926, 0.01113315200805664, 0.011179903984069824, 0.011193535804748534, 0.011199135780334473, 0.011156736373901367, 0.011137855529785157, 0.011186176300048829, 0.011183520317077637, 0.01116425609588623, 0.011165696144104004, 0.01173299217224121, 0.014544320106506348, 0.01618182373046875, 0.011280384063720703, 0.011198464393615723, 0.011116543769836425, 0.011068863868713379, 0.01122873592376709, 0.011391776084899902, 0.011184224128723145, 0.011110527992248535, 0.0110632963180542, 0.011188223838806152, 0.011187647819519043, 0.011209088325500488, 0.011177696228027344, 0.011166175842285157, 0.011193568229675294, 0.011149087905883788, 0.011174847602844238, 0.012023776054382325, 0.011214943885803222, 0.011070528030395509, 0.011652031898498534, 0.011185824394226074, 0.0108602876663208, 0.011147520065307618, 0.01116710376739502, 0.011112832069396972, 0.01118233585357666, 0.011255935668945312, 0.011179648399353027, 0.011198304176330566, 0.011126367568969727, 0.011052255630493163, 0.011148768424987793, 0.011163776397705077, 0.011129920005798339, 0.011135999679565429, 0.011160863876342773, 0.011546591758728027, 0.011295424461364747, 0.011179360389709472, 0.011199359893798827, 
0.01105840015411377, 0.011227871894836426, 0.011154975891113281, 0.011258303642272949, 0.011161760330200195, 0.011081472396850586, 0.011168928146362304, 0.011164287567138671, 0.01113219165802002, 0.011135295867919922, 0.011189184188842773, 0.011196096420288086, 0.011202527999877929, 0.011180064201354981, 0.011032032012939454, 0.011132672309875488, 0.011184896469116211, 0.011122688293457032, 0.011323488235473633, 0.011308320045471191, 0.011419391632080078, 0.01197555160522461, 0.011309056282043458, 0.011228704452514648, 0.011330016136169433, 0.011111647605895995, 0.011074336051940917, 0.011108351707458495, 0.011108415603637695, 0.011100095748901367, 0.01107148838043213, 0.011192031860351562, 0.011247903823852538, 0.011620351791381836, 0.011659263610839844, 0.011290623664855956, 0.011243488311767578, 0.011213855743408203, 0.011274304389953613, 0.011286623954772949, 0.011286527633666991, 0.01120751953125, 0.01111843204498291, 0.011159711837768555, 0.010941344261169434, 0.01117407989501953, 0.011140224456787109, 0.01110700798034668, 0.011132927894592285, 0.011065343856811523, 0.01115340805053711, 0.011149312019348144, 0.011103520393371582, 0.011050880432128906, 0.011127072334289551, 0.011153984069824218, 0.011129920005798339, 0.011140031814575195, 0.01112451171875, 0.011139295578002929, 0.011143168449401856, 0.011104479789733886, 0.011104031562805176, 0.011103903770446778, 0.011126463890075683, 0.011117216110229491, 0.011110400199890137, 0.01110166358947754, 0.01118671989440918, 0.011131072044372558, 0.011116448402404786, 0.011068896293640136, 0.011125184059143067, 0.011189696311950683, 0.011315775871276855, 0.011141152381896972, 0.011334624290466309, 0.011891712188720703, 0.011173824310302734, 0.011238752365112304, 0.011229920387268066, 0.011102208137512207, 0.011038368225097656, 0.011263615608215332, 0.011154144287109376, 0.011144191741943359, 0.011089920043945312, 0.011070464134216309, 0.011159903526306152, 0.011126432418823242, 0.011186431884765625, 0.011048992156982422, 0.011149312019348144, 0.011097824096679688, 0.011166848182678223, 0.01110108757019043, 0.011067008018493652, 0.011212639808654785, 0.011179648399353027, 0.011158528327941895, 0.011100031852722169, 0.011157695770263671, 0.011138879776000976, 0.011160767555236817, 0.011203136444091797, 0.011086079597473144, 0.011300864219665528, 0.01085209560394287, 0.011110048294067382, 0.011188159942626954, 0.011169728279113769, 0.011150015830993652, 0.011061471939086914, 0.011114303588867187, 0.011476991653442382, 0.011181952476501465, 0.011524127960205079, 0.011185600280761718, 0.01116431999206543, 0.01186739158630371, 0.011273056030273438, 0.011318207740783692, 0.011138143539428711, 0.011102399826049804, 0.01105299186706543, 0.011117728233337402, 0.011106911659240723, 0.011102463722229004, 0.01154800033569336, 0.011113216400146484, 0.011158592224121093, 0.01113491153717041, 0.01111673641204834, 0.011033087730407716, 0.011097760200500488, 0.011194304466247558, 0.01108620834350586, 0.011058719635009765, 0.011040767669677735, 0.01112112045288086, 0.011242560386657715, 0.011100959777832032, 0.011059616088867187, 0.011164416313171386, 0.011144191741943359, 0.011106304168701172, 0.011068896293640136, 0.011023167610168458, 0.011128543853759765, 0.011149184226989746, 0.01115347194671631, 0.011100223541259766, 0.011143168449401856, 0.011169728279113769, 0.011177984237670899, 0.01112070369720459, 0.011041119575500488, 0.011107872009277344, 0.011077407836914063, 0.01109023952484131, 0.011016415596008302, 0.011112256050109863, 0.011370495796203613, 
0.011130847930908204, 0.011155679702758789, 0.011083583831787109, 0.011202079772949219, 0.011153887748718262, 0.011153632164001465, 0.011073311805725098, 0.010983519554138184, 0.011087776184082031, 0.011039744377136231, 0.011114336013793946, 0.011288800239562989, 0.011171903610229492, 0.0110665283203125, 0.011108575820922851, 0.011151616096496581, 0.01116748809814453, 0.011157312393188477, 0.011087583541870118, 0.011172831535339356, 0.011235103607177735, 0.011172063827514648, 0.01112063980102539, 0.011113632202148437, 0.01115017604827881, 0.011139072418212891, 0.011148384094238281, 0.01109235191345215, 0.011116064071655274, 0.011148287773132324, 0.01147871971130371, 0.01111036777496338, 0.011050751686096191, 0.011104543685913086, 0.011149824142456055, 0.011140928268432618, 0.011120384216308593, 0.011069696426391601, 0.011169792175292969, 0.011143168449401856, 0.011140576362609863, 0.011098400115966797, 0.011114496231079102, 0.011147487640380859, 0.011125120162963867, 0.011101152420043945, 0.011135135650634766, 0.011196255683898926, 0.011119296073913574, 0.011065055847167968, 0.011036959648132325, 0.0110731840133667, 0.011099552154541016, 0.011201472282409667, 0.01113491153717041, 0.011075615882873535, 0.011122336387634277, 0.011143551826477052, 0.011057120323181152, 0.011052800178527831, 0.011088255882263184, 0.011153311729431152, 0.011129152297973633, 0.011118176460266114, 0.01103270435333252, 0.011129920005798339, 0.011178144454956055, 0.01110636806488037, 0.011047616004943847, 0.011058367729187012, 0.010780768394470215, 0.011065312385559082, 0.011125375747680664, 0.011120448112487793, 0.011062687873840332, 0.011113247871398925, 0.011122688293457032, 0.011100159645080567, 0.011126784324645997, 0.01109769630432129, 0.011084480285644531, 0.011134431838989258, 0.011124992370605468, 0.011141119956970215, 0.011108351707458495, 0.011134880065917969, 0.01115328025817871, 0.011472319602966309, 0.011174176216125488, 0.011065855979919433, 0.011192383766174317, 0.011132512092590332, 0.011147487640380859, 0.011114399909973144, 0.011091391563415527, 0.011176608085632323, 0.011175392150878906, 0.011188384056091308, 0.011311200141906739, 0.011192511558532715, 0.011206368446350097, 0.011184672355651855, 0.011143008232116699, 0.011181695938110352, 0.011068191528320313, 0.01114076805114746, 0.011122336387634277, 0.011076000213623047, 0.011078816413879394, 0.011148032188415527, 0.01115555191040039, 0.011147199630737304, 0.011223103523254395, 0.011112575531005859, 0.011203776359558106, 0.01115993595123291, 0.011137344360351563, 0.011116543769836425, 0.011128255844116211, 0.011247615814208984, 0.01117360019683838, 0.011164511680603027, 0.01103872013092041, 0.011104415893554688, 0.01111571216583252, 0.011096927642822265, 0.011063103675842284, 0.01112063980102539, 0.011143168449401856, 0.01112063980102539, 0.011144479751586915, 0.011129568099975586, 0.011147232055664062, 0.010875935554504395, 0.011146016120910644, 0.011135071754455566, 0.01112822437286377, 0.011112704277038574, 0.011060959815979004, 0.01110694408416748, 0.011091456413269044, 0.011142815589904785, 0.011023263931274414, 0.01111836814880371, 0.011116703987121583, 0.011122783660888673, 0.011101280212402344, 0.011078463554382325, 0.011095616340637207, 0.011108799934387207, 0.011075391769409179, 0.011032768249511719, 0.011097503662109374, 0.011090432167053223, 0.011096256256103516, 0.011050911903381347, 0.011053055763244628, 0.011085824012756347, 0.011154784202575683, 0.011037343978881837, 0.011053055763244628, 0.011124735832214355, 
0.011106176376342774, 0.011104607582092285, 0.011034399986267089, 0.011155136108398437, 0.011104576110839844, 0.011065343856811523, 0.011054752349853515, 0.011069791793823242, 0.011179327964782715, 0.011141695976257325, 0.011108799934387207, 0.01104851245880127, 0.01109004783630371, 0.011122688293457032, 0.011383935928344727, 0.011096832275390624, 0.011093407630920411, 0.01112985610961914, 0.011126496315002442, 0.011110400199890137, 0.011102208137512207, 0.011108351707458495, 0.0111627836227417, 0.011139583587646485, 0.011086112022399902, 0.011061311721801758, 0.011130880355834961, 0.011193504333496094, 0.011162464141845702, 0.011124608039855956, 0.011068799972534179, 0.011145024299621583, 0.011116607666015625, 0.011118911743164062, 0.010893952369689942, 0.011158687591552734, 0.011143520355224609, 0.011192352294921874, 0.01118620777130127, 0.01112492847442627, 0.011116543769836425, 0.011062784194946289, 0.011164159774780273, 0.011168095588684082, 0.011359295845031737, 0.011165599822998046, 0.011120927810668946, 0.011157919883728027, 0.011146431922912597, 0.011131967544555664, 0.011105152130126953, 0.011119071960449218, 0.011147680282592774, 0.011108672142028809, 0.01107545566558838, 0.011011296272277833, 0.011091903686523437, 0.011102527618408202, 0.011107904434204102, 0.01114566421508789, 0.011201184272766113, 0.01152787208557129, 0.01115555191040039, 0.011142399787902832, 0.011077983856201172, 0.011073856353759766, 0.01113424015045166, 0.011128800392150879, 0.01109705638885498, 0.011064288139343261, 0.01110099220275879, 0.011134176254272462, 0.01113145637512207, 0.011017951965332031, 0.01108784008026123, 0.011156000137329102, 0.011167296409606934, 0.011186047554016114, 0.011082176208496095, 0.011183584213256836, 0.011114432334899902, 0.01113161563873291, 0.011051008224487305, 0.011079775810241698, 0.011113887786865234, 0.011139552116394042, 0.011268128395080566, 0.011103967666625976, 0.011116191864013672, 0.011129471778869629, 0.011186176300048829, 0.011184127807617187, 0.011046560287475585, 0.011173919677734375, 0.011143487930297851, 0.011087424278259277, 0.011038240432739257, 0.01078700828552246, 0.0110632963180542, 0.011118399620056152, 0.011126976013183593, 0.01125376033782959, 0.011085824012756347, 0.0110448637008667, 0.011282431602478027, 0.01118723201751709, 0.011090911865234376, 0.011081119537353516, 0.011073823928833007, 0.011167167663574219, 0.011213695526123047, 0.011106304168701172, 0.011085824012756347, 0.011071552276611328, 0.011092000007629395, 0.011117695808410644, 0.011154208183288574, 0.011398271560668945, 0.011228032112121582, 0.011223039627075194, 0.011195391654968261, 0.011092415809631348, 0.011085856437683105, 0.011086655616760253, 0.01111366367340088, 0.01108022403717041, 0.011011199951171875, 0.011122943878173829, 0.011145855903625488, 0.01109830379486084, 0.011044447898864745, 0.011092191696166992, 0.011116800308227539, 0.011321056365966796, 0.011131199836730957, 0.011031295776367188, 0.01110319995880127, 0.011089920043945312, 0.011059391975402831, 0.011020352363586425, 0.01109552001953125, 0.011090208053588867, 0.01111638355255127, 0.011085887908935547, 0.011051103591918946, 0.011122688293457032, 0.011141119956970215, 0.011161248207092285, 0.011260255813598633, 0.011106304168701172, 0.011166815757751464, 0.011076319694519042, 0.011063488006591796, 0.01105072021484375, 0.011149600028991699, 0.011097887992858886, 0.011094240188598633, 0.01103001594543457, 0.011089728355407715, 0.011077407836914063, 0.01087980842590332, 0.011154879570007324, 0.01110358428955078, 
0.011074527740478516, 0.011075231552124023, 0.01111689567565918, 0.011067647933959961, 0.011042336463928222, 0.011151679992675781, 0.011315103530883788, 0.011077055931091309, 0.011061408042907716, 0.011084351539611816, 0.011010944366455078, 0.011112992286682128, 0.01108627223968506, 0.011069439888000488, 0.011048959732055665, 0.01162668800354004, 0.011478752136230468, 0.012269887924194336, 0.011263775825500488, 0.01119001579284668, 0.011268351554870605, 0.011083583831787109, 0.011171392440795899, 0.011149888038635253, 0.011400287628173827, 0.011195008277893066, 0.011087200164794923, 0.011322591781616211, 0.011118751525878906, 0.011222368240356445, 0.011125023841857911, 0.01122713565826416, 0.011135135650634766, 0.011127840042114258, 0.01108886432647705, 0.011122207641601562, 0.011157279968261718, 0.011148127555847168, 0.011218111991882324, 0.011096896171569825, 0.01106707191467285, 0.011073663711547851, 0.011097984313964844, 0.011128576278686523, 0.011086367607116699, 0.011052767753601074, 0.011158559799194336, 0.011148480415344239, 0.01110428810119629, 0.011009792327880859, 0.011145119667053222, 0.011122400283813477, 0.01107148838043213, 0.0110546236038208, 0.01108905601501465, 0.01108140754699707, 0.01107529640197754, 0.011081055641174316, 0.011011008262634278, 0.0112391357421875, 0.01091443157196045, 0.011130623817443848, 0.011122048377990723, 0.011098079681396485, 0.011203519821166992, 0.011013759613037109, 0.011123231887817383, 0.011066240310668945, 0.01099068832397461, 0.010989439964294434, 0.011079296112060547, 0.011090304374694824, 0.011075391769409179, 0.01202400016784668, 0.011206815719604492, 0.011120256423950195, 0.01212054443359375, 0.011155200004577637, 0.011122528076171876, 0.011075584411621094, 0.011036704063415528, 0.01120032024383545, 0.011189760208129883, 0.011067520141601563, 0.010996352195739746, 0.011004192352294922, 0.011168895721435547, 0.011096735954284669, 0.011144831657409668, 0.01099232006072998, 0.011218624114990234, 0.011103936195373535, 0.011016511917114257, 0.011026559829711914, 0.011050111770629882, 0.011076352119445802, 0.011061504364013671, 0.0110829439163208, 0.011075551986694337, 0.011119199752807616, 0.011152704238891602, 0.011086175918579102, 0.010969440460205078, 0.01120240020751953, 0.011106399536132813, 0.01124358367919922, 0.011048959732055665, 0.011015520095825196, 0.011176959991455078, 0.01108137607574463, 0.011034848213195801, 0.011040543556213379, 0.011073823928833007, 0.011076512336730958, 0.01101318359375, 0.010989312171936036, 0.011177248001098632, 0.011097087860107421, 0.011202272415161132, 0.011142592430114746, 0.011035039901733398, 0.011093888282775879, 0.011084416389465331]",tokens/s,89.60756389368666,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4192.362496,4878.958592,0.0,4483.710976,4465.672704,s,1,10.4747197265625,10.4747197265625,0.0,10.4747197265625,10.4747197265625,10.4747197265625,10.4747197265625,[10.4747197265625],,kWh,0.00010290706125000497,1.1344051261301709e-05,3.134030284999284e-05,0.0001455914153612995,,MB,2009.448448,5302.583296,0.0,4892.655616,4841.339904,s,10,1.9589540557861327,0.19589540557861326,0.000730593187748986,0.1959886245727539,0.19668135223388672,0.19678909378051757,0.19687528701782225,"[0.19454258728027343, 0.1952676544189453, 0.1957604522705078, 0.19513203430175782, 0.19553926086425782, 0.1963337860107422, 0.19689683532714844, 0.19665740966796874, 0.19660723876953126, 0.196216796875]",tokens/s,1306.8198268553401,kWh,5.72639562581696e-06,6.315141935190448e-07,3.7844801517646213e-06,1.0142389971100627e-05,tokens/kWh,25240599.181202605,MB,2013.175808,5470.355456,0.0,5060.427776,5012.931584,s,10,18.542546630859377,1.8542546630859378,0.0027287882253230152,1.8539893188476562,1.8574702392578124,1.8578740966796876,1.8581971826171877,"[1.8564677734375, 1.8568209228515624, 1.8552591552734374, 1.851202392578125, 1.8520284423828124, 1.8582779541015626, 1.852719482421875, 1.8573804931640625, 1.850620361328125, 1.8517696533203125]",tokens/s,33.97591563564006,kWh,5.4722095011683493e-05,6.035683385533565e-06,3.645494092863534e-05,9.72127193258524e-05,tokens/kWh,648063.3443533968,,s,630,18.539297084808336,0.029427455690171984,0.00034974845589844236,0.02939247989654541,0.029673522758483888,0.0298257905960083,0.030526356506347663,"[0.029909503936767577, 0.029638656616210936, 0.02937411117553711, 0.02945574378967285, 0.029524160385131837, 0.029345855712890626, 0.029495616912841797, 0.029454751968383788, 0.029765151977539064, 0.029397472381591797, 0.02957107162475586, 0.029327360153198243, 0.029491199493408202, 0.029284191131591798, 0.02931884765625, 0.029153120040893554, 0.029159135818481445, 0.029221151351928713, 0.029237119674682618, 0.029270368576049803, 0.029288063049316405, 0.029253631591796874, 0.029272096633911133, 0.02945305633544922, 0.029200096130371094, 0.029105855941772462, 0.029194431304931642, 0.02931065559387207, 0.02925049591064453, 0.029285503387451173, 0.02926825523376465, 0.02973734474182129, 0.029673471450805664, 0.029378559112548826, 0.029515264511108398, 0.02967398452758789, 0.0293621768951416, 0.02944236755371094, 0.029636287689208986, 0.02928816032409668, 0.029359615325927735, 0.029237951278686523, 0.029365983963012696, 0.029413087844848633, 0.02949964714050293, 0.02945712089538574, 0.029346656799316407, 0.029310911178588868, 0.029416351318359374, 0.029439552307128906, 0.029510208129882812, 0.029767328262329102, 0.029622495651245116, 0.029453407287597655, 0.029606559753417968, 0.02973311996459961, 0.029749248504638674, 0.029574304580688476, 0.02967638397216797, 0.02955628776550293, 0.029507423400878908, 0.029516384124755858, 0.031001888275146484, 0.029954143524169922, 0.029574752807617188, 0.029323904037475586, 0.029220800399780273, 0.02999692726135254, 0.03162860870361328, 0.029438655853271486, 0.02944819259643555, 0.029464544296264647, 0.02954185676574707, 0.030299808502197264, 0.029459360122680665, 0.029310592651367186, 0.02936832046508789, 0.03026959991455078, 0.02949955177307129, 0.029281536102294923, 0.029370655059814454, 0.02941168022155762, 0.029419967651367188, 0.02921436882019043, 0.029162687301635744, 0.02950422477722168, 0.029546207427978515, 0.02944630432128906, 0.02931724739074707, 0.029560192108154297, 
0.029442943572998048, 0.029304191589355467, 0.029278495788574218, 0.029324640274047853, 0.029187007904052733, 0.029247488021850586, 0.029232767105102538, 0.0294465274810791, 0.029461503982543946, 0.029527040481567384, 0.029529951095581056, 0.02947603225708008, 0.02949734306335449, 0.029506528854370117, 0.029345344543457032, 0.02918649673461914, 0.029361631393432615, 0.029876319885253907, 0.029235328674316406, 0.02920444869995117, 0.029301088333129884, 0.029220256805419922, 0.029301599502563478, 0.029248447418212892, 0.029299520492553712, 0.029245664596557617, 0.029251359939575197, 0.0292044792175293, 0.029278207778930664, 0.029365503311157226, 0.02936284828186035, 0.0295402889251709, 0.029642208099365235, 0.029563583374023438, 0.02944755172729492, 0.02956755256652832, 0.030022655487060547, 0.029667455673217772, 0.029375328063964843, 0.02900752067565918, 0.029028736114501952, 0.028999679565429686, 0.02918396759033203, 0.029216800689697266, 0.02949862480163574, 0.02939571189880371, 0.029153408050537108, 0.029073280334472658, 0.02902016067504883, 0.02914454460144043, 0.029032991409301757, 0.029065439224243164, 0.029169439315795898, 0.02944144058227539, 0.029257535934448242, 0.029057472229003907, 0.029147359848022462, 0.029644031524658204, 0.029641408920288086, 0.029490976333618163, 0.029284767150878906, 0.029294496536254884, 0.02913699150085449, 0.02909388732910156, 0.02928428840637207, 0.02946361541748047, 0.02939187240600586, 0.0336176643371582, 0.02991119956970215, 0.029376895904541015, 0.029505247116088866, 0.02959526443481445, 0.029585792541503907, 0.029623359680175782, 0.029445152282714843, 0.029308832168579102, 0.029360288619995116, 0.0293703670501709, 0.030125280380249024, 0.02945270347595215, 0.029357631683349608, 0.029270687103271485, 0.029290496826171877, 0.029256704330444337, 0.02927462387084961, 0.029406848907470702, 0.029352096557617186, 0.02949193572998047, 0.029442079544067384, 0.029292512893676757, 0.029324928283691407, 0.029614463806152343, 0.02952342414855957, 0.029528608322143556, 0.029421567916870117, 0.029503488540649415, 0.02954863929748535, 0.0294289608001709, 0.029684415817260744, 0.0297523193359375, 0.029770175933837892, 0.02915385627746582, 0.029159423828125, 0.029099807739257813, 0.029118688583374023, 0.029166784286499024, 0.029563104629516602, 0.028977760314941405, 0.029261503219604492, 0.029386911392211914, 0.02908380889892578, 0.029038591384887694, 0.029088991165161133, 0.029020063400268553, 0.028937088012695313, 0.029073055267333985, 0.02918147277832031, 0.029166303634643554, 0.029329504013061523, 0.029071359634399413, 0.029085920333862304, 0.029038368225097658, 0.02931657600402832, 0.029262367248535155, 0.029195552825927736, 0.029118335723876954, 0.029426048278808594, 0.029277791976928712, 0.02915622329711914, 0.029306400299072267, 0.029336000442504884, 0.029235231399536134, 0.029220863342285155, 0.029423551559448244, 0.029370304107666015, 0.029495168685913085, 0.029702655792236327, 0.029483999252319336, 0.02942051124572754, 0.02956883239746094, 0.029426816940307618, 0.029334400177001955, 0.029464128494262696, 0.029690303802490235, 0.029503231048583985, 0.029387008666992186, 0.02944112014770508, 0.029430335998535156, 0.0292926082611084, 0.0293798713684082, 0.029733312606811522, 0.02996895980834961, 0.031061183929443358, 0.029795135498046875, 0.0295546875, 0.029417184829711913, 0.029473056793212892, 0.029652992248535157, 0.029476255416870118, 0.02950819206237793, 0.02946406364440918, 0.029464448928833008, 0.029804447174072265, 0.029510431289672852, 
0.029244863510131835, 0.028961343765258787, 0.02896895980834961, 0.029816831588745117, 0.02998454475402832, 0.02921084785461426, 0.029366207122802735, 0.02922310447692871, 0.029061151504516602, 0.029484895706176757, 0.029042144775390626, 0.029190656661987304, 0.029189727783203126, 0.029192352294921876, 0.029225248336791992, 0.029355648040771485, 0.029477247238159178, 0.029057024002075195, 0.029138944625854493, 0.029259584426879884, 0.029411231994628906, 0.02944643211364746, 0.029259136199951172, 0.029411008834838867, 0.029475008010864258, 0.029392831802368163, 0.029491327285766603, 0.02947747230529785, 0.0295251522064209, 0.029635520935058595, 0.029597183227539063, 0.02963817596435547, 0.029453279495239258, 0.029292640686035157, 0.02945167922973633, 0.029831680297851562, 0.029818592071533204, 0.02948534393310547, 0.02932326316833496, 0.029322719573974608, 0.029368032455444337, 0.029411264419555664, 0.029404256820678713, 0.02927788734436035, 0.02917180824279785, 0.02922700881958008, 0.029245311737060548, 0.02931622314453125, 0.02930521583557129, 0.02944063949584961, 0.029624319076538085, 0.029490367889404297, 0.029416095733642577, 0.029392127990722657, 0.02951046371459961, 0.029485151290893553, 0.029445600509643555, 0.029475360870361327, 0.029458080291748047, 0.029382848739624025, 0.02936627197265625, 0.029882623672485353, 0.029355968475341797, 0.029192256927490234, 0.02931711959838867, 0.029816640853881835, 0.029061311721801757, 0.028929279327392577, 0.029051647186279297, 0.029187616348266603, 0.029433887481689455, 0.02958790397644043, 0.0293951358795166, 0.029167104721069335, 0.029065536499023437, 0.029046432495117187, 0.029187744140625, 0.029477567672729493, 0.02921452713012695, 0.029044927597045897, 0.029197919845581056, 0.029217536926269532, 0.029451488494873047, 0.029360832214355467, 0.029515167236328126, 0.02936604881286621, 0.029381023406982423, 0.029409311294555665, 0.029412895202636718, 0.029237247467041014, 0.029327680587768554, 0.029423904418945313, 0.029560831069946288, 0.0295546875, 0.029480960845947264, 0.029542400360107423, 0.029582687377929687, 0.02946099281311035, 0.029913248062133788, 0.029570079803466796, 0.029487552642822264, 0.03219878387451172, 0.029562591552734375, 0.02949600028991699, 0.02947088050842285, 0.02940707206726074, 0.029658336639404297, 0.02957391929626465, 0.029539871215820312, 0.029364704132080078, 0.029391040802001955, 0.02971628761291504, 0.02975062370300293, 0.02956559944152832, 0.030261247634887696, 0.02964406394958496, 0.029465311050415038, 0.029503488540649415, 0.029550048828125, 0.029575647354125975, 0.029562463760375978, 0.029472896575927734, 0.02958176040649414, 0.029796255111694335, 0.030254016876220702, 0.030231359481811524, 0.029406816482543945, 0.029314464569091796, 0.02925811195373535, 0.02926019287109375, 0.029103967666625978, 0.029227392196655273, 0.02911836814880371, 0.029110015869140624, 0.029147232055664062, 0.02934796714782715, 0.02911039924621582, 0.029204320907592774, 0.029208127975463866, 0.029505823135375978, 0.029337440490722656, 0.029181791305541993, 0.029131391525268554, 0.029216768264770508, 0.029294591903686523, 0.029347679138183595, 0.02925827217102051, 0.02935308837890625, 0.029478687286376953, 0.02923593521118164, 0.02925574493408203, 0.029232704162597656, 0.029306976318359376, 0.02943824005126953, 0.02947804832458496, 0.029408096313476562, 0.029459775924682616, 0.029377216339111327, 0.029669376373291017, 0.029638240814208985, 0.029526432037353514, 0.029834367752075194, 0.02954489517211914, 0.029353952407836913, 
0.02929916763305664, 0.029177215576171874, 0.029147071838378905, 0.029189920425415038, 0.029250463485717772, 0.02930678367614746, 0.029253503799438477, 0.02937059211730957, 0.02940928077697754, 0.02977689552307129, 0.02944691276550293, 0.02974131202697754, 0.029578527450561522, 0.029375200271606446, 0.029525632858276366, 0.02943833541870117, 0.02935398483276367, 0.02931692886352539, 0.029595327377319337, 0.02960742378234863, 0.029578367233276368, 0.029712064743041992, 0.029792448043823243, 0.03041539192199707, 0.029513696670532226, 0.029206527709960937, 0.029146495819091796, 0.029067903518676757, 0.028960031509399416, 0.029075775146484375, 0.029071775436401368, 0.02916147232055664, 0.029495296478271486, 0.029437952041625977, 0.029400672912597656, 0.029184415817260743, 0.0295280647277832, 0.02902016067504883, 0.029052831649780272, 0.02913699150085449, 0.029251615524291993, 0.029282272338867186, 0.02923091125488281, 0.029300352096557618, 0.02954911994934082, 0.029376096725463867, 0.029476320266723633, 0.032368896484375, 0.029327392578125, 0.029391519546508788, 0.029566656112670897, 0.029581151962280273, 0.029303264617919923, 0.02929654312133789, 0.029492992401123047, 0.029550592422485353, 0.029417119979858398, 0.029340351104736328, 0.02944553565979004, 0.029550687789916992, 0.029647104263305663, 0.02958892822265625, 0.02938479995727539, 0.02926665687561035, 0.029251583099365236, 0.029176959991455077, 0.029232320785522462, 0.02932703971862793, 0.029315071105957033, 0.029339487075805665, 0.029685184478759764, 0.029428287506103514, 0.029359359741210938, 0.029597951889038087, 0.029844127655029296, 0.02974220848083496, 0.02961292839050293, 0.030263296127319338, 0.029531360626220703, 0.02958415985107422, 0.029485055923461914, 0.029638656616210936, 0.02959974479675293, 0.029732704162597656, 0.029634592056274413, 0.029853279113769532, 0.029878559112548827, 0.029492895126342775, 0.029895328521728517, 0.0290119686126709, 0.029081087112426757, 0.02926643180847168, 0.029258752822875978, 0.029293567657470702, 0.029197696685791016, 0.02942425537109375, 0.02954444885253906, 0.02915043258666992, 0.029033248901367187, 0.02915123176574707, 0.029173696517944336, 0.02922502326965332, 0.02914508819580078, 0.029165567398071288, 0.029062976837158205, 0.029172992706298827, 0.028998336791992187, 0.029112287521362305, 0.029335840225219727, 0.02954444885253906, 0.029403135299682616, 0.02924736022949219, 0.029265375137329103, 0.02913961601257324, 0.02915328025817871, 0.02918809509277344, 0.02920243263244629, 0.029421567916870117, 0.02934988784790039, 0.02921388816833496, 0.02927712059020996, 0.029277503967285155, 0.029311424255371095, 0.02932748794555664, 0.029460639953613282, 0.02948899269104004, 0.02934988784790039, 0.029482080459594728, 0.029376480102539064, 0.029567935943603515, 0.030571680068969726, 0.029667167663574218, 0.029440128326416015, 0.029438848495483397, 0.02949734306335449, 0.02936841583251953, 0.029333568572998046, 0.029539520263671876, 0.02949734306335449, 0.029475488662719727, 0.02935807991027832, 0.02929254341125488, 0.029460479736328125, 0.02961756706237793, 0.029600351333618165, 0.02950886344909668, 0.0294849910736084, 0.029383487701416015, 0.029529407501220704, 0.029946079254150392, 0.029463008880615236, 0.02920243263244629, 0.029011648178100587, 0.028928319931030275, 0.02901798439025879, 0.028972320556640625, 0.02903536033630371, 0.029104127883911132, 0.02922652816772461, 0.029562623977661132, 0.029198463439941407, 0.029184415817260743, 0.029349855422973633, 0.029239519119262695, 
0.029216768264770508, 0.029222272872924806, 0.029462623596191406, 0.029169792175292968, 0.02924790382385254, 0.02952191925048828, 0.029404352188110352, 0.029258207321166994, 0.029364160537719727, 0.029266336441040038, 0.02921881675720215, 0.029247104644775392, 0.02968409538269043, 0.029187328338623048, 0.029157760620117188, 0.029445663452148437, 0.029276159286499022, 0.02922172737121582, 0.029253568649291992, 0.029312480926513673, 0.02932566452026367, 0.029384223937988282, 0.029728607177734376, 0.02951628875732422, 0.029397375106811525, 0.029503488540649415, 0.02938265609741211, 0.02964860725402832, 0.029525503158569336, 0.029593503952026368, 0.029334016799926758, 0.0295548152923584, 0.029509632110595704, 0.02950320053100586, 0.029516319274902343, 0.02954572868347168, 0.029511615753173827, 0.02954323196411133, 0.029497152328491212, 0.02940889549255371, 0.0295350399017334, 0.029660703659057618, 0.02965116882324219, 0.029755392074584962, 0.02963862419128418, 0.029620256423950195, 0.02954841613769531, 0.029589632034301757]",tokens/s,33.98187089392082,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7142.019072,7948.075008,0.0,7545.552896,7295.865344,s,1,13.7755009765625,13.7755009765625,0.0,13.7755009765625,13.7755009765625,13.7755009765625,13.7755009765625,[13.7755009765625],,kWh,0.00016670733450002142,1.8381498402713e-05,4.816531630999488e-05,0.0002332541492127293,,MB,3076.268032,8264.74496,0.0,7847.542784,7548.649984,s,10,3.3178253479003903,0.33178253479003905,0.0014311546756754076,0.331835693359375,0.3332368133544922,0.33350926055908203,0.3337272183227539,"[0.32855322265625, 0.3309906311035156, 0.3315560302734375, 0.3321153564453125, 0.3337817077636719, 0.33078115844726563, 0.33277313232421873, 0.3312745666503906, 0.33317626953125, 0.3328232727050781]",tokens/s,771.5897407378714,kWh,9.664844486425048e-06,1.0658492624320035e-06,6.428437042387126e-06,1.7159130791244174e-05,tokens/kWh,14919170.622012489,MB,3091.816448,8558.34624,0.0,8141.144064,7829.444096,s,10,26.952484130859375,2.6952484130859373,0.005234273992958838,2.695465576171875,2.701098071289062,2.7014954467773435,2.7018133471679686,"[2.690505859375, 2.6835625, 2.6921923828125, 2.69528955078125, 2.701009765625, 2.6955673828125, 2.701892822265625, 2.69536376953125, 2.696771728515625, 2.700328369140625]",tokens/s,23.374468822288573,kWh,7.883778776190839e-05,8.695642465579662e-06,5.236274887941276e-05,0.00013989617910690084,tokens/kWh,450333.9576691292,,s,630,26.948327713012684,0.042775123353988406,0.00031395352876542853,0.04276924705505371,0.04311631278991699,0.04322626991271973,0.043786819648742675,"[0.04240991973876953, 0.042474369049072265, 0.044068862915039066, 0.042016769409179686, 0.042006526947021484, 0.042291168212890626, 0.042397216796875, 0.04220502471923828, 0.042248863220214844, 0.04230144119262695, 0.042339839935302735, 0.04239206314086914, 0.042565696716308596, 0.042655681610107424, 
0.04247110366821289, 0.042348865509033204, 0.04255059051513672, 0.04257452774047851, 0.04265903854370117, 0.042973983764648435, 0.04305100631713867, 0.04279036712646484, 0.0426042251586914, 0.042641441345214845, 0.04277705764770508, 0.042690910339355466, 0.042417377471923826, 0.04256028747558594, 0.04273971176147461, 0.04275404739379883, 0.04256972885131836, 0.042517822265625, 0.04271174240112305, 0.04262406539916992, 0.04266463851928711, 0.04339030456542969, 0.04275641632080078, 0.04284681701660156, 0.04281731033325195, 0.04269814300537109, 0.0426910400390625, 0.043028865814208984, 0.04286163330078125, 0.04278137588500976, 0.042633438110351564, 0.042829822540283204, 0.04277657699584961, 0.04276220703125, 0.04272745513916015, 0.042797054290771484, 0.04269388961791992, 0.042879745483398436, 0.042897407531738284, 0.042616832733154295, 0.042690208435058594, 0.042786399841308595, 0.04301696014404297, 0.042962944030761716, 0.04273104095458984, 0.043222496032714844, 0.04312575912475586, 0.043036319732666015, 0.0429837760925293, 0.04263504028320313, 0.04261759948730469, 0.042266815185546876, 0.04240793609619141, 0.04228505706787109, 0.042256385803222656, 0.04217446517944336, 0.04206387329101562, 0.04211507034301758, 0.04226358413696289, 0.042163135528564454, 0.04213494491577149, 0.042238304138183594, 0.04253472137451172, 0.042205310821533205, 0.04223798370361328, 0.04269689559936524, 0.04339724731445312, 0.04318975830078125, 0.04261324691772461, 0.04243247985839844, 0.04250422286987305, 0.04257791900634766, 0.042477569580078124, 0.042390560150146486, 0.04257072067260742, 0.04242432022094727, 0.04244889450073242, 0.042407230377197264, 0.042609344482421874, 0.04231167984008789, 0.04228710556030273, 0.04247347259521484, 0.042509376525878904, 0.042460094451904296, 0.042620159149169924, 0.04263398361206055, 0.042753856658935545, 0.04255353546142578, 0.04250966262817383, 0.04287350463867187, 0.04309401702880859, 0.04261273574829102, 0.042967041015625, 0.043054622650146486, 0.04272304153442383, 0.04274252700805664, 0.04268409729003906, 0.04272284698486328, 0.04269750213623047, 0.042856063842773434, 0.04280969619750977, 0.04282748794555664, 0.04269686508178711, 0.04269641494750977, 0.042684864044189454, 0.042805118560791014, 0.04282492828369141, 0.042904319763183596, 0.042870944976806644, 0.0429334716796875, 0.04281423950195312, 0.042881023406982424, 0.04220108795166016, 0.04232806396484375, 0.042403839111328126, 0.04242217636108398, 0.04241363143920898, 0.04228681564331055, 0.042361663818359374, 0.04233216094970703, 0.04414054489135742, 0.04256972885131836, 0.042786014556884765, 0.04256028747558594, 0.042676097869873045, 0.04279308700561523, 0.04233603286743164, 0.04277679824829102, 0.042600448608398435, 0.042538944244384765, 0.04282374572753906, 0.042641407012939454, 0.042716960906982425, 0.0427762565612793, 0.04272159957885742, 0.04250646209716797, 0.04266726303100586, 0.04267494583129883, 0.04266393661499023, 0.042647361755371094, 0.04256924819946289, 0.042472095489501954, 0.04242777633666992, 0.04257414245605469, 0.042436927795410154, 0.04259372711181641, 0.04294889450073242, 0.04279894256591797, 0.04274630355834961, 0.04268956756591797, 0.04275408172607422, 0.04258671951293945, 0.0427973747253418, 0.04302441787719727, 0.042810943603515624, 0.04285065460205078, 0.04266608047485351, 0.04283801651000976, 0.04283596801757812, 0.04280319976806641, 0.042850143432617185, 0.04318838500976562, 0.04311654281616211, 0.04312063980102539, 0.04277155303955078, 0.04280179214477539, 0.04264169692993164, 
0.04282681655883789, 0.04280620956420898, 0.042889217376708984, 0.04308297729492187, 0.043014942169189455, 0.04293584060668945, 0.043106369018554684, 0.04307980728149414, 0.04275260925292969, 0.0425164794921875, 0.042446849822998046, 0.042349567413330076, 0.042111392974853515, 0.04228566360473633, 0.04253696060180664, 0.04253900909423828, 0.04235059356689453, 0.04231926345825195, 0.04270345687866211, 0.042567680358886716, 0.04254233551025391, 0.042359294891357424, 0.04223001480102539, 0.042403839111328126, 0.04252796936035156, 0.04271593475341797, 0.04279897689819336, 0.04263539123535156, 0.042690464019775394, 0.04277664184570312, 0.04302195358276367, 0.042869152069091795, 0.04253433609008789, 0.04302700805664063, 0.04265760040283203, 0.0428355827331543, 0.042503902435302734, 0.04255625534057617, 0.04272880172729492, 0.04261280059814453, 0.042525279998779295, 0.04272316741943359, 0.042485057830810545, 0.042888031005859376, 0.04294246292114258, 0.043052032470703126, 0.04259328079223633, 0.04288486480712891, 0.04289152145385742, 0.043023616790771484, 0.0429698257446289, 0.042903583526611326, 0.04281753540039063, 0.0429486083984375, 0.043034175872802734, 0.04300640106201172, 0.042979328155517575, 0.04292940902709961, 0.04299238586425781, 0.04322470474243164, 0.04293215942382812, 0.04303427124023437, 0.042902305603027345, 0.04297868728637695, 0.043016830444335935, 0.04368342590332031, 0.04291625595092773, 0.043448318481445314, 0.043202560424804685, 0.04312473678588867, 0.04329619216918945, 0.04259513473510742, 0.04260147094726562, 0.04274998474121094, 0.04249875259399414, 0.042503841400146486, 0.042600318908691405, 0.043628318786621094, 0.04263782501220703, 0.042486240386962894, 0.04291584014892578, 0.043259807586669925, 0.04268009567260742, 0.042810943603515624, 0.04257868957519531, 0.04245094299316406, 0.04306739044189453, 0.04303257751464844, 0.04296681594848633, 0.04260895919799805, 0.04275331115722656, 0.04282400131225586, 0.042895263671875, 0.042991584777832034, 0.043041217803955076, 0.04289945602416992, 0.042992992401123045, 0.04260931015014648, 0.04268851089477539, 0.04269875335693359, 0.042995712280273435, 0.04294041442871094, 0.04267212677001953, 0.042890975952148434, 0.04281292724609375, 0.04268521499633789, 0.042921504974365234, 0.042718814849853515, 0.04272623825073242, 0.042960479736328126, 0.04422256088256836, 0.04280543899536133, 0.04299292755126953, 0.04294512176513672, 0.043003902435302735, 0.042952991485595705, 0.043022335052490236, 0.04286614227294922, 0.04287071990966797, 0.04270959854125977, 0.042874881744384766, 0.042668033599853515, 0.04276591873168945, 0.042723712921142576, 0.04299929428100586, 0.042963329315185546, 0.042768096923828124, 0.0427729263305664, 0.04278076934814453, 0.04311628723144531, 0.04315356826782227, 0.04298956680297852, 0.04311040115356445, 0.04313497543334961, 0.04229891204833985, 0.04242217636108398, 0.042492542266845706, 0.042471424102783206, 0.04245475387573242, 0.042468894958496095, 0.04237539291381836, 0.04229788970947266, 0.04229119873046875, 0.04267212677001953, 0.0423889274597168, 0.04264323043823242, 0.0425682258605957, 0.04267647933959961, 0.04276838302612305, 0.042602497100830077, 0.04269388961791992, 0.042748672485351566, 0.04267331314086914, 0.04288188934326172, 0.042708992004394535, 0.04272947311401367, 0.042896446228027345, 0.04263622283935547, 0.04252876663208008, 0.042888607025146484, 0.04251913452148438, 0.04284774398803711, 0.042748416900634766, 0.042848255157470705, 0.04379606246948242, 0.042848033905029295, 0.04248844909667969, 
0.04261068725585938, 0.04281375885009766, 0.042770111083984375, 0.042751102447509765, 0.042912609100341795, 0.04295635223388672, 0.042791393280029295, 0.04302438354492188, 0.04308947372436524, 0.04289142227172851, 0.042928417205810546, 0.042998817443847655, 0.0431209602355957, 0.04287472152709961, 0.04279548645019531, 0.04278716659545898, 0.042831871032714845, 0.042700801849365234, 0.04298476791381836, 0.04296531295776367, 0.04288140869140625, 0.042925537109375, 0.04288771057128906, 0.0429279670715332, 0.04291516876220703, 0.04302227020263672, 0.04316783905029297, 0.04318697738647461, 0.04305100631713867, 0.04319027328491211, 0.04298950576782227, 0.042659454345703125, 0.042414047241210937, 0.04270742416381836, 0.042297344207763675, 0.042848255157470705, 0.04221542358398438, 0.042307071685791016, 0.04246768188476562, 0.042468799591064456, 0.04265852737426758, 0.04287228775024414, 0.042564128875732424, 0.04259635162353516, 0.0430571517944336, 0.04283801651000976, 0.04263935852050781, 0.04402928161621094, 0.042807071685791016, 0.04272627258300781, 0.042866687774658206, 0.042620288848876954, 0.04284070587158203, 0.04265369415283203, 0.04286444854736328, 0.04292012786865235, 0.04267212677001953, 0.04263888168334961, 0.04278239822387695, 0.04267702484130859, 0.042592254638671875, 0.04272470474243164, 0.042512351989746094, 0.04296160125732422, 0.042749088287353514, 0.0428809928894043, 0.04302937698364258, 0.043046783447265625, 0.043036800384521484, 0.042897407531738284, 0.04320460891723633, 0.04296908950805664, 0.04293836975097656, 0.042939456939697265, 0.04337452697753906, 0.04327731323242188, 0.04298489761352539, 0.043282463073730466, 0.04315584182739258, 0.04292204666137695, 0.043288673400878906, 0.04312854385375976, 0.04304105758666992, 0.0428851203918457, 0.04292607879638672, 0.04308540725708008, 0.043061504364013674, 0.043159679412841795, 0.04307056045532227, 0.042911903381347656, 0.04342377471923828, 0.04311321640014648, 0.04305068969726562, 0.04221132659912109, 0.04253286361694336, 0.042198177337646484, 0.04217913436889648, 0.042176799774169924, 0.042493343353271484, 0.04224233627319336, 0.042690208435058594, 0.04284892654418945, 0.04247942352294922, 0.04275628662109375, 0.042546432495117185, 0.0424005126953125, 0.04244060897827148, 0.042586208343505856, 0.042379264831542966, 0.04254515075683594, 0.042485633850097654, 0.04263129425048828, 0.0423702392578125, 0.042649856567382814, 0.042609214782714844, 0.04243836975097656, 0.042485279083251955, 0.0425107536315918, 0.042526111602783204, 0.042471359252929684, 0.04260761642456055, 0.04262297439575195, 0.042774528503417966, 0.04268620681762696, 0.043358463287353516, 0.0437391357421875, 0.04275609588623047, 0.04264326477050781, 0.042952831268310544, 0.04291385650634766, 0.043069438934326174, 0.04292607879638672, 0.042964160919189455, 0.04308361434936524, 0.04327683258056641, 0.04314566421508789, 0.04313638305664062, 0.042984062194824216, 0.04286185455322265, 0.04278937530517578, 0.042780895233154294, 0.04287897491455078, 0.04268812942504883, 0.042987903594970706, 0.04297727966308594, 0.04277612686157226, 0.0429815673828125, 0.04303811264038086, 0.04300271987915039, 0.04310015869140625, 0.043028480529785154, 0.04315094375610352, 0.0431558723449707, 0.04359920120239258, 0.04337456130981445, 0.043186878204345705, 0.042423423767089845, 0.0425049934387207, 0.042293407440185546, 0.04213913726806641, 0.042205631256103514, 0.04268851089477539, 0.04246527862548828, 0.04260611343383789, 0.042586593627929686, 0.0424194221496582, 0.04241427230834961, 
0.042594913482666016, 0.0424796142578125, 0.042658878326416017, 0.042670047760009766, 0.04241097640991211, 0.04286787033081055, 0.04300067138671875, 0.042521984100341796, 0.04258025741577148, 0.042823936462402346, 0.04258006286621094, 0.04260441589355469, 0.042832000732421875, 0.042579776763916014, 0.04266175842285156, 0.042746177673339845, 0.042584064483642575, 0.042425952911376956, 0.042457504272460936, 0.04257382583618164, 0.04256358337402344, 0.042840065002441405, 0.04279296112060547, 0.04280934524536133, 0.042889217376708984, 0.04266393661499023, 0.042724510192871094, 0.04270361709594726, 0.04281967926025391, 0.04284739303588867, 0.042922847747802736, 0.042971134185791016, 0.04309401702880859, 0.0430489616394043, 0.043022335052490236, 0.043112449645996094, 0.043128128051757815, 0.042766815185546876, 0.04296112060546875, 0.0432803840637207, 0.04268569564819336, 0.04457295989990234, 0.04376419067382813, 0.04284182357788086, 0.04302467346191406, 0.0431956787109375, 0.04309849548339844, 0.042912094116210935, 0.043294078826904295, 0.0429349136352539, 0.04333158493041992, 0.04336844635009766, 0.042807743072509764, 0.042552608489990235, 0.04336844635009766, 0.04235747146606445, 0.04249331283569336, 0.04235123062133789, 0.042774528503417966, 0.04231782531738281, 0.04275606536865234, 0.04236291122436524, 0.04264940643310547, 0.04256377410888672, 0.042529918670654296, 0.042904449462890626, 0.04257539367675781, 0.04253334426879883, 0.04269055938720703, 0.04285785675048828, 0.04270758438110352, 0.04275958251953125, 0.042920543670654294, 0.04258816146850586, 0.042627071380615236, 0.042842113494873046, 0.04290150451660156, 0.042651649475097655, 0.042625022888183595, 0.04283955383300781, 0.04275251388549805, 0.04272947311401367, 0.04263849639892578, 0.042914657592773436, 0.04287062454223633, 0.04303007888793945, 0.042686878204345705, 0.043337921142578124, 0.042927902221679685, 0.04304825592041016, 0.04408607864379883, 0.0429917106628418, 0.04300288009643555, 0.04295888137817383, 0.04298591995239258, 0.04319696044921875, 0.04317532730102539, 0.043192928314208984, 0.04291353607177734, 0.04292428970336914, 0.04283321762084961, 0.04280412673950195, 0.04295657730102539, 0.042772159576416016, 0.042866912841796875, 0.04282780838012695, 0.04280694580078125, 0.0432275505065918, 0.04296015930175781, 0.0429780158996582, 0.04307891082763672, 0.04319513702392578, 0.04309571075439453, 0.0432151985168457, 0.04307353591918945]",tokens/s,23.378074020370036,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4036.907008,4592.631808,0.0,4190.109696,3918.037504,s,1,10.7208134765625,10.7208134765625,0.0,10.7208134765625,10.7208134765625,10.7208134765625,10.7208134765625,[10.7208134765625],,kWh,9.623351788748853e-05,1.0608073579911668e-05,2.8292244855995796e-05,0.00013513383632339602,,MB,1794.53952,4678.61504,0.0,4261.412864,4088.623616,s,10,1.8303253173828125,0.18303253173828124,0.0006722698597633327,0.18282723236083984,0.18386539154052736,0.18410175552368163,0.18429084671020507,"[0.18224566650390625, 0.18280067443847656, 0.18265628051757812, 0.18285379028320312, 0.1822172546386719, 0.18433811950683593, 0.1832626495361328, 0.18250364685058593, 0.18363436889648438, 0.1838128662109375]",tokens/s,1398.6584656221396,kWh,5.368643045075755e-06,5.917491852117389e-07,3.5486493035637547e-06,9.509041533851248e-06,tokens/kWh,26921745.907688517,MB,1807.200256,4804.44416,0.0,4387.241984,4262.434304,s,10,16.522649047851562,1.6522649047851563,0.004253204444550381,1.6535601196289063,1.6554445922851562,1.6571782897949219,1.6585652478027344,"[1.6440238037109376, 1.6506837158203125, 1.654978271484375, 1.6460321044921875, 1.6511090087890625, 1.654428466796875, 1.6589119873046876, 1.655059326171875, 1.6547305908203125, 1.6526917724609376]",tokens/s,38.129479006389644,kWh,4.8078132796590615e-05,5.3016627544367325e-06,3.205196251023539e-05,8.543175806126274e-05,tokens/kWh,737430.6865465998,,s,630,16.51655425071717,0.026216752778916132,0.00023718311238803477,0.02622006416320801,0.026458319664001466,0.026555543899536134,0.026918071708679204,"[0.02627356719970703, 0.026228960037231446, 0.025956287384033203, 0.025813056945800782, 0.025702144622802733, 0.025702112197875975, 0.025559520721435545, 0.025534048080444335, 0.02564963150024414, 0.025972736358642577, 0.025858047485351563, 0.02602969551086426, 0.02585433578491211, 0.02595408058166504, 0.025835071563720703, 0.02577836799621582, 0.026154943466186523, 0.02603267288208008, 0.025845760345458983, 0.02594611167907715, 0.025926687240600585, 0.026161184310913087, 0.02642220878601074, 0.02611721611022949, 0.026112607955932617, 0.026220191955566408, 0.026144927978515625, 0.026255456924438477, 0.026046592712402342, 0.025930015563964844, 0.02596249580383301, 0.025899007797241212, 0.02607513618469238, 0.0259967041015625, 0.025938528060913086, 0.026087039947509764, 0.02631923294067383, 0.026512575149536134, 0.026564640045166017, 0.026249631881713868, 0.026272127151489258, 0.026172639846801758, 0.026197952270507814, 0.026218463897705078, 0.026139520645141603, 0.02608880043029785, 0.026009248733520507, 0.026004447937011718, 0.025982112884521485, 0.026321792602539064, 0.026283552169799804, 0.02638643264770508, 0.02637254333496094, 0.026353696823120117, 0.026263551712036134, 0.026457759857177736, 0.026249568939208986, 0.026211519241333008, 0.026134912490844726, 0.026132928848266603, 0.026261503219604493, 0.026341087341308595, 0.026233119964599608, 0.026368000030517577, 0.02621356773376465, 0.025856224060058594, 0.02592211151123047, 0.025950176239013672, 0.025975936889648436, 0.025864927291870118, 0.025843936920166014, 0.025927104949951173, 0.026061216354370118, 0.026058464050292968, 0.0260614070892334, 0.02594723129272461, 0.02589116859436035, 0.025835935592651366, 0.026048511505126954, 0.026011648178100585, 0.025903104782104492, 0.02576383972167969, 0.025874431610107423, 0.02632908821105957, 0.02582636833190918, 0.025883583068847655, 0.026273088455200197, 0.026368703842163086, 0.026230464935302733, 0.0262740478515625, 
0.026103872299194336, 0.026045536041259764, 0.025994144439697265, 0.026001407623291017, 0.026066495895385743, 0.026073535919189452, 0.02596601676940918, 0.025995840072631837, 0.02613043212890625, 0.026400480270385742, 0.026386367797851563, 0.026326656341552734, 0.02616803169250488, 0.02638572883605957, 0.02624924850463867, 0.026383007049560547, 0.02654412841796875, 0.026391807556152343, 0.026254079818725587, 0.026267648696899414, 0.026164928436279298, 0.026296640396118166, 0.02644134330749512, 0.0264400634765625, 0.02693529510498047, 0.02633318328857422, 0.026425344467163086, 0.026779647827148437, 0.026314144134521485, 0.026575456619262694, 0.026513408660888672, 0.02650489616394043, 0.026284351348876953, 0.026266815185546875, 0.02666102409362793, 0.026552671432495116, 0.026549024581909178, 0.02649087905883789, 0.026187776565551758, 0.0261079044342041, 0.02602720069885254, 0.02604524803161621, 0.025935871124267578, 0.02596249580383301, 0.02609766387939453, 0.026232736587524414, 0.02606224060058594, 0.026132768630981445, 0.02616499137878418, 0.026235551834106446, 0.02612019157409668, 0.026073087692260744, 0.02615705680847168, 0.026186912536621094, 0.026137439727783204, 0.026062751770019533, 0.026207584381103516, 0.02677631950378418, 0.026302463531494142, 0.026100032806396483, 0.026178783416748046, 0.026237407684326173, 0.026225664138793944, 0.02614918327331543, 0.02607993507385254, 0.026220544815063477, 0.026611200332641603, 0.02630431938171387, 0.02613104057312012, 0.026271839141845704, 0.027327808380126953, 0.026587839126586913, 0.02631667137145996, 0.026382015228271483, 0.026343360900878906, 0.026511871337890625, 0.026457887649536133, 0.026411231994628907, 0.02631884765625, 0.026315967559814454, 0.026303295135498048, 0.026355424880981446, 0.026274080276489257, 0.026324384689331053, 0.02644233512878418, 0.026411008834838868, 0.026266847610473633, 0.026204927444458008, 0.02626563262939453, 0.02631679916381836, 0.026193920135498046, 0.026262975692749022, 0.026253856658935548, 0.02629430389404297, 0.026292224884033204, 0.02620800018310547, 0.026134016036987305, 0.02611427116394043, 0.026042911529541017, 0.02623561668395996, 0.02633113670349121, 0.026034175872802736, 0.025972736358642577, 0.02564838409423828, 0.025641727447509765, 0.025683967590332032, 0.02570585632324219, 0.025896928787231446, 0.025868959426879883, 0.026201759338378906, 0.025790815353393556, 0.025796287536621092, 0.02593619155883789, 0.026842687606811525, 0.026186176300048828, 0.02604457664489746, 0.026104703903198242, 0.026131168365478515, 0.02596406364440918, 0.025827775955200194, 0.025910943984985352, 0.025821792602539063, 0.025922719955444335, 0.025805696487426758, 0.02596249580383301, 0.02614476776123047, 0.026187135696411134, 0.026183551788330078, 0.02616192054748535, 0.026060127258300782, 0.025964384078979493, 0.02590959930419922, 0.025907680511474608, 0.025772031784057618, 0.025968639373779297, 0.026136159896850586, 0.026120607376098632, 0.026254911422729493, 0.026527776718139648, 0.026371967315673827, 0.026396671295166017, 0.02633113670349121, 0.026219039916992187, 0.026251264572143555, 0.026310655593872072, 0.026249216079711913, 0.02609561538696289, 0.026129600524902343, 0.02609833526611328, 0.026013856887817384, 0.026003456115722655, 0.02614271926879883, 0.02638768005371094, 0.026403615951538086, 0.02652774429321289, 0.026424896240234374, 0.02624732780456543, 0.026153247833251955, 0.026218496322631835, 0.026234880447387695, 0.026464256286621093, 0.026875904083251953, 0.0263372802734375, 0.026267648696899414, 
0.026071039199829102, 0.02588412857055664, 0.025807392120361327, 0.025783647537231447, 0.02582499122619629, 0.02580099105834961, 0.025929695129394532, 0.026020544052124023, 0.025839456558227537, 0.02587414360046387, 0.025995712280273437, 0.025991167068481445, 0.025989120483398437, 0.02617344093322754, 0.02619375991821289, 0.026189760208129884, 0.026065120697021483, 0.02614067268371582, 0.0260250244140625, 0.026167520523071287, 0.02622719955444336, 0.026333408355712892, 0.026221567153930665, 0.026371007919311525, 0.026273855209350584, 0.026330720901489257, 0.026108320236206056, 0.026135648727416992, 0.026104736328125, 0.02615705680847168, 0.026172576904296876, 0.026168064117431642, 0.026143840789794922, 0.026403711318969725, 0.0263240966796875, 0.026348384857177734, 0.02630672073364258, 0.026372095108032227, 0.02630451202392578, 0.026320159912109373, 0.026292608261108397, 0.026470752716064454, 0.026261503219604493, 0.026408960342407226, 0.026380287170410157, 0.026227775573730468, 0.02624403190612793, 0.026355712890625, 0.02629199981689453, 0.02638051223754883, 0.026330400466918945, 0.026425504684448244, 0.02632499122619629, 0.026415615081787108, 0.026380352020263672, 0.026298368453979492, 0.026191455841064453, 0.026370464324951173, 0.026301631927490233, 0.026486976623535156, 0.02638912010192871, 0.0263372802734375, 0.026204160690307617, 0.026064895629882814, 0.02609328079223633, 0.025903392791748046, 0.026014720916748047, 0.02594918441772461, 0.026181631088256836, 0.02614681625366211, 0.026117919921875, 0.02605078315734863, 0.02611404800415039, 0.026005504608154296, 0.026011199951171876, 0.026054719924926757, 0.025988960266113283, 0.026057247161865235, 0.026089471817016603, 0.025982271194458006, 0.02594476890563965, 0.02611404800415039, 0.02614886474609375, 0.026168703079223633, 0.026096256256103515, 0.026161151885986327, 0.026107744216918947, 0.02622889518737793, 0.02611609649658203, 0.026261503219604493, 0.026130239486694337, 0.02619615936279297, 0.026142080307006835, 0.026319744110107422, 0.02616703987121582, 0.02634281539916992, 0.0262619514465332, 0.026263200759887695, 0.02635212707519531, 0.026327039718627928, 0.026518880844116213, 0.026268320083618166, 0.026161151885986327, 0.02629631996154785, 0.026390527725219725, 0.026394847869873048, 0.026801952362060545, 0.02642483139038086, 0.02642790412902832, 0.026322208404541015, 0.026555103302001955, 0.026226335525512696, 0.026177343368530274, 0.026442272186279297, 0.026779487609863283, 0.02709225654602051, 0.026469247817993164, 0.026449920654296875, 0.02637004852294922, 0.0263656005859375, 0.026487136840820314, 0.02625654411315918, 0.02619068717956543, 0.02632908821105957, 0.02661417579650879, 0.02645020866394043, 0.026462207794189452, 0.02636185646057129, 0.026109952926635743, 0.026177536010742186, 0.02615817642211914, 0.0261231689453125, 0.026074655532836916, 0.026119712829589845, 0.026118751525878905, 0.02615449523925781, 0.02618172836303711, 0.026338048934936523, 0.026225696563720702, 0.026199264526367186, 0.02607244873046875, 0.026167680740356445, 0.026183679580688478, 0.026193920135498046, 0.026167295455932618, 0.026156415939331056, 0.0262478084564209, 0.02628112030029297, 0.026166112899780273, 0.026247167587280275, 0.026226688385009765, 0.026243072509765625, 0.02606278419494629, 0.026555904388427733, 0.026219072341918944, 0.026267648696899414, 0.026230783462524415, 0.02628995132446289, 0.026392799377441406, 0.02653183937072754, 0.026464256286621093, 0.026300416946411134, 0.0263372802734375, 0.026247167587280275, 
0.026294271469116212, 0.026387903213500978, 0.026368576049804686, 0.026298368453979492, 0.026259456634521484, 0.02630428886413574, 0.02626495933532715, 0.02622060775756836, 0.026407712936401366, 0.026569984436035157, 0.02636854362487793, 0.02633545684814453, 0.02631785583496094, 0.026612703323364257, 0.026603519439697267, 0.02653984069824219, 0.02643168067932129, 0.026965024948120118, 0.02640380859375, 0.026382335662841795, 0.02636595153808594, 0.02659324836730957, 0.02648476791381836, 0.02627993583679199, 0.026187456130981446, 0.026011680603027342, 0.02626793670654297, 0.02608332824707031, 0.02609334373474121, 0.02596886444091797, 0.025937664031982423, 0.02588287925720215, 0.025956352233886718, 0.026355712890625, 0.02611974334716797, 0.026870208740234373, 0.026005504608154296, 0.026003103256225586, 0.026198368072509765, 0.025993215560913087, 0.025986623764038087, 0.02591584014892578, 0.026005504608154296, 0.025892192840576173, 0.025927616119384767, 0.026112735748291014, 0.026240575790405275, 0.0261144962310791, 0.026567840576171876, 0.026239231109619142, 0.026241632461547853, 0.026021888732910156, 0.025972736358642577, 0.026004480361938476, 0.026014623641967775, 0.026277824401855467, 0.026384544372558594, 0.02630201530456543, 0.026284479141235353, 0.02629395294189453, 0.026429407119750975, 0.026379743576049806, 0.026483583450317382, 0.026226112365722656, 0.02615340805053711, 0.026339456558227538, 0.026438943862915038, 0.02636854362487793, 0.026302047729492187, 0.026272287368774416, 0.0262259521484375, 0.026370208740234376, 0.02650553512573242, 0.027864704132080077, 0.026661472320556642, 0.026427488327026367, 0.02669753646850586, 0.026302656173706054, 0.02632089614868164, 0.02626710319519043, 0.026292768478393555, 0.026245119094848633, 0.02636185646057129, 0.02651955223083496, 0.026632192611694337, 0.02656870460510254, 0.026552928924560546, 0.026580831527709962, 0.026210464477539063, 0.026052608489990234, 0.026020959854125978, 0.026022815704345705, 0.025960447311401368, 0.025957759857177735, 0.025911104202270507, 0.02607391929626465, 0.02607923126220703, 0.026135711669921874, 0.02619068717956543, 0.026081279754638673, 0.026449920654296875, 0.02618940734863281, 0.02621196746826172, 0.026083871841430663, 0.026194175720214843, 0.02626665687561035, 0.02617238426208496, 0.026222591400146485, 0.02610585594177246, 0.026218496322631835, 0.026265600204467773, 0.026177536010742186, 0.025985023498535157, 0.025985023498535157, 0.026167295455932618, 0.02671615982055664, 0.026998783111572267, 0.026192928314208986, 0.02622972869873047, 0.02624502372741699, 0.026075231552124024, 0.026225984573364256, 0.02641971206665039, 0.02635385513305664, 0.026372095108032227, 0.026327039718627928, 0.026425344467163086, 0.02621993637084961, 0.026228351593017576, 0.026274784088134766, 0.026330848693847657, 0.02646646308898926, 0.02640880012512207, 0.02616927909851074, 0.026273855209350584, 0.02639017677307129, 0.02626736068725586, 0.026330015182495118, 0.026376192092895507, 0.026453344345092774, 0.026438304901123047, 0.02632089614868164, 0.026212352752685547, 0.026201311111450194, 0.026238847732543945, 0.02647542381286621, 0.026427391052246094, 0.02643315124511719, 0.026304672241210938, 0.02636595153808594, 0.026265600204467773, 0.026013120651245118, 0.025993600845336914, 0.025839807510375977, 0.025994495391845705, 0.025884511947631837, 0.025930496215820314, 0.026028192520141602, 0.02610585594177246, 0.025985023498535157, 0.02608902359008789, 0.025976736068725585, 0.025944448471069335, 0.025903295516967774, 
0.02601775932312012, 0.02602169609069824, 0.02599955177307129, 0.026101503372192383, 0.026142047882080077, 0.02608195114135742, 0.025962560653686524, 0.026048704147338866, 0.026117919921875, 0.02773219108581543, 0.026191871643066408, 0.026159040451049803, 0.026178911209106447, 0.02620182418823242, 0.02617843246459961, 0.026171520233154298, 0.026201152801513673, 0.026155967712402344, 0.026161151885986327, 0.02651740837097168, 0.026198015213012696, 0.026329183578491212, 0.026404863357543946, 0.026437536239624023, 0.026431583404541017, 0.026236032485961912, 0.026313535690307616, 0.026265119552612303, 0.026304479598999023, 0.026287872314453124, 0.02648966407775879, 0.026336671829223633, 0.026317407608032226, 0.026255359649658205, 0.026214399337768556, 0.026291744232177734, 0.026320991516113282, 0.026445472717285156, 0.026433696746826173, 0.02633375930786133, 0.02621379280090332, 0.02627555274963379, 0.026291072845458983, 0.026400768280029296, 0.02650111961364746, 0.02631817626953125, 0.026257888793945312, 0.026191295623779295]",tokens/s,38.143549219574346,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1262.944256,1247.674368,0.0,845.152256,799.789056,s,1,8.562431640625,8.562431640625,0.0,8.562431640625,8.562431640625,8.562431640625,8.562431640625,[8.562431640625],,kWh,3.0741425604158696e-05,3.383782067100457e-06,8.653895812005263e-06,4.2779103483264416e-05,,MB,1503.531008,1298.006016,0.0,880.80384,837.353472,s,10,0.4125347480773926,0.04125347480773926,0.00014049897391803768,0.041219087600708006,0.04137855949401855,0.04150196857452393,0.041600695838928226,"[0.0416253776550293, 0.04125987243652344, 0.04135113525390625, 0.04111481475830078, 0.041258304595947266, 0.041169471740722656, 0.04119692611694336, 0.04122035217285156, 0.04121782302856445, 0.04112067031860352]",tokens/s,6205.537865430277,kWh,1.229329316421466e-06,1.3557348065849618e-07,8.182686284705437e-07,2.183171425550506e-06,tokens/kWh,117260604.00201847,MB,1540.890624,1298.006016,0.0,880.80384,837.356032,s,10,14.094769653320315,1.4094769653320314,0.010149239129179649,1.4057165527343751,1.4234639770507813,1.4280743347167968,1.4317626208496093,"[1.40944970703125, 1.422439453125, 1.41304833984375, 1.403366455078125, 1.4326846923828125, 1.403851318359375, 1.4070823974609374, 1.3987791748046876, 1.4043507080078126, 1.3997174072265626]",tokens/s,44.697431422839216,kWh,4.106764234399589e-05,4.52875092306177e-06,1.6821689927930227e-05,6.24180831949879e-05,tokens/kWh,1009322.8880994992,,s,630,14.089935638427745,0.022364977203853543,0.0004665815515747548,0.022191455841064453,0.02299331569671631,0.023130932521820066,0.02387912561416627,"[0.022347776412963868, 0.022284000396728516, 0.02203660774230957, 0.022157472610473634, 0.02244607925415039, 0.022253631591796875, 0.022468288421630858, 0.022163711547851562, 0.0221014404296875, 0.022180511474609376, 0.022910879135131835, 
0.022410335540771483, 0.022254495620727538, 0.022337535858154296, 0.023505151748657225, 0.022523136138916017, 0.02215353584289551, 0.022077760696411132, 0.02206211280822754, 0.022039392471313476, 0.022400831222534178, 0.02248886489868164, 0.022266271591186524, 0.02255446434020996, 0.022117919921875, 0.022558624267578126, 0.022115232467651368, 0.02207686424255371, 0.02225391960144043, 0.022143007278442383, 0.022109439849853516, 0.022035200119018553, 0.022145023345947267, 0.02210201644897461, 0.02206515121459961, 0.022165151596069337, 0.02203830337524414, 0.02206777572631836, 0.022190080642700196, 0.022200319290161134, 0.022106111526489256, 0.02195631980895996, 0.022338943481445314, 0.022170528411865235, 0.02228236770629883, 0.02211008071899414, 0.022642688751220705, 0.02220262336730957, 0.022175487518310548, 0.022346752166748047, 0.022471712112426757, 0.022558687210083007, 0.02275328063964844, 0.022370304107666016, 0.022437887191772463, 0.02257459259033203, 0.022487552642822265, 0.022914783477783203, 0.022982336044311522, 0.02304377555847168, 0.022988927841186522, 0.023055135726928713, 0.023068672180175782, 0.023042943954467772, 0.023035903930664063, 0.022990943908691407, 0.02303116798400879, 0.023181856155395506, 0.02337785530090332, 0.023087167739868165, 0.022816768646240236, 0.022692863464355468, 0.022971391677856445, 0.023002815246582032, 0.022888992309570314, 0.0228185920715332, 0.022564031600952147, 0.027114080429077148, 0.023986400604248045, 0.02467193603515625, 0.022341951370239258, 0.022451423645019532, 0.022154272079467772, 0.022073375701904298, 0.022218080520629884, 0.021979520797729492, 0.022167360305786133, 0.022107872009277343, 0.022073823928833006, 0.02210201644897461, 0.021921056747436524, 0.022076095581054687, 0.0220467529296875, 0.022111520767211915, 0.02202899169921875, 0.022007871627807617, 0.021992576599121093, 0.022030624389648437, 0.021983232498168945, 0.02202886390686035, 0.022004800796508787, 0.022407968521118163, 0.022984128952026367, 0.022106847763061523, 0.022257055282592773, 0.021912160873413085, 0.022030464172363283, 0.0219237117767334, 0.022040576934814454, 0.021983232498168945, 0.022136831283569337, 0.022163455963134765, 0.02208742332458496, 0.022691904067993166, 0.022167743682861327, 0.022181888580322266, 0.022130687713623046, 0.02225503921508789, 0.02216531181335449, 0.0227193603515625, 0.02292927932739258, 0.02306025505065918, 0.02292911911010742, 0.02340287971496582, 0.02300476837158203, 0.022981151580810547, 0.02323980712890625, 0.023360383987426757, 0.02330419158935547, 0.023436704635620118, 0.02308572769165039, 0.023211967468261718, 0.022992576599121094, 0.022875968933105468, 0.02288252830505371, 0.023080608367919923, 0.022864511489868164, 0.02284156799316406, 0.02290255928039551, 0.02293667221069336, 0.022605888366699217, 0.022559295654296874, 0.022598976135253905, 0.02216649627685547, 0.022401023864746093, 0.02228339195251465, 0.022588287353515625, 0.023224319458007812, 0.022474720001220704, 0.022742208480834962, 0.022702560424804688, 0.022495807647705077, 0.022316864013671875, 0.022016000747680665, 0.021876735687255858, 0.022372352600097657, 0.022173696517944336, 0.02229209518432617, 0.022456703186035157, 0.022352928161621093, 0.022223840713500975, 0.022228992462158204, 0.02211840057373047, 0.022056224822998047, 0.022049503326416017, 0.022190080642700196, 0.02206105613708496, 0.022009248733520507, 0.022139104843139648, 0.021954944610595703, 0.022064735412597656, 0.02214748764038086, 0.022192127227783204, 0.02218569564819336, 0.02212278366088867, 
0.021989376068115234, 0.021999616622924805, 0.02203385543823242, 0.021973567962646483, 0.021954431533813476, 0.02229043197631836, 0.022081472396850585, 0.022122432708740234, 0.022091072082519533, 0.02201900863647461, 0.02202390480041504, 0.02196303939819336, 0.02209174346923828, 0.022499391555786133, 0.02195804786682129, 0.022055519104003905, 0.021882944107055664, 0.021946079254150392, 0.021952735900878907, 0.022005760192871093, 0.022098976135253905, 0.02201625633239746, 0.021908191680908202, 0.022359840393066405, 0.021954784393310545, 0.022260896682739256, 0.022647647857666015, 0.022372352600097657, 0.022165216445922852, 0.02191798400878906, 0.02202569580078125, 0.022094432830810546, 0.022347711563110353, 0.02240716743469238, 0.02219980812072754, 0.022032064437866213, 0.022073888778686525, 0.022173280715942382, 0.022220544815063477, 0.02215212821960449, 0.022932960510253907, 0.022247488021850587, 0.022139360427856445, 0.022544384002685547, 0.02356150436401367, 0.02233318328857422, 0.02233977508544922, 0.0222256965637207, 0.022372352600097657, 0.022179840087890625, 0.022214624404907228, 0.022165088653564452, 0.022036928176879883, 0.02216067123413086, 0.02203232002258301, 0.02218694305419922, 0.022085472106933592, 0.022366207122802736, 0.022527999877929687, 0.023169023513793945, 0.02208742332458496, 0.02200601577758789, 0.02195590400695801, 0.022091999053955078, 0.022006240844726563, 0.022531967163085937, 0.02212387275695801, 0.022212991714477538, 0.022034015655517578, 0.022076223373413088, 0.022078943252563477, 0.02234832000732422, 0.022335487365722655, 0.022861183166503905, 0.02290140724182129, 0.023070079803466797, 0.02296278381347656, 0.02303696060180664, 0.022922079086303712, 0.022860000610351563, 0.02307084846496582, 0.022909503936767578, 0.023089344024658204, 0.023028703689575197, 0.023231264114379882, 0.023052511215209962, 0.023090112686157228, 0.0228768310546875, 0.022845632553100587, 0.022930784225463866, 0.02274985694885254, 0.022841215133666992, 0.022718591690063475, 0.022556896209716796, 0.022552352905273437, 0.022485151290893554, 0.022206304550170898, 0.022097120285034178, 0.02196985626220703, 0.02230451202392578, 0.022957632064819336, 0.02232729530334473, 0.02227235221862793, 0.022124736785888673, 0.022144800186157228, 0.02211199951171875, 0.022626304626464845, 0.022946239471435547, 0.023021600723266603, 0.023207935333251953, 0.02297996711730957, 0.023124479293823243, 0.02306390380859375, 0.023188255310058595, 0.023035903930664063, 0.022949344635009767, 0.023044256210327147, 0.023129823684692383, 0.02328848075866699, 0.023272607803344728, 0.023138496398925783, 0.022933311462402343, 0.022936416625976563, 0.02288332748413086, 0.022895647048950196, 0.02268124771118164, 0.023003456115722656, 0.023142400741577147, 0.022591487884521484, 0.02261097526550293, 0.02246307182312012, 0.02258777618408203, 0.022269952774047853, 0.022114303588867186, 0.022235136032104492, 0.02248294448852539, 0.022317344665527344, 0.02233296012878418, 0.02214521598815918, 0.022261760711669923, 0.02203433609008789, 0.02210063934326172, 0.0219835205078125, 0.022364160537719727, 0.02205695915222168, 0.02198121643066406, 0.021919679641723634, 0.02215750312805176, 0.02217558479309082, 0.022480384826660156, 0.022432256698608398, 0.02225971221923828, 0.02257417678833008, 0.022999967575073242, 0.022388736724853517, 0.02216547203063965, 0.0221278076171875, 0.022097951889038087, 0.02270476722717285, 0.022138143539428713, 0.021947296142578124, 0.022322751998901366, 0.02214886474609375, 0.022116159439086912, 
0.022176607131958008, 0.021995552062988283, 0.0221265926361084, 0.02196268844604492, 0.02194985580444336, 0.02207196807861328, 0.02228223991394043, 0.022189056396484375, 0.021969919204711915, 0.022091455459594726, 0.022202144622802733, 0.02214147186279297, 0.022171615600585937, 0.02205404853820801, 0.022035327911376953, 0.02209791946411133, 0.022031967163085937, 0.022038944244384767, 0.02278803253173828, 0.02396985626220703, 0.0224167366027832, 0.022357791900634767, 0.022776704788208008, 0.022138879776000975, 0.022358015060424806, 0.02219363212585449, 0.02195510482788086, 0.022034719467163087, 0.022086816787719725, 0.02203910446166992, 0.02202828788757324, 0.02198908805847168, 0.022032352447509767, 0.022697887420654296, 0.02459075164794922, 0.023152639389038086, 0.022392831802368163, 0.022126176834106445, 0.022053472518920897, 0.021841920852661133, 0.021987327575683592, 0.023131839752197264, 0.02220403289794922, 0.02190320014953613, 0.021908319473266602, 0.021901311874389647, 0.022046239852905273, 0.022198751449584962, 0.022539392471313476, 0.022541183471679688, 0.02233344078063965, 0.02205081558227539, 0.02210371208190918, 0.02224684715270996, 0.022119199752807617, 0.022441823959350585, 0.02459177589416504, 0.02244492721557617, 0.022062944412231444, 0.022148319244384766, 0.022166208267211916, 0.022362367630004883, 0.022113759994506835, 0.022227231979370116, 0.022141311645507814, 0.022033279418945312, 0.023012575149536134, 0.0232138557434082, 0.022433376312255858, 0.022331808090209963, 0.022935552597045897, 0.022585344314575196, 0.022409215927124023, 0.022128639221191407, 0.022282175064086914, 0.022027519226074217, 0.022086463928222656, 0.022890495300292968, 0.02205900764465332, 0.022007680892944335, 0.021993120193481444, 0.022070880889892577, 0.022067487716674803, 0.022131231307983397, 0.022351936340332033, 0.022432863235473634, 0.022313919067382813, 0.022020063400268554, 0.02330575942993164, 0.022216575622558594, 0.02225008010864258, 0.022022144317626953, 0.02206105613708496, 0.022472415924072266, 0.02238287925720215, 0.022288383483886717, 0.022106111526489256, 0.02215936088562012, 0.022639999389648436, 0.022717056274414064, 0.022355968475341798, 0.022106016159057617, 0.022026912689208984, 0.021987520217895507, 0.022033599853515624, 0.022008575439453126, 0.021975103378295897, 0.022040319442749024, 0.022083839416503905, 0.022106111526489256, 0.022048063278198242, 0.022089792251586915, 0.02198182487487793, 0.022152767181396485, 0.022470943450927733, 0.02220150375366211, 0.022152000427246094, 0.02242755126953125, 0.022126720428466796, 0.022204479217529296, 0.022060384750366212, 0.022039295196533203, 0.02205881690979004, 0.02196303939819336, 0.022035871505737305, 0.022030847549438477, 0.022155263900756835, 0.022016000747680665, 0.022147071838378905, 0.022124479293823242, 0.022016063690185547, 0.021901311874389647, 0.0221214714050293, 0.022156511306762695, 0.022138240814208985, 0.022032800674438476, 0.022798336029052735, 0.02231500816345215, 0.021987327575683592, 0.02207049560546875, 0.02220684814453125, 0.022032575607299806, 0.022138816833496094, 0.022069536209106445, 0.021938175201416017, 0.02205286407470703, 0.021940223693847655, 0.022394208908081054, 0.022231712341308593, 0.02221670341491699, 0.0221693115234375, 0.022220479965209962, 0.02216012763977051, 0.02207686424255371, 0.02212700843811035, 0.022464160919189454, 0.02228873634338379, 0.022722080230712892, 0.022157312393188477, 0.02245884895324707, 0.022296575546264647, 0.023647647857666015, 0.023083391189575194, 0.02267158317565918, 
0.02229846382141113, 0.022027711868286132, 0.02205753517150879, 0.02211840057373047, 0.02205900764465332, 0.022013952255249023, 0.022105535507202147, 0.021961280822753906, 0.022400480270385742, 0.022256160736083986, 0.022190208435058593, 0.025312543869018555, 0.023656991958618163, 0.022272064208984376, 0.022699071884155275, 0.022639360427856445, 0.02226416015625, 0.02241315269470215, 0.022169567108154296, 0.02206889533996582, 0.022088064193725585, 0.022119455337524414, 0.02218288040161133, 0.022108160018920898, 0.022180927276611327, 0.021884992599487306, 0.022213504791259765, 0.022035551071166993, 0.022029216766357423, 0.0225577278137207, 0.022370304107666016, 0.022162399291992187, 0.02252390480041504, 0.022177696228027344, 0.02239897537231445, 0.022155359268188478, 0.022250591278076173, 0.022277023315429686, 0.022091775894165038, 0.02206719970703125, 0.022173824310302733, 0.0220402889251709, 0.02281078338623047, 0.021961759567260743, 0.022166463851928712, 0.02201558494567871, 0.02232707214355469, 0.022409183502197266, 0.022216447830200194, 0.0221907844543457, 0.02211568069458008, 0.02207017517089844, 0.022007808685302735, 0.021860639572143556, 0.022143775939941407, 0.022221792221069337, 0.022009824752807616, 0.022169599533081053, 0.02226505661010742, 0.02226051139831543, 0.022962175369262695, 0.02228630447387695, 0.022288415908813478, 0.022372352600097657, 0.02194972801208496, 0.022462560653686525, 0.0221907844543457, 0.022237184524536133, 0.02206924819946289, 0.022132192611694336, 0.022122400283813477, 0.022010751724243164, 0.022025856018066406, 0.021975040435791016, 0.022630527496337892, 0.02216134452819824, 0.021899328231811524, 0.022236671447753906, 0.023036415100097657, 0.0224849910736084, 0.022262880325317383, 0.022241632461547853, 0.0223787841796875, 0.022389375686645507, 0.022207712173461912, 0.02202668762207031, 0.02195587158203125, 0.02224611282348633, 0.022193920135498046, 0.022159616470336915, 0.022169599533081053, 0.023031808853149413, 0.02227609634399414, 0.022185983657836913, 0.02208768081665039, 0.022215904235839842, 0.022000320434570314, 0.022192224502563477, 0.021964799880981444, 0.022054912567138672, 0.02211347198486328, 0.02216748809814453, 0.022127487182617187, 0.02230233573913574, 0.022110368728637694, 0.022331424713134766, 0.022235328674316407, 0.02210201644897461, 0.022449216842651366, 0.021979103088378905, 0.022027231216430663, 0.022024351119995116, 0.02208745574951172, 0.02213007926940918, 0.022005823135375975, 0.022112960815429687, 0.02204211235046387, 0.022165920257568358, 0.021978368759155275, 0.022217472076416014, 0.02265497589111328, 0.022434879302978515, 0.022899648666381837, 0.022117727279663085, 0.022403743743896483, 0.022042623519897463, 0.022158815383911134]",tokens/s,44.71276634378583,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,2199.990272,2551.119872,0.0,2155.872256,2032.413184,s,1,8.8898994140625,8.8898994140625,0.0,8.8898994140625,8.8898994140625,8.8898994140625,8.8898994140625,[8.8898994140625],,kWh,5.1338232924998315e-05,5.6559227750613706e-06,1.5266956657995767e-05,7.226111235805545e-05,,MB,2121.121792,2827.943936,0.0,2418.016256,2280.154112,s,10,0.9278674545288086,0.09278674545288088,9.926888700563933e-05,0.09277532958984375,0.09293991775512696,0.0929553981781006,0.0929677825164795,"[0.09284620666503907, 0.09277664184570313, 0.09267791748046875, 0.0927488021850586, 0.09266400146484376, 0.0927861099243164, 0.09293647766113282, 0.0926864013671875, 0.09297087860107422, 0.09277401733398437]",tokens/s,2759.0147574472517,kWh,2.7518656658488974e-06,3.0333095443466414e-07,1.8249469428224003e-06,4.880143563105961e-06,tokens/kWh,52457473.164389685,MB,2129.481728,2911.830016,0.0,2501.902336,2389.055488,s,10,18.071246215820313,1.807124621582031,0.008751352296018327,1.8075271606445313,1.816113879394531,1.8174338317871093,1.818489793701172,"[1.8187537841796875, 1.8053961181640625, 1.815820556640625, 1.802324951171875, 1.813374755859375, 1.8151702880859375, 1.8087320556640625, 1.806322265625, 1.7911900634765625, 1.794161376953125]",tokens/s,34.862011865483424,kWh,5.3613183272484675e-05,5.913438193190448e-06,2.6872853793177772e-05,8.63994752588529e-05,tokens/kWh,729171.0952092238,,s,630,18.068478263854985,0.028680124228341236,0.0003434489850813749,0.028610991477966308,0.029006570053100585,0.029212997245788574,0.030164768085479743,"[0.029655040740966795, 0.029017215728759767, 0.02900982475280762, 0.028979808807373046, 0.028851680755615235, 0.028976032257080078, 0.028837888717651368, 0.028603679656982423, 0.02885500717163086, 0.02993152046203613, 0.029683456420898438, 0.028732959747314452, 0.028680383682250978, 0.02863158416748047, 0.02895052719116211, 0.028675743103027344, 0.028694911956787108, 0.0288022403717041, 0.02867689514160156, 0.02865283203125, 0.028877536773681642, 0.028691904067993164, 0.02878316879272461, 0.028845535278320313, 0.0289531192779541, 0.02855526351928711, 0.028782848358154298, 0.02863692855834961, 0.02852835273742676, 0.028471200942993165, 0.028573280334472657, 0.028658464431762697, 0.030701568603515625, 0.030256895065307616, 0.029075712203979493, 0.02891062355041504, 0.02873651123046875, 0.028727264404296876, 0.028610559463500978, 0.028595935821533202, 0.028573568344116212, 0.028499647140502928, 0.028508895874023436, 0.028601951599121093, 0.028731807708740235, 0.0289136962890625, 0.02876963233947754, 0.028734079360961916, 0.028598272323608398, 0.028560895919799805, 0.02863564872741699, 0.02876825523376465, 0.028690431594848635, 0.028630815505981445, 0.02858415985107422, 0.02857164764404297, 0.028469440460205078, 0.028546432495117187, 0.030658720016479492, 0.029196575164794923, 0.028792287826538084, 0.02889094352722168, 0.028686559677124024, 0.02922208023071289, 0.029038944244384766, 0.028816064834594726, 0.028639007568359375, 0.02857574462890625, 0.028700672149658202, 0.02855344009399414, 0.028642879486083985, 0.028591615676879883, 0.028614463806152343, 0.028529567718505858, 0.028656671524047852, 0.028592479705810546, 0.02866592025756836, 0.02871571159362793, 0.028731264114379883, 0.028831743240356447, 0.02876620864868164, 0.0285614070892334, 0.028700672149658202, 0.028741472244262694, 0.028772192001342775, 0.028516672134399415, 0.02860851287841797, 0.028496192932128905, 0.028419776916503905, 0.028667903900146483, 
0.029261152267456056, 0.02869251251220703, 0.02863974380493164, 0.028478815078735353, 0.028584127426147462, 0.028397792816162108, 0.02846348762512207, 0.028493824005126952, 0.02898944091796875, 0.028656639099121094, 0.02864361572265625, 0.028402399063110352, 0.02852799987792969, 0.028610208511352538, 0.028746368408203125, 0.028778303146362306, 0.02878060722351074, 0.028714975357055663, 0.02854902458190918, 0.0284881591796875, 0.028561151504516602, 0.028458400726318358, 0.029123552322387697, 0.02862828826904297, 0.028676799774169922, 0.02864454460144043, 0.028584224700927734, 0.028554880142211914, 0.028801727294921874, 0.028661535263061522, 0.02856595230102539, 0.02851430320739746, 0.028528287887573243, 0.028430431365966798, 0.028540384292602538, 0.0285765438079834, 0.028842111587524415, 0.028895103454589843, 0.029041183471679687, 0.028963008880615235, 0.028927711486816405, 0.028891679763793945, 0.028706783294677733, 0.028389408111572267, 0.02842848014831543, 0.02841580772399902, 0.028573408126831054, 0.028743711471557618, 0.02894054412841797, 0.029085695266723634, 0.028811264038085937, 0.028792287826538084, 0.028962560653686523, 0.02970636749267578, 0.02868614387512207, 0.028551551818847658, 0.02843280029296875, 0.02850377655029297, 0.028445024490356446, 0.02849001693725586, 0.02839254379272461, 0.028439104080200197, 0.02839360046386719, 0.028571136474609377, 0.02848409652709961, 0.028484928131103517, 0.028781055450439453, 0.028665727615356445, 0.02858624076843262, 0.03141334342956543, 0.029481887817382812, 0.028786752700805666, 0.02859779167175293, 0.028537248611450194, 0.028956159591674805, 0.029706335067749022, 0.02926246452331543, 0.029290271759033204, 0.028962080001831054, 0.02879689598083496, 0.029346559524536135, 0.028729343414306642, 0.029544704437255858, 0.029818624496459962, 0.028756128311157227, 0.028516191482543946, 0.02894643211364746, 0.028560735702514647, 0.028541023254394532, 0.028480064392089843, 0.028504032135009766, 0.02872118377685547, 0.028591520309448244, 0.028793439865112305, 0.028475072860717772, 0.028709184646606444, 0.028597503662109374, 0.02865843200683594, 0.028446720123291015, 0.028595232009887697, 0.02862886428833008, 0.028650720596313475, 0.028820159912109376, 0.028595359802246093, 0.02863030433654785, 0.028553184509277345, 0.02885193634033203, 0.028508127212524412, 0.028872703552246092, 0.02835251235961914, 0.028520383834838868, 0.028354623794555663, 0.028545024871826172, 0.028495328903198242, 0.028612991333007813, 0.028614175796508788, 0.028537471771240233, 0.028479488372802734, 0.028635135650634767, 0.028564640045166016, 0.028519264221191408, 0.028621055603027343, 0.028646400451660156, 0.02849827194213867, 0.02842460823059082, 0.02836675262451172, 0.028414047241210938, 0.028544672012329103, 0.028446144104003906, 0.028381919860839842, 0.028444864273071288, 0.028511327743530275, 0.02852137565612793, 0.02840985679626465, 0.028579519271850585, 0.028899648666381835, 0.028872703552246092, 0.02870675277709961, 0.028681663513183592, 0.028486175537109373, 0.028485727310180665, 0.028622783660888673, 0.028692544937133788, 0.02851020812988281, 0.02868783950805664, 0.02875347137451172, 0.02864022445678711, 0.028593215942382812, 0.028787647247314453, 0.029000864028930665, 0.028892000198364257, 0.02872422409057617, 0.0287324161529541, 0.028577184677124022, 0.028598175048828126, 0.028432640075683593, 0.02847964859008789, 0.028525951385498047, 0.028545951843261717, 0.028716543197631835, 0.02895088005065918, 0.028702335357666017, 0.029205184936523437, 0.028892864227294923, 
0.028461376190185548, 0.028620031356811522, 0.02855603218078613, 0.02882508850097656, 0.028700319290161133, 0.028511072158813478, 0.028757631301879884, 0.029006208419799805, 0.028923904418945313, 0.02902835273742676, 0.028735488891601563, 0.0288603515625, 0.028934335708618163, 0.028783584594726564, 0.028593055725097655, 0.028536832809448243, 0.02855526351928711, 0.02872700881958008, 0.02885206413269043, 0.02891584014892578, 0.029085727691650392, 0.029161760330200195, 0.028882623672485352, 0.028706560134887694, 0.028604223251342775, 0.028723039627075196, 0.028631967544555666, 0.028659456253051756, 0.028569759368896483, 0.028569408416748047, 0.02863132858276367, 0.02852016067504883, 0.02850009536743164, 0.02847350311279297, 0.02856867218017578, 0.028887968063354492, 0.028762111663818358, 0.028909215927124022, 0.02888025665283203, 0.029024576187133787, 0.02918876838684082, 0.029164863586425782, 0.02900160026550293, 0.02892883110046387, 0.02937830352783203, 0.030646528244018555, 0.028692256927490233, 0.028600479125976564, 0.028473407745361327, 0.028676095962524413, 0.028569055557250977, 0.028524831771850587, 0.028532415390014648, 0.0287708797454834, 0.02876416015625, 0.028690431594848635, 0.028645376205444335, 0.028670272827148437, 0.028407487869262695, 0.028516288757324218, 0.028530752182006836, 0.02908585548400879, 0.028793312072753905, 0.02881724739074707, 0.02869059181213379, 0.02884841537475586, 0.029199647903442382, 0.028694143295288087, 0.028613664627075194, 0.028612384796142577, 0.028606752395629882, 0.02846281623840332, 0.02863260841369629, 0.028440736770629884, 0.02881772804260254, 0.028821504592895508, 0.02859132766723633, 0.028871456146240235, 0.028676095962524413, 0.028674272537231444, 0.02868332862854004, 0.028708927154541014, 0.028672767639160157, 0.02850806427001953, 0.02871014404296875, 0.028677888870239258, 0.028701087951660157, 0.030229087829589843, 0.030007295608520508, 0.0296013126373291, 0.028913536071777345, 0.02863929557800293, 0.028774944305419922, 0.028759647369384765, 0.0288505916595459, 0.028794879913330077, 0.028872480392456056, 0.02859644889831543, 0.02878463935852051, 0.02850201606750488, 0.028769760131835936, 0.028513887405395507, 0.028420703887939453, 0.0291560001373291, 0.02899510383605957, 0.02864703941345215, 0.02866640090942383, 0.028670015335083007, 0.02863030433654785, 0.028653535842895508, 0.03055686378479004, 0.029673471450805664, 0.0292096004486084, 0.028605440139770507, 0.028512256622314453, 0.028440576553344726, 0.02851968002319336, 0.02858880043029785, 0.028581695556640627, 0.02860665512084961, 0.02873958396911621, 0.028499103546142577, 0.02865443229675293, 0.028655616760253907, 0.0291267204284668, 0.028964799880981447, 0.028579839706420897, 0.02869980812072754, 0.028738399505615235, 0.028733247756958007, 0.028421407699584962, 0.028478464126586913, 0.029215776443481445, 0.029442943572998048, 0.02885647964477539, 0.028866016387939453, 0.02874950408935547, 0.02873750305175781, 0.028666591644287108, 0.028591840744018555, 0.028423904418945312, 0.029784576416015625, 0.028698495864868164, 0.028936384201049804, 0.028433727264404296, 0.028495967864990233, 0.028361183166503906, 0.02851238441467285, 0.028588031768798827, 0.02857779121398926, 0.02855116844177246, 0.028576095581054686, 0.0284946231842041, 0.028414783477783204, 0.02845907211303711, 0.028512191772460938, 0.028776512145996094, 0.029321216583251954, 0.02893619155883789, 0.029080863952636718, 0.029092607498168947, 0.028893152236938478, 0.028665855407714845, 0.028673696517944335, 0.02866771125793457, 
0.028600160598754882, 0.028442720413208007, 0.02849443244934082, 0.028432384490966797, 0.028436832427978516, 0.028466720581054688, 0.028653696060180665, 0.028672000885009766, 0.028622880935668945, 0.028446687698364257, 0.028622848510742187, 0.028956672668457032, 0.02860611152648926, 0.028698976516723634, 0.028589632034301756, 0.028919744491577148, 0.028467615127563475, 0.028373088836669922, 0.028669599533081055, 0.028522048950195313, 0.02901865577697754, 0.028883007049560545, 0.029074623107910157, 0.028592960357666015, 0.028493919372558595, 0.02871286392211914, 0.02911631965637207, 0.028698720932006837, 0.028816831588745116, 0.02825222396850586, 0.02843084716796875, 0.028477439880371092, 0.028637184143066406, 0.028626943588256838, 0.02846073532104492, 0.028547391891479493, 0.02851840019226074, 0.028841983795166014, 0.02871494483947754, 0.02856284713745117, 0.028295839309692383, 0.02831974411010742, 0.028704767227172853, 0.028665855407714845, 0.028591487884521486, 0.028723840713500978, 0.02874367904663086, 0.028495872497558594, 0.028393728256225586, 0.028466880798339842, 0.028408000946044922, 0.028510080337524415, 0.028987136840820313, 0.028516128540039064, 0.028479551315307616, 0.028307775497436523, 0.029233440399169922, 0.028370752334594726, 0.02831155204772949, 0.028413951873779295, 0.028362688064575196, 0.028829311370849608, 0.02856185531616211, 0.028839744567871094, 0.028342464447021484, 0.028516351699829103, 0.02874275207519531, 0.02854185676574707, 0.028812864303588866, 0.028712799072265625, 0.028295200347900392, 0.0283756160736084, 0.029234752655029297, 0.02956742477416992, 0.028704767227172853, 0.028851648330688477, 0.028619327545166016, 0.02933350372314453, 0.029197952270507813, 0.029055360794067384, 0.0291362247467041, 0.02885209655761719, 0.028748575210571288, 0.028868608474731446, 0.028436479568481447, 0.02851878356933594, 0.02854262351989746, 0.02828633689880371, 0.02849072074890137, 0.028405311584472657, 0.028421728134155274, 0.028377952575683593, 0.028260351181030274, 0.02831564712524414, 0.028415199279785155, 0.028248863220214845, 0.028231679916381838, 0.028167327880859374, 0.028455072402954102, 0.028259008407592774, 0.028939584732055663, 0.028782943725585937, 0.028592479705810546, 0.028514400482177734, 0.02849577522277832, 0.028424192428588867, 0.028467199325561524, 0.02840131187438965, 0.028295520782470704, 0.028459007263183594, 0.028416000366210937, 0.028430335998535155, 0.028626943588256838, 0.028254207611083985, 0.028430335998535155, 0.02852659225463867, 0.028470720291137695, 0.0283571834564209, 0.02838528060913086, 0.028330015182495116, 0.028626720428466798, 0.028719295501708986, 0.028424192428588867, 0.028315008163452147, 0.028310304641723634, 0.02833827209472656, 0.02840278434753418, 0.028240480422973634, 0.02843244743347168, 0.028424192428588867, 0.028399616241455077, 0.02838118362426758, 0.028411903381347657, 0.028276735305786133, 0.028514272689819337, 0.028329696655273438, 0.028436672210693358, 0.02829324722290039, 0.028446720123291015, 0.028499967575073244, 0.028411615371704103, 0.028303232192993164, 0.028336063385009765, 0.02826697540283203, 0.02859014320373535, 0.0288023681640625, 0.02840230369567871, 0.028608480453491212, 0.028696159362792968, 0.028512256622314453, 0.02868614387512207, 0.028213855743408203, 0.028379135131835938, 0.02846211242675781, 0.028353504180908203, 0.028438528060913085, 0.02828886413574219, 0.028268703460693358, 0.02816204833984375, 0.028184576034545897, 0.028241504669189454, 0.02844076728820801, 0.02834636878967285, 
0.028566911697387697, 0.02876608085632324, 0.028514495849609377, 0.028277503967285157, 0.028249216079711915, 0.028265024185180666, 0.028303680419921876, 0.02828822326660156, 0.028564287185668946, 0.028290943145751955, 0.028395263671875, 0.02833148765563965, 0.02848246383666992, 0.029099424362182616, 0.028584543228149413, 0.028395519256591797, 0.028588031768798827, 0.028217344284057616, 0.028274015426635744, 0.028446687698364257, 0.028703168869018556, 0.028379520416259765, 0.028482751846313478, 0.028207872390747072, 0.028409791946411134, 0.028409055709838867, 0.028672800064086915, 0.02840985679626465, 0.02851840019226074, 0.02838937568664551, 0.028280832290649413, 0.028329984664916992, 0.02856755256652832, 0.028530815124511718, 0.028338048934936525, 0.028294879913330077, 0.02853638458251953, 0.02846793556213379, 0.02846112060546875, 0.028453887939453124, 0.029133760452270507, 0.028489471435546875, 0.028583295822143556, 0.02861142349243164, 0.029011999130249023, 0.028804447174072264, 0.02887740707397461, 0.028934207916259766]",tokens/s,34.867352457693194,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1173.385216,1101.98784,0.0,706.740224,681.6384,s,1,7.9774697265625,7.9774697265625,0.0,7.9774697265625,7.9774697265625,7.9774697265625,7.9774697265625,[7.9774697265625],,kWh,3.170235369582694e-05,3.489330949884943e-06,9.687785528005033e-06,4.487947017371692e-05,,MB,1504.735232,1406.07488,0.0,996.1472,949.238272,s,10,0.30593103981018066,0.030593103981018065,0.0002849342081377975,0.030481040000915526,0.031006976318359376,0.031121599769592286,0.031213298530578613,"[0.031236223220825195, 0.030656864166259765, 0.03054080009460449, 0.030380895614624023, 0.030413471221923827, 0.030314720153808594, 0.030421279907226564, 0.03062700843811035, 0.030981504440307617, 0.030358272552490233]",tokens/s,8367.898862398497,kWh,9.069501885869118e-07,1.0001952580079885e-07,5.511840158260737e-07,1.5581537302137844e-06,tokens/kWh,164297010.64533335,MB,1538.02752,1414.463488,0.0,1004.535808,949.240832,s,10,14.960107177734374,1.4960107177734376,0.0069726967375618835,1.4946002197265624,1.5034655639648438,1.507519464111328,1.5107625842285155,"[1.49989990234375, 1.4917362060546875, 1.4974642333984376, 1.4908616943359374, 1.497645751953125, 1.502564697265625, 1.488336181640625, 1.4883565673828125, 1.4916685791015625, 1.5115733642578124]",tokens/s,42.111997762800115,kWh,4.406717211932881e-05,4.8602477698235655e-06,1.7025524489974185e-05,6.595294437912654e-05,tokens/kWh,955226.4966041285,,s,630,14.958072921752935,0.023742972891671318,0.0004972487734326744,0.023646528244018555,0.02399850559234619,0.024191565704345705,0.025756715698242196,"[0.0238920955657959, 0.023654272079467773, 0.023745567321777343, 0.02382703971862793, 0.023885536193847656, 0.02376156806945801, 0.023725088119506837, 0.024677343368530273, 0.023870784759521483, 0.023814847946166992, 0.023971839904785155, 0.023897504806518553, 0.02444758415222168, 0.024135679244995118, 0.02389580726623535, 0.023705856323242187, 0.02391196823120117, 0.023658016204833984, 0.023755647659301757, 0.023525440216064453, 0.023517183303833008, 0.02353971290588379, 0.02370969581604004, 0.023607295989990236, 0.023822336196899413, 0.023699424743652345, 0.023924736022949217, 0.02362985610961914, 0.023901407241821288, 0.02396454429626465, 0.023752607345581055, 0.023817535400390624, 0.02366320037841797, 0.023678592681884766, 0.023622112274169924, 0.023478239059448243, 0.023652032852172853, 0.023728479385375978, 0.02414556884765625, 0.02365884780883789, 0.0237260799407959, 0.02378495979309082, 0.023675392150878907, 0.023558176040649415, 0.023850175857543947, 0.023820383071899414, 0.02391110420227051, 0.023975807189941405, 0.02596284866333008, 0.02398134422302246, 0.023884288787841795, 0.023661632537841797, 0.023696319580078125, 0.02366166305541992, 0.023554624557495116, 0.023478336334228515, 0.023506656646728515, 0.023402240753173827, 0.023532352447509765, 0.023711103439331055, 0.023646848678588867, 0.023658496856689453, 0.02378278350830078, 0.02378976058959961, 0.0236112003326416, 0.023476224899291992, 0.02346188735961914, 0.023562240600585937, 0.023410688400268553, 0.023617536544799804, 0.023404735565185547, 0.023539520263671874, 0.023471519470214842, 0.023356000900268556, 0.023464096069335937, 0.02350592041015625, 0.02359996795654297, 0.023672224044799805, 0.02400320053100586, 0.02384009552001953, 0.023814783096313477, 0.023559520721435547, 0.023621408462524415, 0.023564863204956054, 0.02370591926574707, 0.023459808349609375, 0.023504735946655274, 0.02372380828857422, 0.024148384094238282, 0.024465599060058595, 
0.023997983932495116, 0.023940383911132814, 0.02381670379638672, 0.02376550483703613, 0.024311040878295897, 0.02413369560241699, 0.0237795524597168, 0.0238637752532959, 0.023680639266967774, 0.023707103729248048, 0.023808927536010743, 0.023803903579711915, 0.023560192108154295, 0.023592960357666014, 0.02345779228210449, 0.0236395206451416, 0.02342108726501465, 0.023621984481811523, 0.02348569679260254, 0.023857952117919922, 0.023580671310424805, 0.02370684814453125, 0.0236408634185791, 0.023848543167114256, 0.02374083137512207, 0.02370355224609375, 0.023479551315307618, 0.02351923179626465, 0.023674688339233398, 0.02378438377380371, 0.023554048538208007, 0.023619583129882812, 0.02358460807800293, 0.023527584075927734, 0.02352924728393555, 0.023446752548217775, 0.023617504119873046, 0.02362166404724121, 0.023708736419677735, 0.023718528747558594, 0.023900480270385743, 0.023969791412353517, 0.023959711074829103, 0.02392848014831543, 0.02376313591003418, 0.02369331169128418, 0.02344960021972656, 0.02361302375793457, 0.02344905662536621, 0.023319488525390626, 0.02344688034057617, 0.023536224365234375, 0.023565568923950196, 0.023497535705566407, 0.023414783477783203, 0.02334467124938965, 0.023496992111206056, 0.023496896743774413, 0.02350601577758789, 0.02366582489013672, 0.02581068801879883, 0.023883775711059572, 0.023609344482421874, 0.02353958320617676, 0.023476352691650392, 0.023654399871826173, 0.02388159942626953, 0.023726463317871094, 0.023613183975219727, 0.023547903060913086, 0.023568384170532225, 0.02352025604248047, 0.02357046318054199, 0.02351203155517578, 0.023586559295654296, 0.02359321594238281, 0.02428313636779785, 0.02404761505126953, 0.02369126319885254, 0.023748607635498048, 0.023826208114624024, 0.023574304580688477, 0.023705312728881836, 0.02383535957336426, 0.023810176849365233, 0.023713632583618163, 0.02376406478881836, 0.024500543594360352, 0.025285247802734376, 0.024777759552001954, 0.02402777671813965, 0.02388115119934082, 0.023776159286499024, 0.02366464042663574, 0.023738048553466798, 0.02401299285888672, 0.023677055358886718, 0.023554048538208007, 0.023588863372802735, 0.02651683235168457, 0.027435680389404297, 0.023688671112060546, 0.023634271621704103, 0.02352761650085449, 0.023463552474975585, 0.02341516876220703, 0.023565824508666993, 0.023623424530029295, 0.023630592346191408, 0.023575679779052734, 0.02353660774230957, 0.023527328491210937, 0.023584768295288085, 0.02354380798339844, 0.023459840774536132, 0.02346188735961914, 0.02349465560913086, 0.02344960021972656, 0.023384063720703126, 0.023343103408813477, 0.023417024612426757, 0.023719743728637697, 0.02355311965942383, 0.023613855361938475, 0.023706111907958984, 0.023842079162597656, 0.02378607940673828, 0.023995807647705078, 0.023763679504394532, 0.02370515251159668, 0.023648704528808594, 0.023568384170532225, 0.023488512039184572, 0.02351513671875, 0.023537311553955078, 0.023556255340576173, 0.023514911651611327, 0.02364361572265625, 0.023466943740844726, 0.02348236846923828, 0.023358720779418946, 0.023522016525268554, 0.02350601577758789, 0.023714208602905275, 0.023570976257324218, 0.023617727279663086, 0.02346988868713379, 0.023434751510620116, 0.023302528381347658, 0.02348044776916504, 0.023435264587402343, 0.023410240173339845, 0.02344799995422363, 0.023459840774536132, 0.02344550323486328, 0.023537664413452147, 0.02352742385864258, 0.02349635124206543, 0.023596704483032225, 0.023621599197387697, 0.023651008605957032, 0.02367081642150879, 0.023865440368652343, 0.023478176116943358, 
0.023646207809448243, 0.02352332878112793, 0.023597055435180665, 0.02364348793029785, 0.023633920669555664, 0.023861791610717772, 0.023758975982666016, 0.023631872177124022, 0.023455680847167967, 0.02335491180419922, 0.0233538875579834, 0.023797567367553712, 0.023684543609619142, 0.02394598388671875, 0.02389798355102539, 0.02368320083618164, 0.023658496856689453, 0.023635551452636717, 0.023480735778808593, 0.023541759490966797, 0.02361315155029297, 0.0234803524017334, 0.028160255432128908, 0.023734272003173826, 0.02369945526123047, 0.023613183975219727, 0.023668991088867188, 0.023822336196899413, 0.023793472290039062, 0.02366044807434082, 0.02385334396362305, 0.023746496200561525, 0.023670848846435548, 0.023650400161743163, 0.023701408386230468, 0.02367283248901367, 0.023576576232910155, 0.023528768539428712, 0.023548511505126952, 0.023664255142211914, 0.02386172866821289, 0.0237238712310791, 0.02363612747192383, 0.023692607879638672, 0.023715776443481447, 0.023724800109863282, 0.02372345542907715, 0.023970367431640625, 0.023705440521240233, 0.023459999084472657, 0.023611391067504883, 0.023667808532714843, 0.024595359802246093, 0.025053119659423827, 0.02361759948730469, 0.023590911865234376, 0.02371788787841797, 0.023484287261962892, 0.02350726318359375, 0.023835935592651368, 0.023568063735961913, 0.024106496810913085, 0.02358732795715332, 0.023638015747070314, 0.023400447845458985, 0.023506336212158203, 0.023501472473144533, 0.02361939239501953, 0.023484544754028322, 0.02359702491760254, 0.02338819122314453, 0.024063615798950194, 0.025059711456298827, 0.023543039321899415, 0.02350966453552246, 0.023421024322509764, 0.023375104904174805, 0.023519680023193358, 0.02355232048034668, 0.02393052864074707, 0.023877792358398438, 0.02459872055053711, 0.025624576568603515, 0.024131584167480468, 0.024153919219970704, 0.02418911933898926, 0.02391993522644043, 0.02420307159423828, 0.024927104949951172, 0.02432614326477051, 0.024131584167480468, 0.024252416610717774, 0.02410220718383789, 0.024013504028320313, 0.023901952743530273, 0.02386764717102051, 0.024004608154296874, 0.02386124801635742, 0.023799808502197265, 0.023782880783081054, 0.023681568145751952, 0.02364825630187988, 0.02368921661376953, 0.023641216278076173, 0.023401344299316406, 0.02346134376525879, 0.02345212745666504, 0.023467296600341796, 0.023490976333618165, 0.02362713623046875, 0.0238023681640625, 0.023919103622436523, 0.023779327392578126, 0.02386528015136719, 0.024145055770874023, 0.023959711074829103, 0.024110912322998047, 0.023845823287963867, 0.023741727828979493, 0.02358550453186035, 0.023525087356567383, 0.023758655548095704, 0.023585248947143554, 0.02368307113647461, 0.02375379180908203, 0.023647167205810546, 0.02371379280090332, 0.02349260711669922, 0.023917600631713866, 0.023833568572998048, 0.023750656127929686, 0.023805952072143553, 0.023812095642089845, 0.02364825630187988, 0.023727487564086915, 0.02373081588745117, 0.02365235137939453, 0.02361142349243164, 0.023481311798095702, 0.023634944915771484, 0.02344550323486328, 0.023478271484375, 0.02338515281677246, 0.023528064727783203, 0.023850400924682616, 0.023707584381103517, 0.023720928192138672, 0.023535167694091797, 0.023518783569335937, 0.023571264266967772, 0.023476287841796874, 0.023479551315307618, 0.023488672256469726, 0.023569087982177734, 0.023566240310668944, 0.023546880722045898, 0.02345235252380371, 0.023388351440429687, 0.023478399276733397, 0.023557855606079103, 0.023511072158813477, 0.023388320922851563, 0.023482271194458008, 0.023654367446899412, 
0.02412566375732422, 0.023699583053588866, 0.023651647567749023, 0.023853311538696288, 0.023595327377319335, 0.023514911651611327, 0.02369968032836914, 0.02393087959289551, 0.023635967254638672, 0.023433216094970705, 0.023549951553344727, 0.023541952133178713, 0.023494464874267578, 0.02348236846923828, 0.02384000015258789, 0.023757087707519532, 0.02349235153198242, 0.023627552032470703, 0.02356934356689453, 0.023602848052978517, 0.023673120498657226, 0.02357004737854004, 0.023801792144775392, 0.023581375122070314, 0.023445695877075196, 0.023418752670288086, 0.02340255928039551, 0.0234432315826416, 0.023419168472290038, 0.02339027214050293, 0.023357440948486328, 0.023373760223388673, 0.023778335571289062, 0.023823328018188476, 0.023533504486083986, 0.0236945915222168, 0.023545759201049805, 0.023580671310424805, 0.023509920120239256, 0.02370512008666992, 0.02346236801147461, 0.02351923179626465, 0.023514175415039064, 0.023491199493408204, 0.023521600723266603, 0.023566144943237305, 0.02358086395263672, 0.02382195281982422, 0.02401523208618164, 0.023830528259277343, 0.023763071060180663, 0.023690208435058594, 0.023704479217529297, 0.023799392700195314, 0.02363155174255371, 0.02359574317932129, 0.024160255432128908, 0.023770624160766602, 0.023710399627685546, 0.02361724853515625, 0.023607391357421875, 0.02365644836425781, 0.02367897605895996, 0.023818239212036133, 0.023644159317016602, 0.02365644836425781, 0.023517183303833008, 0.023584192276000976, 0.02411782455444336, 0.02386025619506836, 0.02382080078125, 0.023628255844116212, 0.023550975799560548, 0.023591936111450194, 0.023549951553344727, 0.023625728607177734, 0.02351696014404297, 0.02349603271484375, 0.02354470443725586, 0.02344550323486328, 0.023379199981689452, 0.02353433609008789, 0.023574079513549805, 0.02368556785583496, 0.023517183303833008, 0.023764352798461914, 0.023732511520385743, 0.02350432014465332, 0.023540191650390625, 0.023801952362060546, 0.023756479263305662, 0.02350111961364746, 0.02357481575012207, 0.02376675224304199, 0.02349465560913086, 0.02352060890197754, 0.02338268852233887, 0.023472127914428712, 0.02352742385864258, 0.023615455627441405, 0.023563711166381836, 0.02394918441772461, 0.023687904357910156, 0.023696479797363282, 0.023825311660766603, 0.023432384490966796, 0.02360198402404785, 0.023541408538818358, 0.02349007987976074, 0.023571264266967772, 0.023561920166015625, 0.023548223495483397, 0.023594335556030275, 0.023567007064819335, 0.023479488372802733, 0.023530303955078127, 0.02371401596069336, 0.023621408462524415, 0.02374380874633789, 0.024146240234375, 0.023884159088134765, 0.02369923210144043, 0.023642335891723633, 0.02358787155151367, 0.023841760635375978, 0.023601152420043944, 0.023506591796875, 0.02368342399597168, 0.023487903594970702, 0.02343948745727539, 0.023555904388427733, 0.023920799255371095, 0.02368054389953613, 0.02377398490905762, 0.023646080017089843, 0.023786815643310547, 0.02373324775695801, 0.02388991928100586, 0.023820287704467775, 0.023615264892578126, 0.023450944900512697, 0.023567264556884765, 0.023588064193725587, 0.023624479293823244, 0.024071775436401367, 0.02362614440917969, 0.02446335983276367, 0.024461311340332033, 0.023755903244018554, 0.023903104782104494, 0.02375699234008789, 0.02372364807128906, 0.02379385566711426, 0.02369536018371582, 0.02373017692565918, 0.02354380798339844, 0.023576576232910155, 0.023626976013183594, 0.023567136764526368, 0.023570432662963867, 0.02375600051879883, 0.023857152938842774, 0.02377996826171875, 0.023670944213867187, 0.02356163215637207, 
0.023685728073120117, 0.023803647994995118, 0.02412486457824707, 0.025300031661987306, 0.02402639961242676, 0.023986400604248045, 0.024169759750366213, 0.02516870307922363, 0.02745974349975586, 0.024098751068115234, 0.024070207595825194, 0.023814144134521483, 0.023736320495605468, 0.02353775978088379, 0.023548095703125, 0.023594079971313478, 0.023865440368652343, 0.02353721618652344, 0.023485183715820312, 0.025204959869384765, 0.023816192626953125, 0.023580671310424805, 0.030139583587646485, 0.024097440719604492, 0.023752864837646485, 0.023604671478271486, 0.023658464431762696, 0.023744287490844725, 0.023666912078857422, 0.02359891128540039, 0.023440160751342774, 0.023369728088378908, 0.023519392013549804, 0.023502687454223632, 0.02352345657348633, 0.023521408081054688, 0.023670528411865236, 0.023545856475830077, 0.02465177536010742, 0.02525539207458496, 0.024193567276000978, 0.023969791412353517, 0.02370150375366211, 0.023560287475585938, 0.0236911678314209, 0.023731391906738283, 0.023533920288085937]",tokens/s,42.117724876432185,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1845.112832,2760.835072,0.0,2365.587456,2314.318336,s,1,8.816744140625,8.816744140625,0.0,8.816744140625,8.816744140625,8.816744140625,8.816744140625,[8.816744140625],,kWh,5.478485249999494e-05,6.034266840466255e-06,1.6985013588002862e-05,7.780413292846406e-05,,MB,1848.582144,3092.185088,0.0,2682.257408,2607.60832,s,10,0.5065773773193359,0.05065773773193359,0.00020496744571672432,0.050588720321655276,0.05074995651245117,0.05099966545104981,0.05119943260192871,"[0.05124937438964844, 0.05058329772949219, 0.05051811218261719, 0.05058911895751953, 0.05054745483398437, 0.05058832168579101, 0.05066713714599609, 0.05069446563720703, 0.05062723159790039, 0.05051286315917969]",tokens/s,5053.522155977031,kWh,1.5431936126095368e-06,1.701864793126454e-07,1.024990586073712e-06,2.738370677995894e-06,tokens/kWh,93486247.88348827,MB,1848.582144,3092.185088,0.0,2682.257408,2607.61088,s,10,15.920609619140626,1.5920609619140627,0.005971955870236001,1.5915422973632811,1.5992711547851561,1.5996481506347657,1.5999497473144533,"[1.5907589111328124, 1.58003759765625, 1.5858822021484376, 1.58977587890625, 1.5933565673828125, 1.5989788818359374, 1.5991873779296875, 1.5902813720703124, 1.59232568359375, 1.600025146484375]",tokens/s,39.57134902941026,kWh,4.6056754418223436e-05,5.079726248708899e-06,2.1663277564525806e-05,7.279975823145815e-05,tokens/kWh,865387.4893333988,,s,630,15.918542459487922,0.02526752771347288,0.0003416172748956932,0.025196640014648435,0.025569696426391602,0.02576088399887085,0.02677633987426758,"[0.025562816619873047, 0.02546329689025879, 0.025427711486816405, 0.025194496154785157, 0.02530076789855957, 0.025295072555541993, 0.025429439544677735, 0.025172544479370118, 0.02522889518737793, 0.025182207107543944, 0.025069984436035156, 0.025357664108276366, 0.025485984802246092, 
0.02529484748840332, 0.025363679885864257, 0.02517411231994629, 0.02503353691101074, 0.024985471725463868, 0.025083904266357423, 0.025057279586791992, 0.02493440055847168, 0.02507776069641113, 0.02521887969970703, 0.025130559921264648, 0.024956832885742186, 0.024984224319458008, 0.02503481674194336, 0.024971263885498047, 0.024905344009399415, 0.026468671798706055, 0.025141311645507813, 0.025143295288085937, 0.024983135223388672, 0.025294496536254884, 0.02518502426147461, 0.025177248001098634, 0.0251539192199707, 0.025160160064697266, 0.025309183120727538, 0.025032703399658202, 0.025001344680786134, 0.02510233688354492, 0.025060224533081054, 0.02537388801574707, 0.025319135665893555, 0.02654819107055664, 0.027015615463256835, 0.025450944900512695, 0.025375776290893555, 0.02513814353942871, 0.02512895965576172, 0.025047136306762696, 0.025536415100097656, 0.02519183921813965, 0.02511926460266113, 0.025080095291137694, 0.025061151504516602, 0.025075168609619142, 0.02508140754699707, 0.025048032760620117, 0.025155359268188477, 0.02506332778930664, 0.02517228889465332, 0.02506342315673828, 0.025057279586791992, 0.024922111511230468, 0.025108480453491212, 0.025011520385742187, 0.025055744171142577, 0.02504489517211914, 0.024883487701416015, 0.02488319969177246, 0.02512838363647461, 0.02513727951049805, 0.02548601531982422, 0.025392480850219726, 0.02538332748413086, 0.025212127685546874, 0.02510108757019043, 0.02511052894592285, 0.02497331237792969, 0.025016511917114258, 0.025068544387817384, 0.02501046371459961, 0.025004064559936524, 0.024963584899902344, 0.024980735778808594, 0.024869632720947266, 0.024823392868041992, 0.02522972869873047, 0.02546441650390625, 0.025477535247802736, 0.02536390495300293, 0.02524575996398926, 0.025303552627563477, 0.024878656387329102, 0.024830400466918947, 0.025059328079223633, 0.025022464752197264, 0.024800928115844726, 0.024828256607055663, 0.025054431915283202, 0.025043743133544922, 0.025047040939331053, 0.02493440055847168, 0.025037887573242188, 0.025155712127685546, 0.025187135696411133, 0.02509993553161621, 0.025123008728027342, 0.025067680358886717, 0.024851551055908205, 0.024963327407836914, 0.025196512222290038, 0.025000640869140625, 0.025067520141601563, 0.02509775924682617, 0.0251048641204834, 0.025010175704956054, 0.025020288467407226, 0.0250097599029541, 0.025133600234985353, 0.025069568634033205, 0.025092096328735353, 0.02512886428833008, 0.02512406349182129, 0.02547999954223633, 0.025442047119140623, 0.026634111404418945, 0.025792896270751955, 0.025153535842895508, 0.025012287139892578, 0.025120704650878907, 0.024936447143554686, 0.024856576919555663, 0.02494054412841797, 0.024808639526367186, 0.02492089653015137, 0.025060928344726563, 0.025067968368530272, 0.02499577522277832, 0.025087520599365233, 0.02572496032714844, 0.02522982406616211, 0.02513462448120117, 0.025126976013183595, 0.02499398422241211, 0.02495417594909668, 0.02509712028503418, 0.0249487361907959, 0.024907039642333983, 0.024896352767944337, 0.025071487426757813, 0.024971263885498047, 0.025184255599975586, 0.025079328536987303, 0.025069311141967775, 0.024902368545532228, 0.02497439956665039, 0.024921024322509765, 0.02545452880859375, 0.025142719268798828, 0.024986240386962892, 0.024999935150146483, 0.025251840591430662, 0.024983327865600587, 0.024883424758911133, 0.025053184509277345, 0.025387008666992186, 0.024989696502685548, 0.02527846336364746, 0.02548121643066406, 0.025179391860961915, 0.02482275199890137, 0.025018144607543945, 0.025099872589111328, 0.025316959381103517, 
0.02490451240539551, 0.025087520599365233, 0.025002464294433594, 0.024997888565063478, 0.0271824951171875, 0.025160287857055662, 0.02529484748840332, 0.02525379180908203, 0.02526963233947754, 0.025129695892333985, 0.025356128692626954, 0.02521718406677246, 0.02517913627624512, 0.024976160049438475, 0.02503670310974121, 0.025268320083618165, 0.025006080627441408, 0.02510220718383789, 0.02514508819580078, 0.02500160026550293, 0.024887359619140625, 0.025094432830810545, 0.026052703857421877, 0.02498796844482422, 0.02509414482116699, 0.025097600936889647, 0.025031295776367188, 0.02494054412841797, 0.026804224014282226, 0.026626016616821287, 0.025489343643188476, 0.025425376892089843, 0.025498239517211915, 0.025311296463012695, 0.02537811279296875, 0.025293439865112306, 0.025260032653808592, 0.025184255599975586, 0.025247743606567383, 0.02505846405029297, 0.024981792449951173, 0.024998176574707032, 0.0250163516998291, 0.025008544921875, 0.025038463592529297, 0.025436384201049805, 0.02671308708190918, 0.02548863983154297, 0.025276159286499022, 0.025283967971801758, 0.02562704086303711, 0.025222816467285157, 0.024955007553100587, 0.02493280029296875, 0.02512825584411621, 0.02484908866882324, 0.02526924705505371, 0.025400352478027344, 0.025415008544921874, 0.02491628837585449, 0.025424192428588867, 0.025188352584838865, 0.025109823226928712, 0.024806079864501954, 0.025032703399658202, 0.025040960311889647, 0.025032480239868163, 0.025065696716308594, 0.025091552734375, 0.025051616668701173, 0.025049087524414062, 0.025034175872802735, 0.02505743980407715, 0.02516828727722168, 0.02498150444030762, 0.02511564826965332, 0.025138912200927736, 0.025090335845947265, 0.025085248947143556, 0.024858943939208983, 0.02497983932495117, 0.025169919967651368, 0.02494438362121582, 0.024942848205566408, 0.02502569580078125, 0.025082719802856444, 0.02495078468322754, 0.027107231140136717, 0.02651875114440918, 0.025428895950317384, 0.025292768478393554, 0.025234687805175782, 0.025207551956176757, 0.02527027130126953, 0.025034303665161132, 0.025467231750488283, 0.025417503356933595, 0.02549763107299805, 0.025315616607666017, 0.025209856033325196, 0.025154048919677735, 0.025164287567138673, 0.02507161521911621, 0.025163200378417967, 0.02524012756347656, 0.025608192443847655, 0.025370304107666015, 0.025320959091186524, 0.0253243522644043, 0.025785503387451173, 0.025219104766845704, 0.025606143951416017, 0.02561270332336426, 0.025569696426391602, 0.025124864578247072, 0.025108480453491212, 0.025214975357055663, 0.025268415451049804, 0.02504889678955078, 0.025364479064941405, 0.025366687774658205, 0.025642847061157225, 0.025130783081054688, 0.025196767807006835, 0.025103776931762696, 0.025227872848510743, 0.025208671569824218, 0.025346208572387695, 0.025290752410888673, 0.02532352066040039, 0.02529644775390625, 0.025213375091552734, 0.025285696029663084, 0.025242559432983397, 0.025083904266357423, 0.025109567642211915, 0.025191360473632813, 0.02516377639770508, 0.025141248703002928, 0.025177087783813477, 0.025731903076171875, 0.02512646484375, 0.025293216705322266, 0.02548918342590332, 0.02528857612609863, 0.025231935501098632, 0.0252860164642334, 0.0251757755279541, 0.025150367736816406, 0.02516377639770508, 0.025315008163452148, 0.0250729923248291, 0.025238496780395508, 0.025126911163330077, 0.025284032821655273, 0.025088575363159178, 0.025157632827758788, 0.02515545654296875, 0.02533967971801758, 0.025356639862060548, 0.02521446418762207, 0.025055744171142577, 0.025089408874511717, 0.02518284797668457, 
0.025085887908935546, 0.025235456466674806, 0.025258047103881836, 0.02545254325866699, 0.025685792922973634, 0.026071264266967775, 0.025286272048950197, 0.02522137641906738, 0.025206079483032228, 0.02528748893737793, 0.025274368286132814, 0.02539244842529297, 0.028379840850830076, 0.025569280624389647, 0.025430015563964844, 0.024995712280273436, 0.02526425552368164, 0.025415103912353517, 0.026460128784179686, 0.025956960678100587, 0.02551807975769043, 0.025651199340820312, 0.025610240936279297, 0.02536419105529785, 0.026054208755493163, 0.025277408599853515, 0.025278207778930663, 0.0252205753326416, 0.02519708824157715, 0.025210304260253905, 0.025155328750610353, 0.025242431640625, 0.02512892723083496, 0.025363840103149415, 0.02535696029663086, 0.02512076759338379, 0.025165824890136718, 0.025953632354736328, 0.025737888336181642, 0.025761791229248047, 0.02558585548400879, 0.02567558479309082, 0.025600000381469725, 0.025486623764038086, 0.025348831176757812, 0.02527132797241211, 0.025258975982666014, 0.026195968627929687, 0.025302911758422853, 0.02547110366821289, 0.025251840591430662, 0.02535424041748047, 0.025235456466674806, 0.025233407974243165, 0.025182144165039062, 0.025597600936889647, 0.025569696426391602, 0.025181663513183593, 0.025165983200073242, 0.025561471939086915, 0.025075712203979493, 0.02523910331726074, 0.025227712631225585, 0.02532352066040039, 0.025092096328735353, 0.025860095977783205, 0.0257322883605957, 0.025446847915649416, 0.02504252815246582, 0.025368640899658203, 0.02559859275817871, 0.025475168228149415, 0.025038463592529297, 0.025147775650024413, 0.025380224227905274, 0.025256832122802733, 0.025105663299560547, 0.025266687393188478, 0.025367904663085937, 0.025326240539550782, 0.02525174331665039, 0.02534204864501953, 0.025752992630004884, 0.025367136001586913, 0.025050880432128907, 0.02528281593322754, 0.025174016952514647, 0.02513689613342285, 0.025244960784912108, 0.02513580894470215, 0.02525814437866211, 0.02533724784851074, 0.02527440071105957, 0.025340383529663085, 0.0254814395904541, 0.02577123260498047, 0.02542367935180664, 0.025148351669311522, 0.025401376724243165, 0.025443359375, 0.02538444709777832, 0.02524211120605469, 0.02515260887145996, 0.02510531234741211, 0.024983552932739257, 0.02509823989868164, 0.025266176223754884, 0.025108480453491212, 0.025206783294677734, 0.025188352584838865, 0.02512281608581543, 0.025010112762451173, 0.025212255477905274, 0.025133312225341795, 0.02504547119140625, 0.025495552062988282, 0.0253767032623291, 0.025411264419555664, 0.0255567684173584, 0.02542243194580078, 0.025364479064941405, 0.025233024597167968, 0.02522297668457031, 0.025010751724243163, 0.025114751815795897, 0.025173248291015624, 0.025156511306762695, 0.024923295974731446, 0.02527084732055664, 0.02532966423034668, 0.02507366371154785, 0.025014272689819338, 0.025284608840942382, 0.02572902488708496, 0.02548121643066406, 0.025454591751098633, 0.02520591926574707, 0.025195295333862305, 0.025378240585327148, 0.02522585678100586, 0.025341951370239257, 0.02507766342163086, 0.02528879928588867, 0.025279935836791993, 0.025457216262817384, 0.02515260887145996, 0.02523638343811035, 0.02520579147338867, 0.025336799621582032, 0.025182207107543944, 0.02529280090332031, 0.025188352584838865, 0.025151487350463866, 0.02565894317626953, 0.025184703826904298, 0.025126911163330077, 0.02523750305175781, 0.025148544311523437, 0.025154176712036132, 0.025045248031616212, 0.025444416046142577, 0.02530246353149414, 0.025215263366699218, 0.025411584854125976, 
0.025278528213500975, 0.025108415603637694, 0.025200639724731445, 0.02506342315673828, 0.025136320114135743, 0.02512553596496582, 0.02566508865356445, 0.02549225616455078, 0.025070943832397462, 0.025089887619018553, 0.025209312438964845, 0.025163936614990234, 0.025135103225708007, 0.025208831787109375, 0.025217119216918944, 0.02506332778930664, 0.025089056015014648, 0.025180288314819336, 0.025032896041870117, 0.024936960220336913, 0.0250984001159668, 0.025212928771972655, 0.025153343200683593, 0.024930496215820313, 0.02559347152709961, 0.025034656524658205, 0.025057279586791992, 0.02508220863342285, 0.025106559753417967, 0.025083904266357423, 0.025006080627441408, 0.02511996841430664, 0.025209375381469726, 0.025317632675170898, 0.025300991058349608, 0.02533171272277832, 0.02517363166809082, 0.02521116828918457, 0.025184288024902343, 0.025401216506958008, 0.025126880645751953, 0.02520044708251953, 0.025759775161743163, 0.025148799896240234, 0.025122880935668945, 0.025269184112548828, 0.025091487884521483, 0.025391616821289063, 0.02613408088684082, 0.02554319953918457, 0.025694208145141603, 0.025429119110107423, 0.0257524471282959, 0.025447872161865233, 0.02529542350769043, 0.02514739227294922, 0.025201791763305663, 0.025278432846069336, 0.025056127548217774, 0.025933855056762694, 0.026226688385009765, 0.025382911682128906, 0.025218399047851562, 0.025318048477172853, 0.02508902359008789, 0.025289728164672853, 0.025263872146606445, 0.025333215713500976, 0.025242399215698243, 0.02496512031555176, 0.025134239196777344, 0.025068384170532226, 0.02514672088623047, 0.02637481689453125, 0.025581567764282227, 0.025260032653808592, 0.02546272087097168, 0.025276479721069337, 0.025200639724731445, 0.025210880279541017, 0.02525164794921875, 0.025080223083496094, 0.02512873649597168, 0.027023359298706053, 0.026802175521850585, 0.025605695724487305, 0.02593222427368164, 0.025550815582275392, 0.025417695999145506, 0.0256690559387207, 0.02597865676879883, 0.025449087142944336, 0.025179424285888673, 0.025260992050170898, 0.025427743911743163, 0.025229280471801757, 0.025220384597778322, 0.025230304718017578, 0.025378816604614256, 0.025154848098754883, 0.025258560180664063, 0.025437952041625977, 0.025645471572875975, 0.025390592575073243, 0.025353952407836913, 0.02558236885070801, 0.025425920486450194, 0.0252509765625, 0.02518649673461914, 0.025247808456420898, 0.025330272674560547, 0.02527027130126953, 0.02525951957702637, 0.025192960739135743, 0.025100288391113282, 0.02532761573791504, 0.025333759307861328, 0.026474496841430665, 0.025116672515869142, 0.025154815673828126, 0.02512575912475586, 0.02518809509277344, 0.025337984085083008, 0.025079616546630858, 0.02531564712524414]",tokens/s,39.57648770943232,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,824.741888,554.631168,0.0,159.383552,143.673856,s,1,7.478853515625,7.478853515625,0.0,7.478853515625,7.478853515625,7.478853515625,7.478853515625,[7.478853515625],,kWh,1.0678520679164194e-05,1.170553645839295e-06,2.7013910500056637e-06,1.4550465375009153e-05,,MB,1334.976512,609.15712,0.0,199.22944,186.684928,s,26,0.20124217653274534,0.007740083712797898,8.766793833376148e-05,0.007727728128433227,0.007832304000854492,0.007928336143493652,0.00797323191165924,"[0.007979519844055176, 0.007784575939178467, 0.007776576042175293, 0.0077358717918396, 0.007770431995391846, 0.0077610878944396975, 0.007806848049163819, 0.007745632171630859, 0.007740416049957275, 0.007731872081756592, 0.007612895965576172, 0.007652063846588134, 0.007627200126647949, 0.007639679908752441, 0.007671711921691894, 0.007814367771148682, 0.007954368114471436, 0.007850240230560303, 0.007647424221038818, 0.007723584175109864, 0.007691872119903564, 0.007701504230499268, 0.007705183982849121, 0.007722176074981689, 0.007712160110473633, 0.00768291187286377]",tokens/s,33074.57767888413,kWh,2.3046161862339852e-07,2.5411420892259663e-08,9.955721373323103e-08,3.5543025324888916e-07,tokens/kWh,720253826.6227344,MB,1374.285824,611.254272,0.0,201.326592,186.687488,s,26,10.150502136230466,0.39040392831655646,0.002830543431388422,0.3892741394042969,0.3940727081298828,0.3958575744628906,0.3988802185058594,"[0.39970950317382814, 0.38872210693359377, 0.3894590148925781, 0.3931019897460937, 0.3898243103027344, 0.3889993896484375, 0.3889587707519531, 0.3901562805175781, 0.3920888366699219, 0.38913449096679686, 0.38812295532226565, 0.3888493347167969, 0.38840655517578127, 0.3893247375488281, 0.3899820251464844, 0.3942532043457031, 0.39639236450195314, 0.39110336303710935, 0.3938922119140625, 0.38784860229492185, 0.38820147705078123, 0.3884596252441406, 0.38892135620117185, 0.3893332214355469, 0.3880328674316406, 0.3892235412597656]",tokens/s,161.3713270551849,kWh,1.1370478016312588e-05,1.2539743753767515e-06,3.9676896298051265e-06,1.6592142021494465e-05,tokens/kWh,3796978.10676801,,s,1638,10.138676737308518,0.006189668337795178,0.00015010116841278822,0.006172384023666383,0.006271177625656128,0.0063377505302429195,0.0067069341897964445,"[0.006173952102661133, 0.006246623992919922, 0.006181119918823242, 0.006245664119720459, 0.006173855781555175, 0.006268864154815674, 0.00621292781829834, 0.006273695945739746, 0.007050464153289795, 0.00792742395401001, 0.007961567878723144, 0.00797705602645874, 0.007917471885681152, 0.006174047946929931, 0.006233920097351074, 0.006347616195678711, 0.00642790412902832, 0.006175487995147705, 0.006289408206939697, 0.006230016231536865, 0.006166528224945069, 0.0061972479820251464, 0.006258687973022461, 0.006250495910644531, 0.006127391815185547, 0.006266335964202881, 0.006126336097717285, 0.006236159801483154, 0.006182911872863769, 0.006148384094238281, 0.0061718721389770505, 0.006175136089324951, 0.006199391841888427, 0.006127615928649902, 0.0062321281433105466, 0.006131648063659668, 0.0062197761535644535, 0.0061066560745239255, 0.006230303764343262, 0.006174911975860595, 0.006319680213928223, 0.006214079856872559, 0.006130943775177002, 0.0062041602134704586, 0.006108704090118408, 0.0062015681266784665, 0.006108543872833252, 0.00656492805480957, 0.006303071975708008, 0.006158304214477539, 0.006210048198699952, 0.0061296639442443845, 0.006237760066986084, 0.006144639968872071, 0.0063071041107177735, 0.006178783893585205, 0.006289984226226807, 
0.006260735988616943, 0.006172671794891358, 0.006244383811950684, 0.00617468786239624, 0.006208831787109375, 0.006131904125213623, 0.006114848136901855, 0.006083040237426758, 0.0061972479820251464, 0.006131840229034424, 0.006174592018127442, 0.006196544170379638, 0.006159039974212646, 0.006188064098358154, 0.0061279358863830565, 0.006481728076934814, 0.006204127788543701, 0.006236288070678711, 0.006166783809661866, 0.006196000099182129, 0.006201632022857666, 0.006105663776397705, 0.006170752048492431, 0.006119423866271972, 0.006167712211608886, 0.006113408088684082, 0.006212319850921631, 0.006088704109191895, 0.006227968215942382, 0.006080383777618408, 0.006205535888671875, 0.006090784072875976, 0.006154240131378174, 0.006164480209350586, 0.006244128227233887, 0.006176383972167969, 0.006111839771270752, 0.006186592102050781, 0.006255008220672607, 0.006194431781768799, 0.006099679946899414, 0.006213664054870605, 0.006088736057281494, 0.006207456111907959, 0.0062481918334960935, 0.00610643196105957, 0.006170656204223633, 0.006091904163360595, 0.006170400142669678, 0.006086656093597412, 0.006207488059997559, 0.006111104011535645, 0.006193408012390137, 0.006071392059326172, 0.006202144145965576, 0.006078271865844726, 0.006167840003967285, 0.006120160102844239, 0.006146207809448242, 0.00616809606552124, 0.0061077442169189455, 0.006149919986724853, 0.0061175041198730466, 0.006162079811096191, 0.006107039928436279, 0.006227488040924072, 0.006108128070831299, 0.0062258877754211425, 0.006128672122955322, 0.006184832096099853, 0.006167103767395019, 0.006141952037811279, 0.006189055919647217, 0.00610643196105957, 0.00617910385131836, 0.006213024139404297, 0.006206816196441651, 0.006098015785217285, 0.0061773438453674315, 0.006066431999206543, 0.006172416210174561, 0.006108320236206054, 0.0061346240043640135, 0.0061421761512756344, 0.006100255966186524, 0.00613651180267334, 0.006088511943817139, 0.006162015914916992, 0.006089119911193847, 0.006164415836334229, 0.006076767921447754, 0.006176640033721923, 0.006084447860717774, 0.006213632106781006, 0.006115392208099365, 0.006266816139221192, 0.006196767807006836, 0.006263199806213379, 0.00625055980682373, 0.006180863857269287, 0.0062259521484375, 0.006635488033294678, 0.006211584091186524, 0.0061214399337768555, 0.006153439998626709, 0.00620198392868042, 0.006125760078430176, 0.006240255832672119, 0.006209536075592041, 0.006229023933410644, 0.006101984024047852, 0.006224991798400879, 0.00611740779876709, 0.006222720146179199, 0.006153823852539063, 0.006174272060394287, 0.00621449613571167, 0.006158336162567139, 0.006188543796539306, 0.006123231887817383, 0.006304543972015381, 0.00608790397644043, 0.006193952083587646, 0.006080319881439209, 0.006238656044006348, 0.00611081600189209, 0.006142111778259277, 0.0061521921157836916, 0.006109183788299561, 0.006160384178161621, 0.006350399971008301, 0.006186431884765625, 0.006044223785400391, 0.006187007904052734, 0.006105088233947754, 0.006185184001922608, 0.006106592178344726, 0.0061907520294189455, 0.006178688049316406, 0.006144927978515625, 0.006193183898925782, 0.006118783950805664, 0.006168352127075195, 0.006105760097503662, 0.006190207958221436, 0.00606873607635498, 0.006181280136108399, 0.006067647933959961, 0.006385471820831299, 0.006174975872039795, 0.006232063770294189, 0.006155968189239502, 0.00611411190032959, 0.006192768096923828, 0.0061032319068908695, 0.006282815933227539, 0.006098591804504395, 0.0061859521865844725, 0.0060928001403808595, 0.006176767826080322, 0.0060863041877746585, 
0.006250080108642578, 0.006159039974212646, 0.006139071941375732, 0.006153088092803955, 0.0060928001403808595, 0.00616156816482544, 0.006111584186553955, 0.006152512073516846, 0.006134208202362061, 0.006202720165252686, 0.006105408191680908, 0.006185056209564209, 0.006063168048858643, 0.006153151988983154, 0.006107135772705078, 0.0061421761512756344, 0.006355967998504639, 0.006161183834075928, 0.006227519989013672, 0.006147744178771973, 0.006255551815032959, 0.0061049280166625974, 0.006220032215118408, 0.006097792148590088, 0.006187903881072998, 0.006128960132598877, 0.00618345594406128, 0.00683958387374878, 0.008580991744995117, 0.006486815929412842, 0.007170271873474121, 0.0062542400360107425, 0.006131040096282959, 0.0062904319763183595, 0.006124576091766357, 0.006222623825073242, 0.0061970877647399905, 0.006148416042327881, 0.006188223838806153, 0.00610975980758667, 0.0062297601699829105, 0.006095232009887696, 0.006189087867736817, 0.00610316801071167, 0.006217504024505615, 0.006191328048706055, 0.006170400142669678, 0.006172895908355713, 0.006144000053405762, 0.0061972479820251464, 0.0061380801200866695, 0.0062576642036437985, 0.006117856025695801, 0.006202847957611084, 0.006103392124176025, 0.006199808120727539, 0.006141952037811279, 0.006177887916564941, 0.006199520111083984, 0.006130112171173096, 0.006160639762878418, 0.006111423969268798, 0.006174431800842285, 0.0060867519378662105, 0.006164480209350586, 0.006070367813110352, 0.006209375858306884, 0.006156352043151856, 0.006256351947784424, 0.0061485438346862795, 0.00617574405670166, 0.006181727886199951, 0.006160384178161621, 0.006209536075592041, 0.006151711940765381, 0.0064058880805969234, 0.006130208015441894, 0.006245888233184814, 0.0061151041984558105, 0.006211520195007324, 0.0062137598991394045, 0.006155072212219238, 0.0061996798515319826, 0.006565536022186279, 0.006258687973022461, 0.006137887954711914, 0.006241951942443848, 0.006146368026733399, 0.006178624153137207, 0.006181056022644043, 0.006236159801483154, 0.006196447849273682, 0.006112192153930664, 0.006213471889495849, 0.00608787202835083, 0.006212224006652832, 0.006117216110229492, 0.0061562881469726565, 0.006193280220031738, 0.0061950721740722655, 0.006215007781982422, 0.006146719932556152, 0.0062259202003479, 0.006184288024902344, 0.006214079856872559, 0.00611894416809082, 0.006215968132019043, 0.006113791942596435, 0.006227424144744873, 0.006195519924163818, 0.006156576156616211, 0.006203199863433838, 0.006141983985900879, 0.006205440044403076, 0.0061214399337768555, 0.0061972799301147465, 0.0060965437889099125, 0.006205984115600586, 0.0061038718223571774, 0.006227295875549316, 0.006116608142852783, 0.006186431884765625, 0.006158400058746338, 0.0061244478225708, 0.0061799359321594236, 0.006122399806976318, 0.006185184001922608, 0.006125376224517822, 0.006211647987365723, 0.006099008083343506, 0.006217567920684814, 0.006146143913269043, 0.006229919910430908, 0.006119423866271972, 0.0061500477790832515, 0.006155807971954346, 0.006115808010101319, 0.006194399833679199, 0.006117280006408691, 0.0061933121681213375, 0.006119647979736328, 0.006203999996185303, 0.006098944187164307, 0.006177023887634277, 0.006100800037384033, 0.006206624031066895, 0.00628111982345581, 0.006189727783203125, 0.006224095821380615, 0.006139776229858398, 0.0061829757690429685, 0.006109119892120362, 0.006205088138580323, 0.006094687938690185, 0.006193984031677246, 0.006124576091766357, 0.006204192161560059, 0.006139904022216797, 0.006187007904052734, 0.0061561279296875, 0.0060414719581604005, 
0.0061970877647399905, 0.006132800102233887, 0.006231904029846191, 0.006140223979949951, 0.00623145580291748, 0.006110496044158935, 0.006194303989410401, 0.006137983798980713, 0.006147903919219971, 0.006258304119110107, 0.006266975879669189, 0.006224095821380615, 0.006154016017913818, 0.006250720024108886, 0.006215519905090332, 0.006215712070465088, 0.0061703681945800785, 0.006166687965393066, 0.006172671794891358, 0.006129888057708741, 0.006178944110870361, 0.006129631996154785, 0.006193056106567383, 0.006117568016052246, 0.006186016082763672, 0.006103839874267578, 0.006192160129547119, 0.0061140480041503905, 0.006213856220245361, 0.006105343818664551, 0.006210912227630616, 0.006156832218170166, 0.00610700798034668, 0.006356959819793701, 0.006127776145935059, 0.006209248065948486, 0.006096992015838623, 0.006225984096527099, 0.006092319965362549, 0.006189536094665527, 0.006081535816192627, 0.006198400020599365, 0.0061418237686157225, 0.006129183769226074, 0.006170207977294922, 0.00611030387878418, 0.006165472030639649, 0.006115839958190918, 0.006169151782989502, 0.0061080641746521, 0.006216639995574951, 0.006104991912841797, 0.0062259202003479, 0.006106527805328369, 0.0062206401824951175, 0.006176544189453125, 0.0061560640335083005, 0.006199488162994385, 0.006111487865447998, 0.006198239803314209, 0.0061194877624511715, 0.006193888187408447, 0.005999040126800537, 0.006200799942016602, 0.006185247898101807, 0.006334720134735107, 0.006686336040496827, 0.0061627840995788575, 0.006373407840728759, 0.006572319984436035, 0.006242047786712646, 0.006223840236663818, 0.006174623966217041, 0.006199391841888427, 0.0061645441055297855, 0.006243264198303223, 0.0061242241859436036, 0.006224192142486572, 0.006196224212646485, 0.00621235179901123, 0.006292064189910889, 0.006149151802062988, 0.006162303924560547, 0.006136064052581787, 0.006199808120727539, 0.006121471881866455, 0.0061968002319335936, 0.006078911781311035, 0.006190144062042237, 0.006101280212402344, 0.006202303886413575, 0.006087423801422119, 0.006182112216949463, 0.006164224147796631, 0.006188032150268555, 0.006173120021820068, 0.006129248142242432, 0.0061898880004882815, 0.006113056182861328, 0.006219295978546142, 0.006097760200500488, 0.006204671859741211, 0.006089471817016602, 0.006213791847229004, 0.006110527992248535, 0.006177311897277832, 0.006141600131988526, 0.006142303943634033, 0.006170623779296875, 0.006123519897460937, 0.006243936061859131, 0.006128032207489014, 0.006212831974029541, 0.006087552070617676, 0.00620688009262085, 0.006101791858673096, 0.006200384140014648, 0.0061200962066650395, 0.006189055919647217, 0.006178944110870361, 0.00614796781539917, 0.006221824169158936, 0.006121471881866455, 0.006180863857269287, 0.006105088233947754, 0.006118368148803711, 0.006082560062408447, 0.006242015838623047, 0.006096223831176758, 0.006180960178375244, 0.006153312206268311, 0.006145792007446289, 0.00616864013671875, 0.006115263938903809, 0.006166528224945069, 0.006127615928649902, 0.0061682238578796384, 0.0061073598861694335, 0.006203519821166992, 0.006088704109191895, 0.006158080101013184, 0.0060841598510742185, 0.006183775901794433, 0.006243743896484375, 0.007389279842376709, 0.006201695919036865, 0.0060945281982421875, 0.006195519924163818, 0.006130015850067139, 0.006121119976043701, 0.0061541438102722164, 0.00610700798034668, 0.00614576005935669, 0.0060871682167053225, 0.006156576156616211, 0.006090528011322022, 0.006180960178375244, 0.0060793919563293455, 0.006198016166687012, 0.006087999820709228, 0.006204288005828857, 
0.006072319984436035, 0.006187263965606689, 0.006192031860351562, 0.006116064071655274, 0.006174816131591797, 0.0061008319854736325, 0.006144383907318115, 0.00609987211227417, 0.006162432193756104, 0.006199359893798828, 0.006858848094940186, 0.006587135791778565, 0.0067358717918396, 0.006393856048583985, 0.00617574405670166, 0.006964223861694336, 0.006443039894104004, 0.006099167823791504, 0.0062481918334960935, 0.006105055809020996, 0.006192512035369873, 0.006161056041717529, 0.006168896198272705, 0.006166207790374756, 0.006139904022216797, 0.006174592018127442, 0.006126016139984131, 0.006082592010498047, 0.00609830379486084, 0.006261375904083252, 0.006381504058837891, 0.006215424060821533, 0.006175007820129395, 0.006134880065917969, 0.006193247795104981, 0.00613702392578125, 0.006212480068206787, 0.006124512195587158, 0.006182240009307861, 0.006177631855010987, 0.006182752132415772, 0.006090079784393311, 0.0062009282112121585, 0.0061874880790710445, 0.006162879943847656, 0.006187168121337891, 0.006119167804718018, 0.006201663970947266, 0.006143519878387451, 0.006274367809295654, 0.006105984210968017, 0.0062197761535644535, 0.006117055892944336, 0.006205760002136231, 0.006115327835083008, 0.006160416126251221, 0.006162335872650146, 0.006135424137115478, 0.006147712230682373, 0.006124351978302002, 0.006180543899536133, 0.0060910720825195315, 0.006237760066986084, 0.006120128154754639, 0.006196896076202393, 0.006094431877136231, 0.006244639873504639, 0.006244512081146241, 0.00611894416809082, 0.006144032001495361, 0.0061114559173583985, 0.006160672187805176, 0.006180863857269287, 0.006205344200134277, 0.006217823982238769, 0.006208896160125732, 0.006092576026916504, 0.006197440147399902, 0.0062304000854492185, 0.0062507839202880855, 0.006182911872863769, 0.006148096084594727, 0.006201344013214111, 0.006119423866271972, 0.006215167999267578, 0.006106815814971924, 0.006183680057525635, 0.0061010560989379885, 0.006158336162567139, 0.0060657281875610355, 0.006072319984436035, 0.006121471881866455, 0.0061411519050598145, 0.0061502718925476076, 0.006101664066314697, 0.006169663906097412, 0.006115808010101319, 0.006150815963745117, 0.006090816020965576, 0.006191199779510498, 0.006106783866882324, 0.0062156801223754886, 0.00609388780593872, 0.0062145919799804685, 0.006119391918182373, 0.0061785922050476075, 0.006156544208526612, 0.006105088233947754, 0.006174528121948243, 0.006109344005584717, 0.006193183898925782, 0.0061066560745239255, 0.006224671840667725, 0.006097695827484131, 0.006216608047485352, 0.006121312141418457, 0.0062046079635620114, 0.006159327983856201, 0.0061561279296875, 0.006190847873687744, 0.006128032207489014, 0.0061764798164367675, 0.006115935802459717, 0.006166207790374756, 0.006155807971954346, 0.006209472179412842, 0.006109119892120362, 0.00622160005569458, 0.006118207931518555, 0.0062197761535644535, 0.00614412784576416, 0.006129536151885986, 0.006170944213867187, 0.006131392002105713, 0.006170623779296875, 0.00612556791305542, 0.00618009614944458, 0.006138239860534668, 0.00619484806060791, 0.006062272071838379, 0.006180895805358887, 0.006189407825469971, 0.006240255832672119, 0.0061625919342041015, 0.006120831966400147, 0.006204095840454102, 0.006133920192718506, 0.006188416004180909, 0.006123936176300049, 0.006197567939758301, 0.006079264163970947, 0.006194272041320801, 0.006112383842468262, 0.006110400199890137, 0.0061305279731750486, 0.006189919948577881, 0.006168575763702393, 0.006111328125, 0.006166431903839111, 0.006119423866271972, 0.006158336162567139, 
0.006123072147369385, 0.006214431762695313, 0.006108255863189698, 0.006234687805175781, 0.00613100814819336, 0.00636575984954834, 0.006233312129974365, 0.006153120040893555, 0.006230016231536865, 0.006137856006622314, 0.006191359996795654, 0.006133600234985352, 0.0062135357856750485, 0.006100992202758789, 0.006203680038452148, 0.0060984320640563965, 0.006197472095489502, 0.006166528224945069, 0.006144000053405762, 0.0061931519508361815, 0.006121471881866455, 0.006264544010162353, 0.006107423782348633, 0.006180416107177735, 0.006088992118835449, 0.006201632022857666, 0.006080383777618408, 0.006213791847229004, 0.00607747220993042, 0.0063127679824829105, 0.006158656120300293, 0.006135424137115478, 0.006185023784637451, 0.006182496070861816, 0.006190591812133789, 0.006112160205841065, 0.006196896076202393, 0.006092864036560059, 0.006314559936523438, 0.006102240085601807, 0.00619484806060791, 0.0061307201385498045, 0.006125376224517822, 0.006155744075775147, 0.006106815814971924, 0.006171487808227539, 0.006119423866271972, 0.006193120002746582, 0.006131680011749268, 0.006219840049743653, 0.006108736038208008, 0.006213471889495849, 0.006102719783782959, 0.006210464000701904, 0.006163584232330322, 0.00611516809463501, 0.006199295997619629, 0.006129824161529541, 0.006199295997619629, 0.006118783950805664, 0.006257215976715088, 0.006084671974182129, 0.006192736148834229, 0.006077119827270508, 0.0061803522109985356, 0.0061559038162231445, 0.006159039974212646, 0.006173920154571533, 0.006129600048065185, 0.0061877760887146, 0.006195199966430664, 0.006203264236450195, 0.006131487846374512, 0.006214272022247314, 0.006096608161926269, 0.006196512222290039, 0.006355679988861084, 0.006189055919647217, 0.006178815841674804, 0.006135968208312989, 0.006229856014251709, 0.006120800018310547, 0.00617244815826416, 0.006123904228210449, 0.006210048198699952, 0.006097087860107422, 0.0061868162155151365, 0.006083648204803466, 0.006161375999450684, 0.006131103992462158, 0.006150112152099609, 0.006156352043151856, 0.006136032104492187, 0.006162144184112549, 0.006115551948547363, 0.006173056125640869, 0.006107039928436279, 0.006207583904266357, 0.0060867519378662105, 0.006184671878814697, 0.006068031787872314, 0.006295360088348388, 0.006119584083557129, 0.00614847993850708, 0.006141791820526123, 0.0061289920806884765, 0.0061571521759033205, 0.006113599777221679, 0.006148863792419434, 0.006095808029174804, 0.006224127769470214, 0.006088448047637939, 0.0061933121681213375, 0.00609878396987915, 0.006205535888671875, 0.006108352184295654, 0.00619001579284668, 0.006176544189453125, 0.006088479995727539, 0.006166783809661866, 0.006109087944030762, 0.006182464122772216, 0.006111199855804443, 0.006208255767822266, 0.006094751834869384, 0.0062642240524291995, 0.006125631809234619, 0.0062176318168640134, 0.006131807804107666, 0.0061504640579223635, 0.006203616142272949, 0.006109183788299561, 0.0061662721633911136, 0.006096896171569824, 0.006193408012390137, 0.006137856006622314, 0.0062271361351013185, 0.006090943813323975, 0.006226560115814209, 0.006303872108459473, 0.006264544010162353, 0.006214144229888916, 0.006139552116394043, 0.006186495780944825, 0.006121312141418457, 0.00617958402633667, 0.006090752124786377, 0.006188960075378418, 0.006080704212188721, 0.006214943885803223, 0.006114175796508789, 0.006219711780548096, 0.006191904067993164, 0.006173503875732422, 0.006219903945922852, 0.006217023849487305, 0.0061918082237243655, 0.006135807991027832, 0.0062259202003479, 0.006131711959838867, 0.006187039852142334, 
0.0060927681922912594, 0.006244351863861084, 0.0061662721633911136, 0.00626854419708252, 0.006206399917602539, 0.006126944065093994, 0.006426976203918457, 0.006102015972137451, 0.006214655876159668, 0.006108960151672363, 0.006217951774597168, 0.006181024074554444, 0.006188896179199219, 0.006168575763702393, 0.006126719951629639, 0.006155136108398437, 0.006117728233337403, 0.006172287940979004, 0.006094304084777832, 0.006204127788543701, 0.00598195219039917, 0.006206975936889648, 0.006071040153503418, 0.00620854377746582, 0.00611846399307251, 0.006164383888244629, 0.006315616130828858, 0.006221343994140625, 0.006166463851928711, 0.006116288185119629, 0.006187007904052734, 0.006072351932525635, 0.006188767910003662, 0.006076032161712646, 0.006181503772735596, 0.006094655990600586, 0.006199776172637939, 0.006170688152313232, 0.0061641278266906735, 0.006170559883117676, 0.006146111965179443, 0.006207488059997559, 0.006184351921081543, 0.006181663990020752, 0.0060878081321716305, 0.006174528121948243, 0.006075039863586425, 0.006158559799194336, 0.006067776203155518, 0.0061651840209960935, 0.006118591785430908, 0.006128416061401367, 0.00613705587387085, 0.00610591983795166, 0.006133503913879394, 0.006097119808197021, 0.006147935867309571, 0.006082496166229248, 0.006258687973022461, 0.00610316801071167, 0.00618016004562378, 0.00609503984451294, 0.006200992107391358, 0.006077151775360107, 0.006182784080505371, 0.006563519954681397, 0.006335231781005859, 0.006338240146636963, 0.006274591922760009, 0.006287839889526367, 0.006211647987365723, 0.006373311996459961, 0.006240255832672119, 0.006195199966430664, 0.006299647808074951, 0.006245728015899658, 0.006271647930145264, 0.006175968170166015, 0.006265632152557373, 0.006261792182922363, 0.006187808036804199, 0.006232255935668945, 0.0062009282112121585, 0.006176928043365479, 0.006148640155792236, 0.006262784004211426, 0.006334464073181153, 0.006250495910644531, 0.006241951942443848, 0.006256991863250732, 0.00628326416015625, 0.0062046399116516115, 0.006303616046905517, 0.0062024321556091305, 0.006346816062927246, 0.006411744117736816, 0.006244671821594238, 0.006303135871887207, 0.006172287940979004, 0.006286367893218994, 0.006196352005004882, 0.006319200038909912, 0.006275968074798584, 0.006217984199523926, 0.006293471813201904, 0.00635148811340332, 0.006282815933227539, 0.0061807999610900876, 0.0062754878997802735, 0.006260992050170899, 0.006199488162994385, 0.006231776237487793, 0.006247392177581787, 0.006279200077056885, 0.006146431922912598, 0.006232639789581299, 0.006146175861358643, 0.006256063938140869, 0.006286079883575439, 0.006185664176940918, 0.006230559825897217, 0.006316127777099609, 0.006285727977752686, 0.00616649580001831, 0.006245791912078857, 0.006222432136535645, 0.00620908784866333, 0.006230463981628418, 0.006199295997619629, 0.006209792137145996, 0.006148863792419434, 0.006343008041381836, 0.006185152053833008, 0.006244319915771485, 0.006283455848693847, 0.006205760002136231, 0.006354623794555664, 0.006203936100006103, 0.006686495780944824, 0.006151487827301025, 0.0062707200050354005, 0.006242591857910157, 0.006175680160522461, 0.00621062421798706, 0.006219744205474853, 0.00625273609161377, 0.006111072063446045, 0.006349664211273193, 0.006262815952301025, 0.00631820821762085, 0.006315936088562011, 0.006198751926422119, 0.006341119766235351, 0.006180704116821289, 0.006717152118682861, 0.006281888008117676, 0.006287168025970459, 0.006315711975097656, 0.006211584091186524, 0.006320032119750976, 0.00680998420715332, 
0.006266880035400391, 0.006314112186431885, 0.00625161600112915, 0.006359456062316894, 0.0062631678581237795, 0.006272768020629883, 0.006321695804595948, 0.006329247951507569, 0.006326015949249268, 0.0062032961845397945, 0.006296768188476563, 0.006213791847229004, 0.00630406379699707, 0.006287871837615967, 0.006217728137969971, 0.006254591941833496, 0.006205632209777832, 0.006284863948822022, 0.006160704135894776, 0.0062871999740600586, 0.006266335964202881, 0.0062921919822692875, 0.006342559814453125, 0.006551136016845703, 0.006318496227264404, 0.006192416191101074, 0.006288095951080323, 0.006270400047302246, 0.006183616161346436, 0.00624832010269165, 0.006232063770294189, 0.006288479804992675, 0.006154304027557373, 0.0062679038047790524, 0.006217343807220459, 0.006244351863861084, 0.0062568001747131344, 0.006193215847015381, 0.006291456222534179, 0.006311935901641846, 0.006284543991088867, 0.006222559928894043, 0.006379551887512207, 0.006257919788360595, 0.0062000641822814945, 0.006253888130187988, 0.006189760208129883, 0.006302752017974853, 0.006072319984436035, 0.006305600166320801, 0.006354176044464112, 0.006210847854614258, 0.006264544010162353, 0.006193088054656982, 0.006348991870880127, 0.006192255973815918, 0.006265312194824219, 0.006220032215118408, 0.0063504958152771, 0.006315648078918457, 0.006419007778167725, 0.006330495834350586, 0.006233727931976319, 0.006428800106048584, 0.006207744121551514, 0.006150144100189209, 0.006260096073150635, 0.0061421761512756344, 0.006201759815216064, 0.0061194877624511715, 0.006418176174163818, 0.00609939193725586, 0.00621449613571167, 0.006185887813568115, 0.006146048069000244, 0.006182911872863769, 0.006107135772705078, 0.006168416023254394, 0.006096479892730713, 0.006149824142456055, 0.006103648185729981, 0.00618723201751709, 0.006082496166229248, 0.006205088138580323, 0.006689536094665527, 0.006420256137847901, 0.006267072200775147, 0.006102784156799317, 0.006166528224945069, 0.006072415828704834, 0.00617571210861206, 0.00605679988861084, 0.006178016185760498, 0.006082687854766846, 0.0061550078392028805, 0.006111231803894043, 0.006123104095458984, 0.0061324481964111326, 0.006176447868347168, 0.006261087894439697, 0.006110879898071289, 0.006166528224945069, 0.006123519897460937, 0.006268928050994873, 0.006063392162322998, 0.0061675839424133305, 0.006059967994689942, 0.006246143817901611, 0.0061147198677062985, 0.006091104030609131, 0.0061337919235229495, 0.006010752201080322, 0.006124800205230713, 0.006286431789398193, 0.006368256092071533, 0.006089375972747803, 0.0061931519508361815, 0.006231776237487793, 0.006203680038452148, 0.006143392086029053, 0.006103456020355225, 0.006164768218994141, 0.006110496044158935, 0.006303616046905517, 0.006189824104309082, 0.006205440044403076, 0.006068287849426269, 0.006189184188842773, 0.0060659837722778324, 0.00618064022064209, 0.0061875200271606446, 0.006358751773834229, 0.006431007862091064, 0.006843999862670898, 0.00625871992111206, 0.0065821118354797365, 0.0071478400230407715, 0.006469567775726318, 0.006234432220458984, 0.006172351837158203, 0.0061931519508361815, 0.006137856006622314, 0.00618720006942749, 0.006184768199920654, 0.006209504127502441, 0.006232096195220947, 0.006479872226715088, 0.006178815841674804, 0.006123519897460937, 0.006371327877044678, 0.006107135772705078, 0.006187007904052734, 0.006090079784393311, 0.006215936183929444, 0.006120192050933838, 0.006217567920684814, 0.006267871856689453, 0.006204351902008056, 0.006539167881011963, 0.006270976066589356, 0.006508543968200684, 
0.006148352146148682, 0.006258431911468506, 0.006188672065734863, 0.00620147180557251, 0.00627888011932373, 0.006156832218170166, 0.006233151912689209, 0.006136064052581787, 0.006220479965209961, 0.006156576156616211, 0.006211296081542968, 0.00611737585067749, 0.006194623947143554, 0.00602291202545166, 0.006112927913665771, 0.006146399974822998, 0.0061151041984558105, 0.006158559799194336, 0.0060928001403808595, 0.006301504135131836, 0.006090943813323975, 0.0061699838638305665, 0.006066815853118896, 0.006174719810485839, 0.006055935859680176, 0.006187007904052734, 0.00606447982788086, 0.006151840209960937, 0.006158336162567139, 0.006129280090332031, 0.006156703948974609, 0.006176447868347168, 0.006171072006225586, 0.006100992202758789, 0.0061847038269042965, 0.006065343856811524, 0.006173952102661133, 0.006067071914672852, 0.006179840087890625, 0.006106912136077881, 0.006146048069000244, 0.006094848155975342, 0.006129151821136475, 0.006129824161529541, 0.006090176105499267, 0.0061281280517578125, 0.006105343818664551, 0.006125728130340576, 0.00606822395324707, 0.0066130561828613285, 0.006204991817474365, 0.006161888122558593, 0.006059135913848877, 0.006186783790588379, 0.0061289920806884765, 0.0061200962066650395, 0.006232063770294189, 0.006107135772705078, 0.006258687973022461, 0.006098336219787598, 0.006226784229278565, 0.006090496063232422, 0.006213632106781006, 0.0060878400802612306, 0.00619212818145752, 0.006091968059539795, 0.006199967861175537, 0.006163839817047119, 0.006341248035430908, 0.0061784000396728515, 0.006109600067138672, 0.0062230081558227535, 0.006081376075744629, 0.0062176637649536135, 0.006060256004333496, 0.006157567977905273, 0.0059502081871032715, 0.00615993595123291, 0.006111743927001953, 0.006131648063659668, 0.00614799976348877, 0.006110559940338134, 0.006169343948364258, 0.006100736141204834, 0.006338304042816162, 0.006103551864624023, 0.006189216136932373, 0.006075551986694336, 0.0061651840209960935, 0.00609062385559082, 0.0061809921264648435, 0.00607747220993042, 0.006169568061828614, 0.006133632183074951, 0.006127488136291504, 0.006156415939331055, 0.006125696182250976, 0.006157919883728027, 0.006094751834869384, 0.006148128032684326, 0.006155839920043945, 0.006163360118865967, 0.006143583774566651, 0.006171103954315186, 0.006072159767150879, 0.0064635839462280275, 0.006185311794281006, 0.006172063827514648, 0.0062120318412780765, 0.006110879898071289, 0.0062035517692565914, 0.00612886381149292, 0.006243103981018066, 0.006117055892944336, 0.0061848959922790525, 0.0060993280410766605, 0.006176767826080322, 0.00611737585067749, 0.006160031795501709, 0.00614844799041748, 0.00613702392578125, 0.006187839984893799, 0.006129312038421631, 0.006170976161956787, 0.0060908799171447755, 0.0061765117645263675, 0.006088223934173584, 0.006173408031463623, 0.006073728084564209, 0.006387968063354493, 0.00612172794342041, 0.006182079792022705, 0.006274943828582764, 0.0061244797706604005, 0.006159808158874512, 0.006105216026306153, 0.006263552188873291, 0.0060795841217041015, 0.0061855678558349605, 0.005990015983581543, 0.006193535804748535, 0.006113279819488526, 0.006154240131378174, 0.006146048069000244, 0.006174719810485839, 0.0061561598777771, 0.0060908479690551754, 0.006169727802276611, 0.006086880207061767, 0.006199359893798828, 0.006068128108978272, 0.006173408031463623, 0.006139904022216797, 0.006191008090972901, 0.006353280067443848, 0.006235712051391602, 0.0062527041435241695, 0.006120672225952148, 0.006187263965606689, 0.006099487781524658, 0.006238207817077636, 
0.006153215885162353, 0.006185887813568115, 0.006143167972564697, 0.006203423976898193, 0.006280064105987549, 0.0061437439918518065, 0.006154496192932129, 0.006105088233947754, 0.006163936138153076, 0.006131264209747314, 0.006179808139801025, 0.006122879981994629, 0.0061896958351135255, 0.006090752124786377, 0.0061701440811157224, 0.006070752143859864, 0.006176352024078369, 0.006101408004760742, 0.006425792217254638, 0.0063376641273498536, 0.0060815677642822264, 0.006170559883117676, 0.006099679946899414, 0.006191103935241699, 0.006090271949768067, 0.006195680141448974, 0.0061040959358215335, 0.00618390417098999, 0.006114560127258301, 0.00613427209854126, 0.006146304130554199, 0.006114336013793945, 0.006247392177581787, 0.006116928100585938, 0.0061550397872924805, 0.0061164479255676266, 0.006193600177764893, 0.006071616172790528, 0.006167359828948974, 0.006082592010498047, 0.006178751945495605, 0.005960800170898437, 0.006173439979553223, 0.00612502384185791, 0.006124320030212402, 0.006139904022216797, 0.00616755199432373, 0.006232160091400147, 0.006148064136505127, 0.006178880214691162, 0.00607913589477539, 0.006183135986328125, 0.00608460807800293, 0.006269152164459228, 0.0060917119979858395, 0.006187359809875489, 0.006127359867095947, 0.006103519916534424, 0.006211872100830078, 0.006113279819488526, 0.0061519680023193355, 0.006123744010925293, 0.0061708478927612305, 0.006135168075561523, 0.006221568107604981, 0.006283552169799805, 0.006261119842529297, 0.0061374402046203615, 0.006165088176727295, 0.006596255779266358, 0.006148255825042724, 0.0062156801223754886, 0.006106336116790772, 0.006185919761657715, 0.006072159767150879, 0.0061972479820251464, 0.00611078405380249, 0.006207935810089112, 0.0061868162155151365, 0.006139711856842041, 0.0062073922157287595, 0.00617465591430664, 0.006171167850494385, 0.006221248149871826, 0.006192895889282226, 0.006074656009674073, 0.0061998400688171385, 0.006096320152282715, 0.0061866559982299806, 0.006138656139373779, 0.006159840106964111, 0.006186975955963135, 0.006118080139160156, 0.0062046079635620114, 0.006118207931518555, 0.0061983041763305664, 0.006108128070831299, 0.006189343929290772, 0.0061356801986694335, 0.006256383895874023, 0.006051680088043213, 0.006168831825256347, 0.006290783882141113, 0.006113823890686035, 0.006031551837921143, 0.0060845761299133305, 0.006146016120910644, 0.006089727878570556, 0.00616480016708374, 0.006097536087036132, 0.00617299222946167, 0.006074048042297363, 0.006172063827514648, 0.00606441593170166, 0.006302015781402588, 0.006107391834259034, 0.006141695976257324, 0.006143712043762207, 0.00617091178894043, 0.006176767826080322, 0.006113279819488526, 0.006180895805358887, 0.006103072166442871, 0.006178751945495605, 0.00609497594833374, 0.007206111907958984, 0.006517471790313721, 0.006143807888031006, 0.006250463962554932, 0.006157599925994873, 0.0062350077629089356, 0.006146080017089843, 0.006153567790985108, 0.006261631965637207, 0.0061265921592712404, 0.006304863929748535, 0.006113279819488526, 0.006237855911254883, 0.006086656093597412, 0.006170048236846924, 0.006059711933135987, 0.00615718412399292, 0.006144192218780518, 0.006252575874328614, 0.006130784034729004, 0.006095359802246094, 0.006111072063446045, 0.0060991039276123045, 0.006112448215484619, 0.006065343856811524, 0.006154399871826172, 0.006076384067535401, 0.006211264133453369, 0.006080512046813965, 0.006166304111480713, 0.006258592128753662, 0.006342527866363526, 0.006160831928253173, 0.006140960216522217, 0.006146304130554199, 0.006195775985717774, 
0.006193376064300537, 0.00609609603881836, 0.0061972799301147465, 0.006068927764892578, 0.006184959888458252, 0.006060192108154297, 0.006086368083953858, 0.0061018881797790525, 0.006192607879638672, 0.006165023803710938, 0.006123104095458984, 0.006154496192932129, 0.006146207809448242, 0.0061699519157409665, 0.006165152072906494, 0.0061842241287231444, 0.006288095951080323, 0.0061943359375, 0.006077375888824463, 0.006188320159912109, 0.0061027522087097165, 0.006154816150665283, 0.006172255992889404, 0.0061173119544982914, 0.006173503875732422, 0.0061064958572387695, 0.00617852783203125, 0.006089983940124511, 0.006190080165863037, 0.006077280044555664, 0.006174528121948243, 0.00605785608291626, 0.006174848079681396, 0.006068287849426269, 0.00616428804397583, 0.006087007999420166, 0.006149919986724853, 0.0061521921157836916, 0.006090752124786377, 0.006151840209960937, 0.006093152046203613, 0.006139488220214844, 0.0060829758644104, 0.006145567893981934, 0.006088319778442383, 0.006277984142303467, 0.006068128108978272, 0.006285344123840332, 0.006094912052154541, 0.006166528224945069, 0.006110208034515381, 0.006140927791595459, 0.006203392028808594, 0.006203392028808594, 0.0062008957862854, 0.0060830078125, 0.006169663906097412, 0.006107679843902588, 0.006187136173248291, 0.006308127880096435, 0.006217887878417969, 0.006126751899719238, 0.006160704135894776, 0.006162047863006592, 0.006337503910064698, 0.006180543899536133, 0.006093952178955078, 0.006188064098358154, 0.006094783782958984, 0.006118656158447266, 0.006049471855163574, 0.006165535926818848, 0.006074687957763672, 0.006185632228851318, 0.006135647773742676, 0.006133920192718506, 0.006150144100189209, 0.0061211838722229, 0.006180672168731689, 0.006129983901977539, 0.006166687965393066, 0.006105088233947754, 0.006186304092407227, 0.006081215858459473, 0.006178624153137207, 0.00608076810836792, 0.006188992023468017, 0.006154240131378174, 0.006180384159088135, 0.006191584110260009, 0.006241375923156738, 0.006236991882324219, 0.006129983901977539, 0.006217504024505615, 0.006131487846374512, 0.0063654079437255855, 0.00620963191986084, 0.0062195839881896975, 0.006183008193969727, 0.006141952037811279, 0.006253632068634033, 0.006185919761657715, 0.00619923210144043, 0.006109248161315918, 0.006244448184967041, 0.006119264125823975, 0.006209216117858887, 0.0061075201034545895, 0.006219295978546142, 0.006213856220245361, 0.0061357121467590335, 0.006222015857696534, 0.006146431922912598, 0.006183712005615234, 0.006132863998413086, 0.006207359790802002, 0.006097087860107422, 0.006196544170379638, 0.006095359802246094, 0.006189055919647217, 0.006150144100189209, 0.006182623863220215, 0.006248000144958496, 0.006150496006011963, 0.006195583820343018, 0.00630790376663208, 0.006225855827331543, 0.006108992099761963, 0.006248960018157959, 0.0061047677993774415, 0.006188735961914062, 0.006148223876953125]",tokens/s,161.55954494263096,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1851.834368,2552.102912,0.0,2149.5808,2147.522048,s,1,9.436158203125,9.436158203125,0.0,9.436158203125,9.436158203125,9.436158203125,9.436158203125,[9.436158203125],,kWh,4.56581563958404e-05,5.028804683037981e-06,1.457306721399515e-05,6.526002829287353e-05,,MB,1958.13376,2621.308928,0.0,2204.106752,2190.263808,s,10,0.6549226913452149,0.06549226913452148,0.00024039975726003688,0.06561625671386719,0.06572649154663086,0.06574690895080566,0.0657632428741455,"[0.06568019104003907, 0.06576732635498046, 0.06568857574462891, 0.06528060913085937, 0.06559078216552734, 0.06572195434570313, 0.06564173126220703, 0.06510848236083984, 0.06517196655273437, 0.0652710723876953]",tokens/s,3908.858303171243,kWh,1.9705799180368815e-06,2.173194313264438e-07,1.2991509274630619e-06,3.4870502768263874e-06,tokens/kWh,73414484.93050955,MB,1968.656384,2621.308928,0.0,2206.203904,2190.266368,s,10,16.235507568359374,1.6235507568359373,0.007715403638741234,1.6270310668945314,1.63019921875,1.6332171630859376,1.6356315185546875,"[1.6272069091796875, 1.627784423828125, 1.636235107421875, 1.629266845703125, 1.613973876953125, 1.629528564453125, 1.6168304443359376, 1.6131666259765625, 1.6146595458984374, 1.626855224609375]",tokens/s,38.803837659364454,kWh,4.7701132796129564e-05,5.2610999078392674e-06,2.6002982025336977e-05,7.896521472930582e-05,tokens/kWh,797819.6502848138,,s,630,16.233455354690573,0.02576738945188976,0.0003477135149423601,0.02569625663757324,0.026029622840881346,0.02623759145736694,0.027274815139770515,"[0.025931552886962892, 0.025596128463745118, 0.025550527572631834, 0.025563455581665038, 0.025533695220947266, 0.025504512786865233, 0.025591072082519532, 0.025483999252319336, 0.025614336013793947, 0.02558083152770996, 0.02561916732788086, 0.02574083137512207, 0.025788543701171875, 0.025657567977905273, 0.029083200454711914, 0.026095392227172852, 0.02589571189880371, 0.025863775253295897, 0.02579088020324707, 0.025761760711669923, 0.025726463317871092, 0.02573161506652832, 0.025722240447998045, 0.025784320831298828, 0.025753759384155275, 0.025633119583129884, 0.025628639221191407, 0.02601795196533203, 0.025911296844482422, 0.025808895111083984, 0.025861183166503907, 0.02580371284484863, 0.02571673583984375, 0.025653247833251954, 0.025782272338867186, 0.025740959167480468, 0.02577427291870117, 0.025700511932373046, 0.02570035171508789, 0.025700576782226564, 0.025749151229858398, 0.025579328536987304, 0.02566115188598633, 0.02564678382873535, 0.02564361572265625, 0.027572576522827148, 0.02608527946472168, 0.025823295593261717, 0.025746688842773438, 0.025823999404907226, 0.02576806449890137, 0.025855871200561525, 0.025856000900268555, 0.02603932762145996, 0.025866655349731444, 0.02578665542602539, 0.025706783294677734, 0.025808895111083984, 0.025642847061157225, 0.025703807830810547, 0.02576464080810547, 0.025628671646118165, 0.025815040588378906, 0.026770879745483398, 0.02604444885253906, 0.026306688308715822, 0.025996063232421877, 0.025861791610717774, 0.02570070457458496, 0.02570444869995117, 0.025672927856445312, 0.025778976440429688, 0.025815040588378906, 0.025804479598999022, 0.025923904418945314, 0.025812992095947264, 0.025853952407836913, 0.025730432510375975, 0.025885311126708985, 0.0258571834564209, 0.025897823333740234, 0.025855968475341797, 0.02583545684814453, 0.02573321533203125, 0.02568806457519531, 0.025775936126708983, 0.026097856521606445, 0.025997312545776367, 0.025948160171508788, 0.026084896087646484, 0.02611862373352051, 
0.025973920822143556, 0.025766752243041993, 0.025755584716796873, 0.02568351936340332, 0.025881088256835938, 0.025491455078125, 0.025912736892700194, 0.02568662452697754, 0.025825439453125, 0.025638240814208985, 0.025725439071655275, 0.025951391220092775, 0.025603935241699217, 0.025625600814819335, 0.025741247177124022, 0.025821184158325194, 0.025704511642456053, 0.025792512893676758, 0.025634815216064453, 0.025802751541137696, 0.02571878433227539, 0.02575939178466797, 0.0256658878326416, 0.025747007369995117, 0.025853727340698244, 0.02583228874206543, 0.02574652862548828, 0.025686752319335936, 0.025669631958007814, 0.025843711853027345, 0.02655436706542969, 0.02554275131225586, 0.025560800552368163, 0.025520320892333984, 0.026306560516357422, 0.02637606430053711, 0.02643040084838867, 0.025956352233886718, 0.026763263702392577, 0.025935871124267578, 0.025929407119750978, 0.025786495208740233, 0.025661632537841796, 0.025509632110595704, 0.02567398452758789, 0.025586784362792967, 0.025613216400146483, 0.025679840087890624, 0.025734975814819337, 0.026089696884155272, 0.026292224884033204, 0.02630451202392578, 0.026185728073120116, 0.026032127380371094, 0.025779296875, 0.026300479888916015, 0.025791328430175783, 0.02575564765930176, 0.025853952407836913, 0.02573311996459961, 0.026374303817749023, 0.028811103820800783, 0.025952255249023438, 0.025888832092285156, 0.025812543869018555, 0.025889152526855468, 0.025707551956176758, 0.025903072357177735, 0.025975807189941406, 0.026003456115722655, 0.025976831436157227, 0.025855520248413085, 0.026027679443359375, 0.025789247512817384, 0.025837791442871093, 0.025958175659179687, 0.025780223846435548, 0.025859807968139647, 0.02589673614501953, 0.026013856887817384, 0.025786720275878906, 0.025767328262329102, 0.026356319427490234, 0.025859968185424805, 0.025931903839111328, 0.02587238311767578, 0.025863264083862306, 0.025781152725219726, 0.025867456436157225, 0.025861183166503907, 0.026592735290527345, 0.025854240417480467, 0.02593951988220215, 0.025923967361450195, 0.025788415908813478, 0.025736352920532227, 0.02554934310913086, 0.02563520050048828, 0.02598700714111328, 0.025615840911865233, 0.02558172798156738, 0.025778688430786133, 0.025654624938964844, 0.025756383895874025, 0.025618656158447266, 0.025618431091308593, 0.02549350357055664, 0.025595903396606445, 0.025601696014404297, 0.025687744140625, 0.025590272903442384, 0.025542463302612305, 0.02546019172668457, 0.025500543594360353, 0.025421024322509766, 0.025428768157958984, 0.025554943084716796, 0.025407424926757814, 0.025542207717895508, 0.026290687561035156, 0.027161888122558594, 0.025938655853271483, 0.025853952407836913, 0.025657344818115234, 0.027897472381591796, 0.025727008819580077, 0.025817440032958983, 0.025589120864868163, 0.025585792541503907, 0.02582691192626953, 0.02611497688293457, 0.02597599983215332, 0.025979711532592775, 0.026070623397827147, 0.02595471954345703, 0.02594620704650879, 0.025950111389160157, 0.0259150390625, 0.025859519958496092, 0.027164896011352538, 0.025803071975708008, 0.02588502311706543, 0.025776031494140626, 0.025896160125732422, 0.02584623908996582, 0.026010047912597655, 0.02586332893371582, 0.026249759674072264, 0.02666352081298828, 0.025896671295166016, 0.02588640022277832, 0.025909568786621092, 0.026007551193237305, 0.02580233573913574, 0.025846176147460938, 0.025701536178588866, 0.025991455078125, 0.02554105567932129, 0.02549519920349121, 0.025573247909545897, 0.025706464767456055, 0.02592563247680664, 0.02590675163269043, 0.02550828742980957, 
0.025612096786499023, 0.02558176040649414, 0.02568502426147461, 0.02569664001464844, 0.025614944458007813, 0.025503744125366212, 0.025548799514770508, 0.025601280212402343, 0.02552022361755371, 0.02571126365661621, 0.02556438446044922, 0.02575027275085449, 0.026007583618164062, 0.025795648574829102, 0.02557423973083496, 0.02573539161682129, 0.02561542320251465, 0.025961280822753906, 0.025561088562011718, 0.025877824783325197, 0.025710464477539063, 0.025611072540283202, 0.025535808563232423, 0.025519840240478514, 0.025488351821899412, 0.025546112060546876, 0.02558016014099121, 0.025513120651245117, 0.025535327911376953, 0.02549760055541992, 0.025665536880493164, 0.025482688903808594, 0.025686592102050782, 0.025744991302490236, 0.02556694412231445, 0.025608896255493164, 0.025491455078125, 0.025585664749145507, 0.025393152236938478, 0.025397247314453125, 0.025447807312011718, 0.025834112167358397, 0.025511936187744142, 0.025849855422973633, 0.02552422332763672, 0.025584800720214844, 0.025455455780029296, 0.02546409606933594, 0.02541436767578125, 0.02547420883178711, 0.02557219123840332, 0.02548940849304199, 0.02553446388244629, 0.025664575576782228, 0.02570745658874512, 0.02570854377746582, 0.02570649528503418, 0.025599008560180665, 0.025617376327514648, 0.02558473587036133, 0.02603232002258301, 0.02570675277709961, 0.026072416305541992, 0.025876895904541015, 0.02590924835205078, 0.02597478485107422, 0.026102943420410155, 0.02588310432434082, 0.025859519958496092, 0.026356672286987303, 0.02604025650024414, 0.025935935974121093, 0.025956352233886718, 0.026141727447509765, 0.02606096076965332, 0.026108224868774413, 0.025983488082885742, 0.02604377555847168, 0.025893152236938475, 0.025980960845947264, 0.02598240089416504, 0.026222719192504882, 0.02597964859008789, 0.026205663681030274, 0.025979232788085938, 0.02609328079223633, 0.025975263595581055, 0.02591062355041504, 0.025813663482666015, 0.02590889549255371, 0.025913055419921876, 0.025936159133911132, 0.025919839859008788, 0.02591744041442871, 0.025871967315673827, 0.026022335052490235, 0.025972448348999023, 0.02594790458679199, 0.0258253116607666, 0.025723039627075197, 0.025690431594848632, 0.025624767303466797, 0.025629888534545897, 0.02562086486816406, 0.02565350341796875, 0.025605632781982423, 0.025673791885375975, 0.025694656372070312, 0.02587004852294922, 0.0256628475189209, 0.02560051155090332, 0.025677919387817383, 0.025727296829223634, 0.025655296325683592, 0.02560755157470703, 0.02562931251525879, 0.026037824630737304, 0.02562451171875, 0.025522207260131834, 0.02553286361694336, 0.025597984313964844, 0.025753599166870117, 0.025595264434814455, 0.025849855422973633, 0.02550783920288086, 0.02575103950500488, 0.02550160026550293, 0.025598560333251953, 0.02561359977722168, 0.02564374351501465, 0.025531871795654297, 0.02599990463256836, 0.025619680404663087, 0.025500511169433592, 0.025572832107543946, 0.025496255874633788, 0.025530143737792967, 0.025569280624389647, 0.02556723213195801, 0.025475072860717773, 0.025753856658935547, 0.02559564781188965, 0.025636863708496094, 0.02553446388244629, 0.026359455108642578, 0.025786239624023436, 0.02565782356262207, 0.025593439102172853, 0.025555360794067384, 0.026119903564453126, 0.025526496887207033, 0.02559187126159668, 0.02547302436828613, 0.025560895919799806, 0.025530559539794922, 0.0254967041015625, 0.025484159469604493, 0.025689983367919923, 0.025770111083984373, 0.025772031784057618, 0.025602048873901367, 0.025669631958007814, 0.02659507179260254, 0.025970016479492188, 
0.02572995185852051, 0.025622432708740234, 0.025876575469970704, 0.02555904006958008, 0.02571059226989746, 0.025573375701904297, 0.025792512893676758, 0.025563135147094726, 0.025593599319458007, 0.02547315216064453, 0.025597408294677736, 0.02556300735473633, 0.02559699249267578, 0.02556284713745117, 0.025579519271850586, 0.025636480331420897, 0.02566592025756836, 0.02602934455871582, 0.02543894386291504, 0.025430015563964844, 0.02569625663757324, 0.025686016082763673, 0.02596659278869629, 0.02573311996459961, 0.025609695434570312, 0.026124832153320312, 0.02571059226989746, 0.025687519073486327, 0.025799200057983397, 0.025694208145141603, 0.02595430374145508, 0.025659296035766603, 0.025701696395874024, 0.025768735885620116, 0.025579200744628907, 0.025843135833740233, 0.02561027145385742, 0.025570016860961914, 0.025575103759765624, 0.025577375411987305, 0.02564137649536133, 0.025675136566162108, 0.025701343536376955, 0.025560543060302733, 0.025579839706420898, 0.02551807975769043, 0.0260927677154541, 0.02549225616455078, 0.025462783813476563, 0.025652544021606445, 0.02547372817993164, 0.025513887405395508, 0.025483360290527345, 0.025445856094360352, 0.0254420166015625, 0.02539174461364746, 0.02557369613647461, 0.0261712646484375, 0.025450496673583983, 0.02553241539001465, 0.02547711944580078, 0.025578752517700195, 0.025518848419189454, 0.025397247314453125, 0.02555392074584961, 0.02544112014770508, 0.025507999420166017, 0.025431520462036134, 0.025455135345458985, 0.025419776916503906, 0.025483232498168945, 0.02546816062927246, 0.025430816650390625, 0.02552992057800293, 0.02556563186645508, 0.02547302436828613, 0.02551807975769043, 0.025515104293823244, 0.025438175201416016, 0.025559999465942382, 0.02545270347595215, 0.025507072448730468, 0.0254814395904541, 0.025528799057006835, 0.026205759048461914, 0.02612444877624512, 0.02569625663757324, 0.025645536422729494, 0.025614048004150392, 0.025560543060302733, 0.025471872329711914, 0.02553254318237305, 0.025504831314086915, 0.025762752532958986, 0.02568191909790039, 0.02573107147216797, 0.025582624435424806, 0.025479488372802735, 0.025544416427612304, 0.025535423278808592, 0.025798463821411134, 0.02561862373352051, 0.025556991577148438, 0.025609792709350584, 0.025573183059692382, 0.025594335556030273, 0.025653600692749023, 0.025478431701660156, 0.025612800598144532, 0.025561344146728514, 0.025593439102172853, 0.025718591690063478, 0.02548374366760254, 0.025569183349609375, 0.025577472686767577, 0.0255467529296875, 0.025610240936279297, 0.025647104263305662, 0.025679391860961916, 0.025731456756591796, 0.025699583053588868, 0.025774944305419923, 0.025882623672485353, 0.025806848526000976, 0.025653024673461915, 0.02569411277770996, 0.025690431594848632, 0.02557542419433594, 0.025560352325439455, 0.02555673599243164, 0.025668575286865235, 0.025540128707885742, 0.025636768341064452, 0.02577465629577637, 0.025609920501708985, 0.02562819290161133, 0.02561724853515625, 0.02555897521972656, 0.025782175064086914, 0.025581567764282227, 0.025593631744384764, 0.02553887939453125, 0.025591808319091795, 0.025701536178588866, 0.025520992279052735, 0.025573375701904297, 0.025482303619384767, 0.025490367889404297, 0.025946271896362304, 0.02571468734741211, 0.027319711685180666, 0.028015199661254882, 0.025652799606323242, 0.02553081512451172, 0.026714111328125, 0.02557651138305664, 0.025448415756225588, 0.025488351821899412, 0.02574950408935547, 0.025647104263305662, 0.02554787254333496, 0.025662303924560547, 0.025750751495361327, 0.02564796829223633, 
0.025479167938232423, 0.026670368194580078, 0.02587926483154297, 0.025753599166870117, 0.02615910339355469, 0.025845760345458983, 0.026007551193237305, 0.027729248046875, 0.025671392440795898, 0.02556175994873047, 0.025526559829711915, 0.025683967590332032, 0.025645055770874024, 0.02550809669494629, 0.02556492805480957, 0.025462783813476563, 0.025611520767211914, 0.02558847999572754, 0.025484384536743163, 0.025604543685913087, 0.026329055786132812, 0.02573481559753418, 0.025626720428466795, 0.025639680862426757, 0.025790464401245116, 0.025746816635131835, 0.025536191940307616, 0.025473983764648437, 0.025491455078125, 0.025630720138549806, 0.025671680450439452, 0.02558758354187012, 0.025622016906738283, 0.025764480590820312, 0.02574131202697754, 0.025597951889038087, 0.02569011116027832, 0.025652288436889648, 0.02579961585998535, 0.02553446388244629, 0.025702112197875975, 0.025561376571655272, 0.025599231719970705, 0.02684499168395996, 0.025850208282470703, 0.025763423919677734, 0.025809471130371093]",tokens/s,38.80874319329469,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 128.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 254780 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 16.07 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4766.3104,6194.855936,0.0,5792.333824,5786.358272,s,1,11.6089970703125,11.6089970703125,0.0,11.6089970703125,11.6089970703125,11.6089970703125,11.6089970703125,[11.6089970703125],,kWh,0.00012417271836666257,1.3685218407782616e-05,3.840864183800308e-05,0.00017626657861244827,,MB,1790.844928,6396.182528,0.0,5978.980352,5957.943808,s,10,2.2974053802490237,0.22974053802490238,0.00046682882541032903,0.2299390869140625,0.23016837463378906,0.23025994567871091,0.23033320251464842,"[0.22882284545898438, 0.22905641174316407, 0.230018310546875, 0.22994345092773438, 0.22999363708496093, 0.22972679138183594, 0.22993472290039063, 0.2303515167236328, 0.2301480255126953, 0.22940966796875]",tokens/s,1114.3005157072073,kWh,6.730694151894161e-06,7.422628908922506e-07,4.474902569818177e-06,1.1947859612604588e-05,tokens/kWh,21426431.871523555,MB,1806.004224,6408.76544,0.0,5991.563264,5959.126016,s,10,24.418260253906244,2.4418260253906245,0.009945550451675902,2.4399852294921875,2.4551181884765625,2.4568189819335937,2.4581796166992187,"[2.43397900390625, 2.42788720703125, 2.432810546875, 
2.434169677734375, 2.443701416015625, 2.452275146484375, 2.458519775390625, 2.43626904296875, 2.443908203125, 2.454740234375]",tokens/s,25.800363885433544,kWh,7.132620543893882e-05,7.867236102126488e-06,4.725566659238166e-05,0.000126449108133447,tokens/kWh,498224.1546022886,,s,630,24.41583216094969,0.038755289144364616,0.0005852936471110883,0.03863489723205567,0.039194652175903316,0.03948933849334717,0.04043306468963623,"[0.039215137481689456, 0.0385984001159668, 0.038416606903076175, 0.03877273559570313, 0.03851200103759766, 0.03832271957397461, 0.03836259078979492, 0.038595390319824216, 0.03864556884765625, 0.03856700897216797, 0.038624160766601565, 0.03879935836791992, 0.040323070526123043, 0.03837513732910156, 0.03878121566772461, 0.03860585784912109, 0.03832112121582031, 0.03899596786499023, 0.038329566955566406, 0.0384417610168457, 0.03863961410522461, 0.038537216186523435, 0.03858124923706055, 0.038487041473388675, 0.03849353790283203, 0.03847628784179687, 0.038543521881103514, 0.0384450569152832, 0.038366401672363284, 0.03839059066772461, 0.03837318420410156, 0.03833379364013672, 0.03834473419189453, 0.038324798583984375, 0.03859072113037109, 0.03969580841064453, 0.038860897064208984, 0.03880550384521484, 0.03857404708862305, 0.038412799835205076, 0.03890927886962891, 0.03831788635253906, 0.0384356803894043, 0.03862527847290039, 0.03838560104370117, 0.03834272003173828, 0.03841619110107422, 0.03884460830688476, 0.03867644882202149, 0.0386396484375, 0.03856700897216797, 0.03851766586303711, 0.03877811050415039, 0.03840428924560547, 0.038460063934326175, 0.03896652984619141, 0.038588863372802734, 0.03863369750976563, 0.03893564987182617, 0.03890678405761719, 0.0388587532043457, 0.03892838287353516, 0.03871744155883789, 0.03985203170776367, 0.03855769729614258, 0.038830078125, 0.03847139358520508, 0.03864604949951172, 0.03869852828979492, 0.03854140853881836, 0.03839014434814453, 0.03854336166381836, 0.03863347244262695, 0.03846675109863281, 0.03842540740966797, 0.03846089553833008, 0.03854556655883789, 0.03839132690429688, 0.0383045768737793, 0.03839798355102539, 0.03831398391723633, 0.038458847045898435, 0.03836572647094726, 0.03850572967529297, 0.03827552032470703, 0.03853548812866211, 0.0386902084350586, 0.038903392791748044, 0.039201793670654295, 0.040224704742431644, 0.03868678283691406, 0.038645503997802734, 0.038500545501708984, 0.03849184036254883, 0.038496479034423825, 0.038686943054199216, 0.038477855682373045, 0.03841388702392578, 0.03864393615722656, 0.03830303955078125, 0.03824496078491211, 0.03864188766479492, 0.03859360122680664, 0.03839276885986328, 0.03849334335327149, 0.038424766540527344, 0.038510433197021486, 0.03842086410522461, 0.038572479248046875, 0.038526432037353515, 0.038316574096679684, 0.0383260498046875, 0.03838313674926758, 0.038382270812988284, 0.03835289764404297, 0.03839318466186523, 0.03831798553466797, 0.03840390396118164, 0.03842758560180664, 0.03837747192382813, 0.038370304107666016, 0.03834774398803711, 0.03859868621826172, 0.038286464691162106, 0.038327167510986325, 0.03824822235107422, 0.03911382293701172, 0.038538177490234374, 0.038497344970703125, 0.03848684692382812, 0.03844723129272461, 0.038487968444824217, 0.03842364883422852, 0.03837180709838867, 0.03836368179321289, 0.038561790466308594, 0.03840204620361328, 0.03845119857788086, 0.03835084915161133, 0.03913318252563477, 0.039790592193603515, 0.03859251022338867, 0.03868175888061524, 0.03857084655761719, 0.03845939254760742, 0.03828960037231445, 0.03840716934204102, 0.03849708938598633, 
0.038400001525878906, 0.03839798355102539, 0.038277088165283205, 0.038332416534423826, 0.03848806381225586, 0.038529022216796875, 0.040346656799316406, 0.038535457611083984, 0.038490207672119144, 0.03864627075195313, 0.03839350509643555, 0.03851923370361328, 0.038469566345214846, 0.03875171279907227, 0.038373985290527345, 0.03861276626586914, 0.038547679901123046, 0.03849356842041016, 0.038535808563232424, 0.0385700798034668, 0.03829520034790039, 0.03870515060424805, 0.038727935791015626, 0.038499584197998045, 0.03844172668457031, 0.03854131317138672, 0.03850239944458008, 0.03865599822998047, 0.03849420928955078, 0.03863552093505859, 0.039002113342285157, 0.039929855346679685, 0.03866198348999023, 0.03867391967773438, 0.03850227355957031, 0.03851705551147461, 0.038810081481933593, 0.03865388870239258, 0.03865107345581055, 0.03869168090820312, 0.03834268951416016, 0.03930316925048828, 0.038787071228027346, 0.03860275268554687, 0.038780033111572264, 0.038609760284423825, 0.038547294616699215, 0.03858451080322266, 0.0384716796875, 0.038572032928466796, 0.038438911437988284, 0.03882508850097656, 0.03873222351074219, 0.038549472808837894, 0.040530399322509764, 0.03830988693237305, 0.03833628845214844, 0.03842275238037109, 0.03823948669433594, 0.03841449737548828, 0.03824496078491211, 0.03848387145996094, 0.03856803131103516, 0.038670337677001954, 0.03847987365722656, 0.03870924758911133, 0.03850649642944336, 0.03858428955078125, 0.03848809432983399, 0.03878297424316406, 0.038658016204833986, 0.03858844757080078, 0.038623233795166016, 0.03865711975097656, 0.038542240142822266, 0.038561214447021486, 0.0387815055847168, 0.03879935836791992, 0.038860801696777345, 0.03869696044921875, 0.03873996734619141, 0.03854131317138672, 0.038844417572021485, 0.03873177719116211, 0.038831455230712894, 0.03857206344604492, 0.0392567024230957, 0.03855974578857422, 0.03864144134521484, 0.03836336135864258, 0.03851468658447266, 0.03843449783325195, 0.03842483139038086, 0.03845276641845703, 0.03835548782348633, 0.03848191833496094, 0.039311359405517575, 0.03857600021362305, 0.03850223922729492, 0.038448543548583985, 0.038581024169921874, 0.038539360046386716, 0.03845040130615234, 0.038433567047119144, 0.03954079818725586, 0.03871123123168945, 0.03870889663696289, 0.03861516952514649, 0.03853881454467773, 0.038609569549560546, 0.038618431091308594, 0.038521438598632815, 0.03844275283813477, 0.03847372817993164, 0.03850806427001953, 0.03856022262573242, 0.03862972640991211, 0.03880672073364258, 0.03866707229614258, 0.03855769729614258, 0.03845059204101563, 0.03847769546508789, 0.03907974243164063, 0.04103855895996094, 0.03933718490600586, 0.03880239868164063, 0.03865190505981445, 0.03867647933959961, 0.03866147232055664, 0.038591136932373045, 0.03876784133911133, 0.038508544921875, 0.038356990814208985, 0.03843062210083008, 0.03843948745727539, 0.03857030487060547, 0.03843686294555664, 0.038438911437988284, 0.03850239944458008, 0.038386913299560545, 0.03837168121337891, 0.03859471893310547, 0.03903311920166016, 0.038988929748535156, 0.03988159942626953, 0.03899148941040039, 0.03885647964477539, 0.038949344635009764, 0.03887318420410156, 0.03881577682495117, 0.03930931091308594, 0.03881369781494141, 0.0386945915222168, 0.03879558563232422, 0.03869839859008789, 0.0387303352355957, 0.03919443130493164, 0.03889395141601563, 0.038846271514892575, 0.038956478118896486, 0.03883785629272461, 0.03884716796875, 0.03872963333129883, 0.0389365119934082, 0.03888787078857422, 0.03906876754760742, 0.03874294281005859, 
0.039882846832275394, 0.03886931228637695, 0.03909209442138672, 0.039440704345703126, 0.039027969360351564, 0.03896928024291992, 0.03913568115234375, 0.038488449096679686, 0.04501504135131836, 0.03967964935302734, 0.03877068710327149, 0.03851708984375, 0.03843299102783203, 0.03847145462036133, 0.03846758270263672, 0.038266880035400394, 0.03834672164916992, 0.03846966552734375, 0.03858403015136719, 0.03842879867553711, 0.0388240966796875, 0.03829145431518555, 0.038399616241455076, 0.038516670227050784, 0.03855926513671875, 0.03835382461547852, 0.03843686294555664, 0.03841747283935547, 0.038320415496826174, 0.03889014434814453, 0.03861212921142578, 0.038437728881835935, 0.03845734405517578, 0.038370784759521485, 0.038461505889892576, 0.039121376037597654, 0.038645759582519534, 0.03848806381225586, 0.03853878402709961, 0.03855535888671875, 0.038427135467529294, 0.03848422241210937, 0.03837542343139649, 0.0385043830871582, 0.03864096069335937, 0.038485824584960936, 0.03842899322509766, 0.03844675064086914, 0.03845568084716797, 0.03856035232543945, 0.03870505523681641, 0.03871500778198242, 0.03861142349243164, 0.03852694320678711, 0.03862483215332031, 0.0390634880065918, 0.0477619514465332, 0.039726497650146485, 0.03936316680908203, 0.03878092956542969, 0.038735870361328126, 0.03872153472900391, 0.03883129501342773, 0.03961347198486328, 0.03897174453735352, 0.038799999237060546, 0.03892019271850586, 0.03868652725219727, 0.03864092636108398, 0.038863296508789065, 0.03861142349243164, 0.038997119903564456, 0.038709793090820316, 0.040411231994628906, 0.03872383880615234, 0.039229438781738284, 0.03893862533569336, 0.038780033111572264, 0.038771392822265625, 0.03860889434814453, 0.038539360046386716, 0.03848611068725586, 0.038678176879882814, 0.038532833099365234, 0.03852329635620117, 0.038617118835449216, 0.03919664001464844, 0.03851443099975586, 0.03843289566040039, 0.03871945571899414, 0.038456863403320315, 0.038464351654052736, 0.03865212631225586, 0.038842144012451174, 0.03884828948974609, 0.03883235168457031, 0.03889110565185547, 0.03900048065185547, 0.03999129486083984, 0.039502880096435544, 0.0392262077331543, 0.03944252777099609, 0.03949100875854492, 0.04013558578491211, 0.039220928192138675, 0.039171520233154296, 0.03930579376220703, 0.03909427261352539, 0.03965468978881836, 0.03922534561157227, 0.03945702362060547, 0.03895548629760742, 0.03920598220825195, 0.039069889068603515, 0.03896752166748047, 0.03895142364501953, 0.03908812713623047, 0.039190399169921876, 0.039118465423583985, 0.03927705764770508, 0.03941151809692383, 0.03916204833984375, 0.03915468978881836, 0.03888844680786133, 0.03930112075805664, 0.03909222412109375, 0.03988800048828125, 0.03897433471679688, 0.038814849853515625, 0.038534271240234376, 0.038634273529052736, 0.03846579360961914, 0.038521568298339845, 0.038313663482666016, 0.03852435302734375, 0.03834764862060547, 0.0384901123046875, 0.03838566589355469, 0.03848169708251953, 0.03852310562133789, 0.03848601531982422, 0.038432479858398434, 0.038501983642578126, 0.03846806335449219, 0.03850876617431641, 0.03884646224975586, 0.03858534240722656, 0.03902751922607422, 0.039067840576171874, 0.03874816131591797, 0.03887071990966797, 0.03865817642211914, 0.03950147247314453, 0.03884086227416992, 0.03867136001586914, 0.03904000091552735, 0.03876249694824219, 0.0384266242980957, 0.03840409469604492, 0.038391807556152346, 0.03848921585083008, 0.03855804824829102, 0.03843945693969727, 0.038817790985107424, 0.03860684967041016, 0.03854950332641602, 0.0383631362915039, 
0.03849609756469727, 0.038451358795166014, 0.038586368560791014, 0.03838771057128906, 0.03879689788818359, 0.03882844924926758, 0.03909939193725586, 0.03881881713867188, 0.03873177719116211, 0.03863894271850586, 0.038736255645751956, 0.03870544052124023, 0.039430145263671876, 0.03889548873901367, 0.03884249496459961, 0.038563838958740236, 0.03861872100830078, 0.038461727142333986, 0.03862745666503906, 0.03850035095214844, 0.038438911437988284, 0.03842284774780273, 0.04108252716064453, 0.038957569122314455, 0.0389222412109375, 0.03879923248291016, 0.03873331069946289, 0.038830718994140624, 0.03880944061279297, 0.03877203369140625, 0.03888009643554687, 0.038674110412597655, 0.03876691055297852, 0.03871744155883789, 0.03864166259765625, 0.03862460708618164, 0.038640289306640624, 0.038602432250976565, 0.03860063934326172, 0.03853964614868164, 0.0385351676940918, 0.03890790557861328, 0.03869289779663086, 0.03865388870239258, 0.038666046142578125, 0.038582496643066407, 0.03855728149414062, 0.0384659538269043, 0.038547454833984376, 0.03858227157592774, 0.03864985656738281, 0.03856700897216797, 0.03882281494140625, 0.038809600830078124, 0.038860321044921875, 0.038902240753173827, 0.03902259063720703, 0.038938209533691405, 0.03850281524658203, 0.03856166458129883, 0.03853939056396485, 0.03862268829345703, 0.03880799865722656, 0.03874415969848633, 0.03845119857788086, 0.038670337677001954, 0.038505630493164064, 0.038585182189941405, 0.03852835083007813, 0.038738590240478515, 0.03851481628417969, 0.03864358520507812, 0.038491905212402346, 0.03863785552978516, 0.03859452819824219, 0.03869852828979492, 0.03870912170410156, 0.038789535522460936, 0.03867052841186523, 0.039265758514404295, 0.03932828903198242, 0.04074655914306641, 0.03918073654174805, 0.03905267333984375, 0.038728321075439456, 0.03948729705810547, 0.038716991424560546, 0.038982017517089844, 0.03850060653686523, 0.03870028686523438, 0.038740734100341796, 0.03861913681030273, 0.03861663818359375, 0.03871078491210937, 0.03874006271362305, 0.038728736877441404, 0.038715198516845704, 0.03889926528930664, 0.03869900894165039, 0.03900870513916015, 0.03887104034423828, 0.038919296264648434, 0.039041152954101564, 0.03872230529785156, 0.038825984954833984, 0.038860801696777345, 0.0386473617553711, 0.038885822296142576, 0.038659553527832034, 0.038713886260986326, 0.03873708724975586, 0.0387276496887207, 0.03868758392333985, 0.03863347244262695, 0.03859251022338867, 0.03866825485229492, 0.03849977493286133, 0.038637569427490234, 0.038777278900146483, 0.03922492980957031, 0.039307361602783204, 0.03912287902832031, 0.03909276962280273, 0.03926015853881836, 0.04018544006347656, 0.03956579208374023, 0.03916998291015625, 0.039008255004882815, 0.039360511779785154, 0.03905945587158203, 0.03922716903686523, 0.039567455291748044, 0.04044198226928711, 0.038950912475585936, 0.03897958374023437, 0.03891513442993164, 0.03910956954956055, 0.03872927856445312, 0.03912729644775391, 0.039110847473144535, 0.03914547348022461, 0.0388218879699707, 0.03886521530151367, 0.03872735977172852, 0.039206912994384766, 0.03891814422607422, 0.03959568023681641, 0.03870140838623047]",tokens/s,25.8029296665797,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,820.404224,538.836992,0.0,136.31488,130.303488,s,1,7.61305126953125,7.61305126953125,0.0,7.61305126953125,7.61305126953125,7.61305126953125,7.61305126953125,[7.61305126953125],,kWh,1.4275024983339788e-05,1.5674343510408715e-06,4.2555589600049015e-06,2.009801829438556e-05,,MB,1300.97152,616.431616,0.0,199.22944,174.868992,s,19,0.21869004917144774,0.011510002587970934,0.00018160719681779692,0.011484319686889649,0.01171907196044922,0.01178399076461792,0.011921470241546632,"[0.011479424476623536, 0.011707615852355958, 0.011455488204956055, 0.011571968078613281, 0.011217503547668458, 0.011325216293334961, 0.011372896194458007, 0.011293439865112305, 0.011428576469421386, 0.011764896392822266, 0.011484319686889649, 0.011572704315185547, 0.011686944007873535, 0.011457183837890624, 0.011955840110778809, 0.011517727851867676, 0.011611328125, 0.011548255920410156, 0.011238719940185547]",tokens/s,22241.524104220858,kWh,3.3857509911542186e-07,3.7338877544917535e-08,1.6881716263805107e-07,5.447311392983905e-07,tokens/kWh,469956610.75980717,MB,1334.02624,629.014528,0.0,211.812352,174.871552,s,19,10.04919241333008,0.5289048638594779,0.01081307573427528,0.532142333984375,0.5409972167968751,0.5432424438476563,0.5443411645507813,"[0.5385031127929687, 0.5404740600585938, 0.5446158447265625, 0.5362752075195313, 0.5186209106445312, 0.5128590087890625, 0.5140167236328125, 0.5120408325195313, 0.5302291259765625, 0.54308984375, 0.5396038818359375, 0.532142333984375, 0.5337122192382813, 0.5311636352539062, 0.5323350219726563, 0.53248876953125, 0.5276687622070313, 0.519033203125, 0.5103199157714844]",tokens/s,119.11404924560905,kWh,1.5312466089701253e-05,1.6886957079988829e-06,5.477035307256724e-06,2.247819710495686e-05,tokens/kWh,2802715.8808971974,,s,1197,10.03867111492157,0.00838652557637558,0.00024285614379801823,0.008392064094543458,0.00866423053741455,0.008724435424804687,0.009046640663146968,"[0.008494943618774413, 0.008554431915283203, 0.00850220775604248, 0.008453791618347167, 0.00840499210357666, 0.008269439697265625, 0.008371711730957031, 0.00825830364227295, 0.008298303604125977, 0.008263104438781739, 0.00842636775970459, 0.00859171199798584, 0.008570528030395508, 0.008701312065124511, 0.008853407859802246, 0.008761311531066894, 0.008678144454956054, 0.008691904067993163, 0.00874060821533203, 0.00877347183227539, 0.0087608003616333, 0.008683360099792481, 0.008637344360351563, 0.008763392448425293, 0.008790016174316406, 0.008724160194396972, 0.008479040145874023, 0.00848025608062744, 0.008513824462890625, 0.00852400016784668, 0.00846998405456543, 0.008427776336669923, 0.008610367774963379, 0.008584447860717773, 0.008534079551696777, 0.008553088188171387, 0.00868329620361328, 0.008611231803894044, 0.008665696144104004, 0.008572928428649903, 0.008632320404052735, 0.008637439727783204, 0.00846566390991211, 
0.00868278408050537, 0.008591391563415528, 0.008669280052185058, 0.008607551574707032, 0.008561183929443359, 0.00834108829498291, 0.008290143966674804, 0.008329792022705078, 0.008384736061096191, 0.00837814426422119, 0.008355104446411132, 0.008231743812561036, 0.008329119682312012, 0.00851318359375, 0.0086080961227417, 0.008519680023193359, 0.008487135887145995, 0.008507200241088867, 0.008498559951782227, 0.008532768249511718, 0.008343392372131348, 0.008534496307373046, 0.008527456283569336, 0.008575360298156738, 0.008595168113708496, 0.008793760299682617, 0.008422911643981934, 0.008410112380981445, 0.008337247848510742, 0.00862003231048584, 0.008366080284118652, 0.00828544044494629, 0.008229632377624512, 0.008322239875793457, 0.008302783966064453, 0.008809087753295898, 0.008593215942382812, 0.008405183792114258, 0.008351743698120117, 0.008375807762145996, 0.00832153606414795, 0.008361184120178222, 0.008985376358032227, 0.008920639991760253, 0.010494400024414062, 0.008705920219421386, 0.008582304000854492, 0.008684127807617188, 0.008419712066650391, 0.008443103790283204, 0.008315072059631348, 0.008276576042175294, 0.008765439987182617, 0.008719615936279297, 0.008673791885375976, 0.008613344192504882, 0.00845680046081543, 0.008535455703735352, 0.008646592140197754, 0.008525856018066406, 0.008556639671325684, 0.008461248397827149, 0.008561568260192871, 0.008408032417297363, 0.008433088302612305, 0.008592063903808594, 0.008549280166625976, 0.008639360427856445, 0.008679424285888672, 0.008761343955993652, 0.008681471824645997, 0.008429023742675781, 0.008667263984680176, 0.008699359893798829, 0.008592320442199706, 0.008466400146484376, 0.008558624267578125, 0.008637855529785157, 0.008664704322814942, 0.008542495727539063, 0.008508095741271973, 0.008453696250915528, 0.00863481616973877, 0.008277312278747558, 0.008440447807312011, 0.008556608200073243, 0.008725536346435548, 0.008632351875305175, 0.00854700756072998, 0.008460288047790527, 0.008528223991394043, 0.008519583702087403, 0.008474176406860351, 0.008389056205749512, 0.008365599632263184, 0.00833788776397705, 0.008382464408874512, 0.008460288047790527, 0.008570879936218261, 0.008620320320129394, 0.008506464004516602, 0.008487104415893554, 0.008458687782287597, 0.008457599639892578, 0.008495648384094239, 0.008525919914245606, 0.008576255798339844, 0.008644895553588867, 0.008536352157592773, 0.008601792335510254, 0.008631584167480468, 0.008556768417358399, 0.008665599822998048, 0.008702112197875976, 0.008744095802307129, 0.008753952026367187, 0.008718239784240722, 0.008787967681884766, 0.008678560256958007, 0.00874182415008545, 0.00867039966583252, 0.008681504249572754, 0.008665056228637695, 0.00866335964202881, 0.008677696228027344, 0.00868182373046875, 0.008589056015014649, 0.00867852783203125, 0.00861888027191162, 0.008644607543945313, 0.00870195198059082, 0.008727871894836425, 0.008698559761047364, 0.00870195198059082, 0.009248767852783203, 0.009025504112243652, 0.008736800193786622, 0.008726176261901855, 0.00868556785583496, 0.008642911911010742, 0.00870620822906494, 0.009041728019714356, 0.008904735565185546, 0.008775456428527832, 0.008720000267028809, 0.008771871566772461, 0.00871014404296875, 0.008696063995361328, 0.00869155216217041, 0.008664383888244628, 0.008682080268859863, 0.008831999778747558, 0.008776448249816894, 0.008539392471313476, 0.008375295639038086, 0.008302592277526855, 0.008401023864746093, 0.008664128303527831, 0.008725567817687989, 0.008625887870788574, 0.008675359725952148, 0.008650239944458007, 
0.008677568435668945, 0.008613439559936523, 0.008702719688415527, 0.008574975967407226, 0.008621279716491698, 0.008595775604248047, 0.008605695724487305, 0.008604127883911133, 0.008748576164245606, 0.008665151596069336, 0.008636832237243652, 0.008650752067565918, 0.008603679656982421, 0.008587231636047363, 0.008495103836059571, 0.008579360008239747, 0.008604864120483398, 0.009265695571899414, 0.008612095832824707, 0.008619392395019531, 0.008657535552978516, 0.008701760292053223, 0.008564672470092773, 0.00857430362701416, 0.00842409610748291, 0.008500384330749513, 0.008469344139099121, 0.008423423767089844, 0.008378368377685547, 0.008580960273742676, 0.008433088302612305, 0.008338144302368164, 0.008407039642333984, 0.008372127532958984, 0.00821615982055664, 0.008191519737243653, 0.008108511924743652, 0.008063712120056153, 0.008085280418395997, 0.008115455627441407, 0.008065440177917481, 0.008105376243591308, 0.008147551536560058, 0.008284607887268066, 0.00817347240447998, 0.0081693115234375, 0.00812668800354004, 0.00793398380279541, 0.008240287780761718, 0.008104479789733887, 0.00815494441986084, 0.008122112274169922, 0.008190688133239745, 0.00826534366607666, 0.00835804843902588, 0.00839897632598877, 0.00841327953338623, 0.008282112121582032, 0.008307840347290039, 0.008229375839233399, 0.008247679710388183, 0.008280223846435546, 0.008222623825073242, 0.008393856048583984, 0.008233823776245118, 0.008175392150878907, 0.008204480171203614, 0.008216575622558593, 0.008282112121582032, 0.00819814395904541, 0.00820633602142334, 0.008209471702575683, 0.008270784378051758, 0.008218624114990235, 0.008216575622558593, 0.00817955207824707, 0.008144831657409668, 0.008181983947753907, 0.00817302417755127, 0.008202783584594727, 0.0082227201461792, 0.008191712379455566, 0.008132160186767578, 0.008141535758972168, 0.008113920211791992, 0.008141056060791015, 0.008105183601379394, 0.00810086441040039, 0.00831056022644043, 0.008466431617736817, 0.008368127822875977, 0.008372223854064942, 0.00818518352508545, 0.008166239738464356, 0.008470335960388183, 0.008177984237670898, 0.008171199798583984, 0.008241312026977539, 0.008387776374816894, 0.00825171184539795, 0.008206687927246093, 0.008347200393676759, 0.008141247749328614, 0.008155136108398438, 0.008155136108398438, 0.008152607917785645, 0.008132863998413085, 0.008151264190673828, 0.008214495658874511, 0.008271552085876465, 0.007925824165344238, 0.0082161283493042, 0.00814732837677002, 0.008153087615966797, 0.008160479545593262, 0.008204256057739258, 0.008149824142456054, 0.008139007568359375, 0.008228384017944335, 0.008173567771911621, 0.00827609634399414, 0.008199616432189942, 0.008190624237060547, 0.008163328170776368, 0.008200287818908691, 0.008191904067993165, 0.008181759834289551, 0.008136927604675293, 0.008097760200500488, 0.008059743881225587, 0.008086496353149415, 0.008039615631103515, 0.008180543899536133, 0.008084511756896973, 0.007993728160858153, 0.00803110408782959, 0.008066975593566894, 0.008068927764892578, 0.008068832397460937, 0.008046815872192383, 0.008131744384765625, 0.008084447860717773, 0.008087488174438476, 0.008067071914672852, 0.008054688453674316, 0.008061280250549316, 0.00807910442352295, 0.008076959609985352, 0.008091456413269043, 0.008081536293029785, 0.008075679779052734, 0.008077312469482421, 0.00813436794281006, 0.008141087532043458, 0.008122400283813476, 0.00809721565246582, 0.00804099178314209, 0.008039775848388673, 0.008043519973754883, 0.008076959609985352, 0.008069024085998536, 0.008052831649780273, 0.008175135612487793, 
0.008090080261230469, 0.008077312469482421, 0.008122015953063964, 0.008114336013793945, 0.008249823570251464, 0.008189984321594238, 0.008100831985473633, 0.008126432418823243, 0.008082304000854492, 0.009402239799499511, 0.007903232097625732, 0.008114144325256348, 0.008132672309875489, 0.00810812759399414, 0.008242207527160645, 0.008098208427429199, 0.008137056350708008, 0.008126560211181641, 0.008072768211364746, 0.008036800384521484, 0.008089599609375, 0.008121600151062012, 0.00825216007232666, 0.008964096069335938, 0.008062975883483887, 0.008032256126403809, 0.008095744132995606, 0.008026111602783203, 0.008275967597961426, 0.008295840263366699, 0.00834982395172119, 0.008071359634399413, 0.008038687705993652, 0.008054783821105957, 0.008056511878967285, 0.008154623985290528, 0.008125247955322266, 0.00809779167175293, 0.008105183601379394, 0.008099776268005371, 0.00816982364654541, 0.00807372760772705, 0.008065024375915527, 0.008042336463928223, 0.008059264183044434, 0.00822156810760498, 0.008054880142211914, 0.008358847618103028, 0.008089471817016601, 0.008194047927856446, 0.008203392028808593, 0.00820473575592041, 0.008619647979736328, 0.008153247833251952, 0.008139583587646484, 0.008170495986938477, 0.008090527534484863, 0.008229951858520507, 0.008162272453308106, 0.008069024085998536, 0.00805497646331787, 0.008066880226135253, 0.008130559921264649, 0.008103936195373536, 0.008119968414306641, 0.008254976272583007, 0.008331711769104005, 0.008075519561767578, 0.008122719764709473, 0.008195520401000977, 0.008106207847595215, 0.00810371208190918, 0.00812060832977295, 0.007834527969360352, 0.00811564826965332, 0.00811411190032959, 0.00819264030456543, 0.008082847595214843, 0.008091327667236328, 0.008122528076171875, 0.008061759948730469, 0.008077247619628906, 0.008159232139587403, 0.008091648101806641, 0.008099072456359864, 0.008093695640563964, 0.008315648078918457, 0.008138751983642578, 0.008316032409667969, 0.008264575958251953, 0.00809603214263916, 0.008086912155151367, 0.008124768257141113, 0.008185855865478516, 0.008180928230285645, 0.008165439605712891, 0.00816204833984375, 0.00824892807006836, 0.008134688377380371, 0.008129183769226073, 0.008077343940734864, 0.008210111618041992, 0.008156864166259766, 0.008077919960021973, 0.008053952217102051, 0.008101568222045899, 0.008074080467224121, 0.008093152046203613, 0.00804304027557373, 0.008154144287109375, 0.008012639999389648, 0.008216064453125, 0.008065664291381835, 0.008099583625793457, 0.008048895835876466, 0.008202239990234375, 0.008180800437927247, 0.008065216064453125, 0.008073023796081543, 0.00810694408416748, 0.008136704444885253, 0.00812179183959961, 0.008118847846984863, 0.008189248085021974, 0.008045248031616211, 0.00809596824645996, 0.008100831985473633, 0.00809177589416504, 0.008188896179199219, 0.008216287612915039, 0.008095744132995606, 0.008097951889038085, 0.008046272277832032, 0.008050848007202148, 0.00812172794342041, 0.00805337619781494, 0.007907616138458253, 0.008196512222290038, 0.008311840057373046, 0.0083538236618042, 0.008358271598815917, 0.008265631675720215, 0.00826809597015381, 0.009249024391174317, 0.008733792304992676, 0.008923135757446288, 0.008488639831542969, 0.009190624237060546, 0.00858512020111084, 0.008515423774719238, 0.008507072448730469, 0.008396160125732422, 0.008440799713134765, 0.008454015731811523, 0.008462495803833008, 0.008396736145019531, 0.008392224311828613, 0.008395584106445312, 0.008340576171875, 0.008210495948791504, 0.00810604763031006, 0.008087615966796875, 0.008153471946716308, 
0.008199647903442383, 0.00840937614440918, 0.008132287979125977, 0.008108799934387206, 0.008099648475646972, 0.00851353645324707, 0.008445952415466309, 0.008763680458068848, 0.00838422393798828, 0.008257280349731446, 0.00823526382446289, 0.008739871978759766, 0.008511551856994629, 0.00854099178314209, 0.008619872093200684, 0.008447711944580077, 0.00843132781982422, 0.008383296012878417, 0.008308735847473145, 0.008295743942260743, 0.008380736351013184, 0.008507776260375976, 0.008349696159362792, 0.008369600296020508, 0.008402976036071777, 0.008352288246154785, 0.008359935760498047, 0.00830463981628418, 0.008277055740356445, 0.00829148769378662, 0.008273695945739746, 0.00830025577545166, 0.008403488159179687, 0.008421119689941406, 0.008525856018066406, 0.00857699203491211, 0.008585375785827637, 0.008619232177734375, 0.008625856399536133, 0.009655360221862794, 0.010676032066345214, 0.009283647537231446, 0.008781824111938476, 0.008577024459838867, 0.00846553611755371, 0.008500096321105956, 0.008472576141357421, 0.008386560440063476, 0.008432671546936036, 0.008465120315551758, 0.008464608192443848, 0.008439840316772461, 0.008429568290710449, 0.008476672172546386, 0.008441344261169433, 0.008525759696960448, 0.008387136459350585, 0.008493023872375488, 0.00841919994354248, 0.00835804843902588, 0.008429727554321288, 0.008481760025024414, 0.008506272315979004, 0.008697664260864258, 0.008555904388427734, 0.008813183784484863, 0.00862019157409668, 0.008683520317077637, 0.00855027198791504, 0.008607040405273438, 0.00855942440032959, 0.008544544219970703, 0.008546112060546875, 0.00847862434387207, 0.008476287841796874, 0.008488863945007323, 0.008533503532409668, 0.008681856155395507, 0.008577631950378419, 0.008681471824645997, 0.008715871810913087, 0.008690208435058593, 0.00865993595123291, 0.008575008392333985, 0.008704895973205567, 0.008826272010803222, 0.008624320030212402, 0.008694175720214845, 0.008593664169311524, 0.008677120208740235, 0.008635711669921876, 0.008637120246887207, 0.008462271690368652, 0.008461664199829102, 0.008426207542419434, 0.008439519882202148, 0.008438048362731933, 0.008407103538513183, 0.008375967979431152, 0.008317055702209473, 0.008514495849609375, 0.008512448310852051, 0.0085032958984375, 0.008426976203918458, 0.008401408195495605, 0.009185312271118164, 0.008466431617736817, 0.008540160179138183, 0.008886272430419923, 0.008466560363769532, 0.008400704383850098, 0.008452159881591797, 0.008339679718017579, 0.008359871864318847, 0.00837820816040039, 0.008435711860656739, 0.008386560440063476, 0.008382719993591308, 0.008420991897583007, 0.008697376251220704, 0.008702591896057129, 0.008621408462524413, 0.008647616386413575, 0.00864019203186035, 0.009003007888793945, 0.008646656036376953, 0.008812031745910644, 0.009284095764160156, 0.00844166374206543, 0.00833574390411377, 0.008424384117126465, 0.008495360374450684, 0.008513407707214355, 0.008518400192260743, 0.008447967529296874, 0.008633631706237793, 0.008731136322021485, 0.008755359649658203, 0.008640607833862305, 0.008548192024230957, 0.008549951553344726, 0.008616543769836426, 0.008582943916320801, 0.0085731201171875, 0.00856383991241455, 0.008561823844909668, 0.008647839546203613, 0.008518239974975587, 0.008648287773132325, 0.008722847938537597, 0.008632320404052735, 0.00857744026184082, 0.008469632148742676, 0.008495712280273437, 0.008490240097045899, 0.00846083164215088, 0.00856054401397705, 0.008461503982543946, 0.008510111808776855, 0.008401247978210449, 0.008325119972229005, 0.008392704010009766, 
0.008228128433227538, 0.008389216423034668, 0.008337535858154298, 0.008421183586120606, 0.008460479736328125, 0.008443648338317872, 0.008497407913208008, 0.008523776054382324, 0.008622079849243165, 0.008623392105102538, 0.008597215652465821, 0.008542655944824219, 0.00858512020111084, 0.008532671928405762, 0.008549983978271485, 0.008505727767944336, 0.008466431617736817, 0.008376319885253907, 0.008376319885253907, 0.008288031578063964, 0.008309280395507813, 0.008232959747314453, 0.008183712005615235, 0.008175040245056152, 0.00824124813079834, 0.008381888389587403, 0.008299039840698243, 0.008323360443115234, 0.008450048446655273, 0.008415103912353516, 0.00838259220123291, 0.0084203519821167, 0.008403743743896484, 0.008348992347717285, 0.008329631805419922, 0.008552063941955566, 0.008504192352294921, 0.008407039642333984, 0.008421088218688965, 0.008325152397155762, 0.008347904205322266, 0.008456192016601562, 0.00851683235168457, 0.008429535865783691, 0.008475616455078125, 0.00844489574432373, 0.008356608390808106, 0.00827353572845459, 0.008327936172485352, 0.00838748836517334, 0.008438624382019044, 0.008525823593139649, 0.008623552322387696, 0.008612416267395019, 0.008637887954711914, 0.008657471656799316, 0.008566143989562989, 0.008583456039428712, 0.008572735786437989, 0.008459967613220215, 0.008438176155090332, 0.008478431701660157, 0.008495840072631836, 0.008333312034606934, 0.008370016098022461, 0.008314080238342285, 0.008443072319030761, 0.008494560241699218, 0.008628512382507324, 0.008621088027954102, 0.00857596778869629, 0.008589247703552245, 0.008634431838989258, 0.008631936073303223, 0.008737088203430176, 0.008588959693908691, 0.008492639541625976, 0.008485312461853028, 0.008708383560180664, 0.00857532787322998, 0.008574239730834961, 0.008472288131713867, 0.00844876766204834, 0.00838377571105957, 0.0084203519821167, 0.008375295639038086, 0.008321375846862792, 0.008344223976135254, 0.008432703971862793, 0.00848095989227295, 0.008462816238403321, 0.008465791702270508, 0.008736448287963867, 0.008639391899108886, 0.008495455741882324, 0.008713631629943848, 0.008546624183654785, 0.008527839660644531, 0.00841113567352295, 0.008447999954223634, 0.008502304077148437, 0.008498368263244628, 0.008488960266113281, 0.008418848037719726, 0.008448351860046386, 0.00846771240234375, 0.008366304397583008, 0.008225215911865234, 0.008332768440246581, 0.008260128021240234, 0.00821065616607666, 0.008216352462768554, 0.008331583976745606, 0.008296128273010253, 0.008349216461181641, 0.008552927970886231, 0.008523776054382324, 0.008633791923522949, 0.008466591835021972, 0.008401151657104491, 0.008351903915405273, 0.008441632270812988, 0.008443584442138672, 0.008278528213500976, 0.008304512023925781, 0.008384672164916993, 0.008364383697509765, 0.008419103622436523, 0.008309087753295898, 0.008338720321655273, 0.008374048233032226, 0.008342464447021485, 0.008431391716003418, 0.008511232376098634, 0.0085664644241333, 0.008591808319091798, 0.008589311599731446, 0.008575039863586426, 0.00846777629852295, 0.008489952087402344, 0.00829849624633789, 0.008344608306884766, 0.008388992309570312, 0.008260191917419434, 0.008179488182067872, 0.008224479675292969, 0.008326656341552734, 0.008406335830688477, 0.008259391784667969, 0.008454015731811523, 0.008574496269226075, 0.008592000007629394, 0.00860758399963379, 0.008499360084533692, 0.0084551362991333, 0.008482912063598632, 0.008258432388305663, 0.008239359855651855, 0.008349311828613282, 0.008424896240234375, 0.008304736137390138, 0.008396672248840333, 
0.008435744285583496, 0.008407903671264648, 0.008439552307128907, 0.008664671897888183, 0.008420063972473144, 0.008453503608703613, 0.00835206413269043, 0.008339743614196777, 0.00840828800201416, 0.008406815528869628, 0.00844057559967041, 0.008377375602722168, 0.0084716157913208, 0.008441727638244629, 0.008398847579956055, 0.008403039932250977, 0.00840617561340332, 0.008370944023132324, 0.008361984252929687, 0.008352031707763672, 0.008459391593933106, 0.00863702392578125, 0.008552448272705078, 0.008459712028503418, 0.008659808158874512, 0.008378080368041992, 0.008409088134765624, 0.008456416130065918, 0.008393216133117675, 0.008346624374389648, 0.008403231620788574, 0.008367072105407715, 0.008506591796875, 0.008403583526611328, 0.00850268840789795, 0.008497664451599121, 0.008424639701843262, 0.008418111801147462, 0.008357728004455566, 0.008347488403320313, 0.008354111671447754, 0.008497376441955567, 0.00858518409729004, 0.008480640411376953, 0.008502528190612793, 0.00859615993499756, 0.008425791740417481, 0.00847225570678711, 0.008357760429382324, 0.00839078426361084, 0.008423423767089844, 0.008431615829467774, 0.008435359954833984, 0.008399200439453126, 0.008546367645263672, 0.008343487739562989, 0.008359935760498047, 0.008400896072387695, 0.008378368377685547, 0.008556544303894043, 0.008491007804870606, 0.008521727561950684, 0.008461407661437988, 0.008414175987243653, 0.008587167739868165, 0.008509183883666992, 0.008489248275756835, 0.00840617561340332, 0.008340319633483887, 0.008299776077270508, 0.00828223991394043, 0.008330880165100097, 0.008350111961364747, 0.008479328155517578, 0.008455679893493653, 0.008424063682556153, 0.009164544105529785, 0.008439840316772461, 0.008439295768737793, 0.008466783523559571, 0.00865068817138672, 0.00843500804901123, 0.00838758373260498, 0.008347071647644044, 0.008278592109680176, 0.008310943603515626, 0.008465632438659668, 0.008358816146850585, 0.008373984336853027, 0.008425472259521484, 0.008328319549560547, 0.008501919746398925, 0.00840230369567871, 0.008464927673339845, 0.00837446403503418, 0.008442208290100098, 0.008392064094543458, 0.008366304397583008, 0.008454560279846191, 0.008398688316345215, 0.008505120277404785, 0.008361824035644531, 0.008561023712158203, 0.008441951751708985, 0.008533503532409668, 0.008517184257507325, 0.00857583999633789, 0.008454143524169922, 0.008495136260986328, 0.008396767616271972, 0.008410623550415039, 0.008471039772033692, 0.008555520057678222, 0.008423680305480958, 0.00847100830078125, 0.008415807723999023, 0.008410112380981445, 0.008590368270874024, 0.008539999961853027, 0.008726304054260253, 0.008527935981750489, 0.008439807891845704, 0.00841932773590088, 0.008439807891845704, 0.008621791839599609, 0.008397088050842286, 0.008586624145507812, 0.008335264205932617, 0.008326016426086425, 0.00832051181793213, 0.008386303901672363, 0.0083372163772583, 0.008313887596130371, 0.0082772798538208, 0.008276384353637695, 0.00830675220489502, 0.008388607978820802, 0.008505151748657227, 0.008503487586975098, 0.008548352241516113, 0.008381792068481445, 0.008345824241638183, 0.008401344299316407, 0.008398271560668946, 0.008319552421569825, 0.008643967628479004, 0.008421728134155274, 0.008395008087158203, 0.008689023971557618, 0.008560864448547364, 0.008410655975341796, 0.008335807800292968, 0.008372544288635254, 0.008464927673339845, 0.00847657585144043, 0.008560223579406738, 0.008482912063598632, 0.008475168228149414, 0.008355104446411132, 0.008311871528625489, 0.00833296012878418, 0.008300543785095215, 0.008312704086303711, 
0.008259872436523437, 0.008268768310546876, 0.008278911590576171, 0.008367456436157227, 0.008387104034423828, 0.008419039726257325, 0.008344223976135254, 0.008255231857299804, 0.008246463775634766, 0.008223615646362304, 0.008238783836364746, 0.008180224418640136, 0.008443167686462402, 0.008312383651733398, 0.00825011157989502, 0.00835331153869629, 0.008348287582397461, 0.008335359573364258, 0.008311903953552247, 0.00817859172821045, 0.00818169593811035, 0.00821168041229248, 0.008178624153137207, 0.008257439613342285, 0.00823481559753418, 0.008363295555114746, 0.008307071685791015, 0.008293184280395507, 0.008281760215759278, 0.00821401596069336, 0.008241312026977539, 0.008414624214172363, 0.008360960006713868, 0.008234304428100586, 0.008252351760864259, 0.008306431770324706, 0.008354144096374512, 0.008498751640319824, 0.008904800415039063, 0.008599552154541015, 0.008613887786865235, 0.008480095863342285, 0.00843228816986084, 0.00841113567352295, 0.008386624336242677, 0.008599488258361817, 0.008525823593139649, 0.008445952415466309, 0.008523008346557618, 0.008560992240905762, 0.008442272186279296, 0.008482815742492676, 0.008431615829467774, 0.008332415580749512, 0.00853433609008789, 0.008550975799560547, 0.008726271629333496, 0.008485119819641113, 0.008429568290710449, 0.00828384017944336, 0.008256863594055176, 0.008285152435302734, 0.00823852825164795, 0.008297023773193359, 0.008253439903259278, 0.00828006362915039, 0.008265727996826172, 0.008256671905517578, 0.00839129638671875, 0.008560416221618652, 0.008515647888183594, 0.008474687576293945, 0.00841536045074463, 0.00841097640991211, 0.008351327896118164, 0.008311360359191895, 0.008370368003845215, 0.00831283187866211, 0.008327487945556641, 0.00832051181793213, 0.008195808410644531, 0.008200672149658203, 0.008290495872497559, 0.008173376083374024, 0.008159168243408203, 0.008128640174865722, 0.008150976181030273, 0.008113856315612794, 0.008140831947326661, 0.00818819236755371, 0.008112128257751466, 0.008302335739135741, 0.008132863998413085, 0.0081080322265625, 0.008071392059326173, 0.008060192108154297, 0.00812399959564209, 0.008067263603210448, 0.00810700798034668, 0.008060416221618653, 0.008117792129516601, 0.008080127716064454, 0.008087488174438476, 0.008141823768615723, 0.008086527824401855, 0.008091648101806641, 0.008071167945861817, 0.0081080322265625, 0.008070560455322265, 0.008131168365478515, 0.008046688079833985, 0.00807753562927246, 0.008046272277832032, 0.008170528411865234, 0.00805945587158203, 0.008034720420837402, 0.007895008087158203, 0.008077343940734864, 0.008042495727539062, 0.008052576065063476, 0.008064288139343262, 0.008016736030578613, 0.008074655532836914, 0.008026752471923828, 0.008025535583496094, 0.008054368019104004, 0.008076255798339843, 0.008087552070617676, 0.00808569622039795, 0.008107135772705078, 0.008108480453491211, 0.008087807655334473, 0.008091648101806641, 0.008058527946472167, 0.007999519824981689, 0.008085824012756348, 0.008009407997131348, 0.00812012767791748, 0.008181952476501464, 0.008057151794433594, 0.008087552070617676, 0.008160575866699219, 0.00809993553161621, 0.008002143859863281, 0.00806499195098877, 0.00806704044342041, 0.00807753562927246, 0.00820412826538086, 0.008550496101379394, 0.00817471981048584, 0.008040287971496581, 0.008038592338562012, 0.008028927803039551, 0.008175616264343261, 0.008046367645263671, 0.008103167533874512, 0.008098431587219238, 0.008214879989624023, 0.008232959747314453, 0.008083871841430664, 0.008050432205200195, 0.008022015571594238, 0.008082528114318848, 
0.008179807662963867, 0.008049311637878418, 0.0080382080078125, 0.00802947235107422, 0.00803273582458496, 0.008048224449157714, 0.008100704193115234, 0.008042816162109376, 0.00806060791015625, 0.008042112350463867, 0.008036800384521484, 0.008159199714660644, 0.008459520339965821, 0.008213215827941895, 0.0081014404296875, 0.008068703651428222]",tokens/s,119.2388899184842,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3900, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File 
""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,933.347328,641.59744,0.0,239.075328,225.530368,s,1,7.75225439453125,7.75225439453125,0.0,7.75225439453125,7.75225439453125,7.75225439453125,7.75225439453125,[7.75225439453125],,kWh,2.1314118991654142e-05,2.3431838227763432e-06,5.978060338002189e-06,2.9635363152432676e-05,,MB,1327.538176,733.872128,0.0,316.669952,285.824512,s,10,0.2095629138946533,0.02095629138946533,0.0003355476054913902,0.020923359870910645,0.0213248104095459,0.02144235725402832,0.02153639472961426,"[0.021559904098510742, 0.021187456130981445, 0.021298688888549806, 0.02093552017211914, 0.021255840301513673, 0.02059881591796875, 0.02059600067138672, 0.02091119956970215, 0.020631168365478517, 0.020588319778442384]",tokens/s,12215.901909470991,kWh,6.150000256228325e-07,6.78235697591607e-08,3.6823713669473087e-07,1.051060732076724e-06,tokens/kWh,243563470.87021875,MB,1360.592896,775.815168,0.0,358.612992,297.747968,s,10,10.140543029785157,1.0140543029785156,0.013402947383342408,1.011234375,1.0319357666015625,1.0376974365234375,1.0423067724609374,"[1.0306553955078126, 1.0434591064453125, 1.0155003662109374, 1.016322509765625, 1.0176226806640625, 1.0064497680664062, 1.005771484375, 0.99901904296875, 0.9987742919921875, 1.0069683837890624]",tokens/s,62.1268504210812,kWh,2.8881392158960423e-05,3.1850991709041666e-06,1.0540021589906274e-05,4.260651291977086e-05,tokens/kWh,1478647.1758116088,,s,630,10.13371453285218,0.016085261163257418,0.00040710155788790703,0.016052288055419923,0.01638685359954834,0.016480169296264648,0.017334088726043702,"[0.016094112396240236, 0.016222047805786132, 0.01616502380371094, 0.016066591262817384, 0.016148448944091797, 0.01619353675842285, 0.016187583923339844, 0.01634409523010254, 0.016257087707519532, 0.016245471954345704, 0.016194879531860353, 0.016239295959472655, 0.01643519973754883, 0.016477888107299804, 0.016200000762939454, 0.01619264030456543, 0.01641152000427246, 0.016271583557128905, 0.01679462432861328, 0.016325408935546876, 0.016183296203613282, 0.01615667152404785, 0.016242687225341796, 0.016210079193115234, 0.01617078399658203, 0.016316320419311522, 0.016445215225219727, 0.016200063705444336, 0.016270591735839845, 0.01630080032348633, 0.016471584320068358, 0.016253087997436525, 0.01628003120422363, 0.01634102439880371, 0.016249887466430663, 0.01626316833496094, 0.016433120727539063, 0.016796480178833006, 0.01701683235168457, 0.01810867118835449, 0.01663974380493164, 0.016328704833984374, 
0.016355552673339845, 0.0163623046875, 0.016300895690917968, 0.016260639190673828, 0.016411231994628905, 0.016441343307495117, 0.016393247604370116, 0.01627872085571289, 0.01618511962890625, 0.016451583862304688, 0.016192672729492187, 0.016525888442993165, 0.016172479629516602, 0.016311391830444336, 0.016426368713378905, 0.016268800735473633, 0.01639718437194824, 0.016443584442138674, 0.016186464309692384, 0.016227039337158203, 0.016199680328369142, 0.016230655670166017, 0.016246528625488282, 0.016302080154418946, 0.016764255523681642, 0.016333280563354494, 0.01635862350463867, 0.01627606391906738, 0.01677289581298828, 0.016486431121826173, 0.016554527282714844, 0.016316448211669922, 0.01637276840209961, 0.01645257568359375, 0.01650092887878418, 0.016480064392089842, 0.016543519973754882, 0.01631439971923828, 0.016381439208984376, 0.01638675117492676, 0.016500991821289064, 0.01622809600830078, 0.016480255126953124, 0.01636083221435547, 0.01638777542114258, 0.016263263702392578, 0.016325471878051757, 0.01637798309326172, 0.016432640075683593, 0.016375520706176757, 0.016234592437744142, 0.016339616775512697, 0.016374847412109376, 0.01672435188293457, 0.01631043243408203, 0.016379968643188476, 0.016306240081787108, 0.0164003849029541, 0.016419008255004884, 0.016343008041381835, 0.01629801559448242, 0.016381952285766603, 0.016359392166137694, 0.016267295837402343, 0.01620992088317871, 0.016284767150878905, 0.016356319427490235, 0.01653753662109375, 0.01734249687194824, 0.022081504821777342, 0.019544256210327147, 0.01731350326538086, 0.016452735900878906, 0.016309215545654298, 0.016300256729125977, 0.016347232818603515, 0.016237663269042968, 0.016392959594726562, 0.016369504928588866, 0.016416767120361327, 0.016363296508789062, 0.016330976486206055, 0.01639017677307129, 0.016230367660522462, 0.01631350326538086, 0.016311071395874024, 0.016291839599609375, 0.01632467269897461, 0.01623664093017578, 0.016797536849975585, 0.016385887145996095, 0.0163985595703125, 0.016385759353637695, 0.016298208236694336, 0.016180511474609374, 0.01680656051635742, 0.01630009651184082, 0.016334367752075196, 0.016329216003417968, 0.01630156707763672, 0.016232032775878907, 0.016319488525390623, 0.016334047317504884, 0.016409248352050782, 0.01627907180786133, 0.016289247512817382, 0.016260095596313476, 0.016310272216796876, 0.01622425651550293, 0.016074560165405274, 0.016306367874145508, 0.015851519584655763, 0.015699968338012696, 0.015715904235839843, 0.016081184387207032, 0.015893919944763184, 0.01576848030090332, 0.015971936225891114, 0.01565644836425781, 0.015860735893249513, 0.015824447631835936, 0.015722816467285156, 0.015761280059814455, 0.016031423568725587, 0.01604332733154297, 0.015805343627929687, 0.01580246353149414, 0.015703807830810546, 0.015839648246765137, 0.015648320198059083, 0.016083328247070313, 0.01572876834869385, 0.015712063789367677, 0.0158822078704834, 0.01594598388671875, 0.015972096443176268, 0.01639638328552246, 0.016217151641845704, 0.0169564151763916, 0.016164703369140623, 0.01599897575378418, 0.01589616012573242, 0.015943552017211916, 0.016238176345825195, 0.0157838716506958, 0.016235519409179687, 0.0159617919921875, 0.016155040740966797, 0.016097280502319337, 0.01626963233947754, 0.01640166473388672, 0.01603014373779297, 0.016038976669311523, 0.015973312377929687, 0.015994848251342772, 0.015857791900634764, 0.016250112533569335, 0.01613260841369629, 0.01614614486694336, 0.016179616928100587, 0.01612771224975586, 0.016092992782592772, 0.016040447235107422, 0.016068607330322265, 
0.015980544090270995, 0.01588748836517334, 0.01607769584655762, 0.01604198455810547, 0.01620966339111328, 0.01593171215057373, 0.015902815818786623, 0.015880031585693358, 0.016021120071411134, 0.016130016326904296, 0.016091327667236328, 0.016056543350219728, 0.015988736152648925, 0.016211551666259767, 0.01629635238647461, 0.016115007400512697, 0.016304832458496094, 0.01636147117614746, 0.016125120162963868, 0.01608787155151367, 0.016134143829345703, 0.01615398406982422, 0.016235136032104493, 0.016295040130615234, 0.016067455291748047, 0.016095232009887696, 0.016019487380981447, 0.016103391647338868, 0.015913151741027833, 0.01583894443511963, 0.016193632125854493, 0.01617001533508301, 0.016028640747070312, 0.016252927780151367, 0.015986687660217287, 0.016123392105102538, 0.016001535415649415, 0.016170047760009767, 0.016024511337280275, 0.016027456283569337, 0.016011808395385744, 0.01634828758239746, 0.01625347137451172, 0.016037567138671875, 0.016138816833496095, 0.017370880126953123, 0.01664143943786621, 0.016497503280639647, 0.016160127639770507, 0.016234880447387696, 0.016232448577880858, 0.016258655548095705, 0.016349599838256835, 0.016463872909545898, 0.01635327911376953, 0.016359424591064452, 0.016261215209960937, 0.01638800048828125, 0.01622425651550293, 0.01626316833496094, 0.01624233627319336, 0.016144256591796875, 0.016734687805175782, 0.016060543060302735, 0.01620569610595703, 0.0161814079284668, 0.016364959716796874, 0.016312768936157226, 0.016234495162963866, 0.016162303924560546, 0.01614633560180664, 0.01610601615905762, 0.016211360931396485, 0.016194175720214844, 0.016065696716308593, 0.0162108154296875, 0.01618124771118164, 0.01616841506958008, 0.016147071838378907, 0.01611996841430664, 0.016130943298339844, 0.016155519485473634, 0.016011007308959962, 0.016102783203125, 0.01605235290527344, 0.01587715244293213, 0.01606825637817383, 0.01589891242980957, 0.01620560073852539, 0.01588742446899414, 0.0158373441696167, 0.015991583824157714, 0.015842559814453126, 0.016120447158813476, 0.01587347221374512, 0.016093887329101563, 0.015932640075683593, 0.015887136459350585, 0.015883744239807127, 0.015890975952148438, 0.016095232009887696, 0.01607865524291992, 0.016124448776245116, 0.01633833694458008, 0.01606233596801758, 0.015923583984375, 0.01602505683898926, 0.01579062366485596, 0.015929023742675782, 0.015742112159729003, 0.015895392417907713, 0.016057504653930663, 0.01608790397644043, 0.016052223205566405, 0.015961376190185547, 0.015798975944519043, 0.0159716796875, 0.015694047927856444, 0.015784704208374023, 0.01632534408569336, 0.0174704647064209, 0.016338144302368164, 0.016183807373046876, 0.016158336639404296, 0.01595779228210449, 0.015859871864318847, 0.015993568420410158, 0.016147775650024412, 0.01624700736999512, 0.016078720092773436, 0.015897279739379884, 0.015853823661804198, 0.016090431213378907, 0.01596076774597168, 0.015947104454040528, 0.015789759635925292, 0.015823488235473634, 0.015849472045898438, 0.01573468780517578, 0.015666720390319824, 0.015655488014221192, 0.015716032028198244, 0.015745344161987303, 0.015742976188659667, 0.015918335914611816, 0.015944064140319823, 0.016273792266845704, 0.01626140785217285, 0.01613590431213379, 0.01602505683898926, 0.015974944114685058, 0.016080896377563478, 0.016135711669921875, 0.016019935607910155, 0.016044288635253905, 0.015975872039794923, 0.015874367713928222, 0.016213024139404297, 0.016126176834106446, 0.0161329288482666, 0.01598400020599365, 0.01591967964172363, 0.015759360313415526, 0.015789088249206544, 
0.01585478401184082, 0.015775424003601074, 0.01561945629119873, 0.015571680068969727, 0.015558015823364258, 0.015584896087646485, 0.0161046085357666, 0.015884256362915038, 0.015647104263305664, 0.015533727645874023, 0.015802720069885253, 0.015805983543395997, 0.016064384460449218, 0.015872896194458006, 0.015914719581604005, 0.015922304153442382, 0.015926048278808592, 0.01585974407196045, 0.015678560256958008, 0.01566534423828125, 0.015792927742004394, 0.01584108829498291, 0.01610495948791504, 0.016130239486694335, 0.015981247901916504, 0.016101184844970702, 0.0160994873046875, 0.015909024238586426, 0.01584003162384033, 0.015872127532958986, 0.015909664154052733, 0.015932703971862795, 0.01583148765563965, 0.015882240295410157, 0.015770976066589355, 0.015744128227233888, 0.015748543739318846, 0.015644960403442383, 0.015694047927856444, 0.0159681282043457, 0.01603379249572754, 0.01589891242980957, 0.015850943565368654, 0.01593350410461426, 0.01621615982055664, 0.01605459213256836, 0.016228160858154296, 0.01603955268859863, 0.01598703956604004, 0.016324159622192382, 0.01616147232055664, 0.016074560165405274, 0.016016639709472657, 0.016395999908447267, 0.016776191711425782, 0.01637171173095703, 0.016125600814819337, 0.01601571273803711, 0.01604198455810547, 0.01579212760925293, 0.015955391883850097, 0.015970879554748536, 0.015888383865356445, 0.015871232032775878, 0.015745792388916015, 0.01581663990020752, 0.01589433574676514, 0.016222463607788087, 0.016074752807617186, 0.015921152114868165, 0.015902976036071777, 0.015799903869628908, 0.01597856044769287, 0.015870207786560057, 0.015826623916625978, 0.015823136329650878, 0.01584982395172119, 0.015824031829833985, 0.01593715190887451, 0.015860735893249513, 0.015585280418395997, 0.01568499183654785, 0.01583343982696533, 0.01593782424926758, 0.015804415702819825, 0.015884287834167482, 0.015938655853271484, 0.015845919609069826, 0.01572697639465332, 0.015730976104736328, 0.015728351593017578, 0.015596768379211427, 0.015624159812927247, 0.0156309757232666, 0.01556604766845703, 0.015425567626953124, 0.015385184288024902, 0.015445535659790038, 0.015399744033813476, 0.015557760238647462, 0.016046976089477537, 0.016017055511474608, 0.015919039726257325, 0.015968671798706056, 0.015862879753112794, 0.015798208236694335, 0.015891136169433592, 0.01581289577484131, 0.01582217597961426, 0.015827008247375488, 0.015953760147094726, 0.015992608070373535, 0.015971327781677248, 0.016149631500244142, 0.016040927886962892, 0.016109439849853517, 0.0160185604095459, 0.015972319602966308, 0.015929247856140135, 0.01579315185546875, 0.01580172824859619, 0.015823488235473634, 0.015956064224243165, 0.015824095726013183, 0.015879039764404298, 0.0157258882522583, 0.01581257629394531, 0.015839936256408692, 0.01686662483215332, 0.016329248428344725, 0.015986592292785644, 0.015949312210083007, 0.01574515247344971, 0.015786463737487794, 0.015943552017211916, 0.016042495727539064, 0.01619843292236328, 0.01611871910095215, 0.01591500759124756, 0.015728639602661132, 0.015814528465270997, 0.015992159843444823, 0.01587279987335205, 0.016462976455688477, 0.016175935745239258, 0.015843392372131348, 0.01582694435119629, 0.015793760299682616, 0.015970720291137695, 0.015945247650146484, 0.015886816024780273, 0.016044031143188475, 0.015796416282653807, 0.015726400375366212, 0.015737183570861818, 0.01567903995513916, 0.015711999893188475, 0.016034143447875977, 0.016021600723266603, 0.015979776382446288, 0.015782719612121583, 0.01568342399597168, 0.01593139171600342, 0.015848480224609374, 
0.015841376304626464, 0.015819583892822266, 0.015666848182678224, 0.0158887996673584, 0.015768896102905272, 0.016147136688232422, 0.015629599571228028, 0.01570479965209961, 0.015613951683044434, 0.015747072219848633, 0.015837183952331545, 0.015800352096557616, 0.015843392372131348, 0.016340255737304688, 0.015778431892395018, 0.015564448356628418, 0.015681471824645996, 0.01572473621368408, 0.015710016250610352, 0.015571328163146972, 0.015759391784667967, 0.015572223663330078, 0.015671711921691894, 0.015761759757995604, 0.01571020793914795, 0.015773695945739748, 0.015904767990112305, 0.016029695510864257, 0.015677056312561034, 0.01585993576049805, 0.015571071624755859, 0.01564470386505127, 0.016035839080810545, 0.01563024044036865, 0.015676480293273926, 0.015526944160461426, 0.015525792121887207, 0.01547878360748291, 0.015553759574890138, 0.015686112403869627, 0.015475135803222656, 0.015609727859497071, 0.015613951683044434, 0.015590559959411622, 0.015895392417907713, 0.017497760772705078, 0.016023807525634766, 0.015984800338745116, 0.01597772789001465, 0.015954015731811523, 0.01592307186126709, 0.015946656227111815, 0.01593513584136963, 0.01615273666381836, 0.01620969581604004, 0.016171232223510742, 0.016021535873413085, 0.01610044860839844, 0.016085376739501955, 0.016046592712402344, 0.015972352027893065, 0.015841279983520508, 0.016029535293579103, 0.01591926383972168, 0.015931424140930177, 0.01587401580810547, 0.016164127349853515, 0.015899359703063966, 0.0160317440032959, 0.015892736434936522, 0.01585446357727051, 0.015897472381591796, 0.01581500816345215, 0.015685279846191405, 0.015856767654418947, 0.0162108154296875, 0.016055519104003907, 0.01618409538269043, 0.016113279342651367, 0.016093568801879884, 0.01616896057128906, 0.01615011215209961, 0.016114368438720703, 0.016053983688354492, 0.016150751113891602, 0.016272832870483398, 0.016078880310058594, 0.016311647415161133, 0.01602457618713379, 0.01652355194091797, 0.01599462413787842, 0.01599251174926758, 0.01598431968688965, 0.015874303817749024, 0.01598291206359863, 0.01593776035308838]",tokens/s,62.16871394567339,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,6410.99776,8461.877248,0.0,8059.355136,8042.68544,s,1,14.3769990234375,14.3769990234375,0.0,14.3769990234375,14.3769990234375,14.3769990234375,14.3769990234375,[14.3769990234375],,kWh,0.00021030209829995478,2.319057880981316e-05,6.248893888000095e-05,0.0002959816159897689,,MB,2242.281472,8577.220608,0.0,8160.018432,8140.420096,s,10,3.137843231201172,0.3137843231201172,0.0005093926945656181,0.3138793182373047,0.3142996215820313,0.31430964660644534,0.3143176666259766,"[0.3133050842285156, 0.31283721923828123, 0.3137593994140625, 0.3131062927246094, 0.31374609375, 0.3142088928222656, 0.31429739379882815, 0.3142639465332031, 0.3139992370605469, 
0.3143196716308594]",tokens/s,815.8470042558587,kWh,9.244411707162213e-06,1.0194890564117339e-06,6.146211514187518e-06,1.6410112277761463e-05,tokens/kWh,15600137.01715644,MB,2247.950336,8608.677888,0.0,8191.475712,8140.422656,s,10,47.65226220703126,4.765226220703124,0.024717258964014256,4.759848388671875,4.792545361328125,4.802509741210938,4.810481245117187,"[4.77412158203125, 4.7903310546875, 4.76039404296875, 4.78889453125, 4.81247412109375, 4.759302734375, 4.74559033203125, 4.73355224609375, 4.73279736328125, 4.75480419921875]",tokens/s,13.220778423128912,kWh,0.0001395478619170049,1.5392695366353177e-05,8.23766093040126e-05,0.00023731716658737068,tokens/kWh,265467.52140160045,,s,630,47.64937995147705,0.07563393643091597,0.0012719205367164262,0.07548595428466798,0.07674239807128906,0.07720237503051758,0.08054351448059084,"[0.07675360107421875, 0.07521279907226562, 0.07480083465576172, 0.0744750747680664, 0.07477529907226563, 0.07475199890136719, 0.07475609588623047, 0.07571427154541016, 0.07547113800048828, 0.07498137664794922, 0.07505622100830078, 0.07571244812011718, 0.07531587219238281, 0.07550559997558594, 0.075781982421875, 0.07465203094482421, 0.07399142456054687, 0.07430140686035157, 0.07484716796875, 0.07570636749267579, 0.07590502166748046, 0.07557529449462891, 0.07584767913818359, 0.07637606048583985, 0.0766924819946289, 0.07542681884765624, 0.07573270416259766, 0.0757701416015625, 0.07539427185058593, 0.07544035339355469, 0.0754816665649414, 0.07569817352294922, 0.07587020874023437, 0.07560326385498047, 0.0757278060913086, 0.07573846435546874, 0.07590297698974609, 0.0759148178100586, 0.0764200668334961, 0.07680806732177735, 0.08167609405517579, 0.07677561950683594, 0.07634124755859376, 0.07630665588378906, 0.0760973129272461, 0.07547494506835938, 0.075423583984375, 0.07567362976074218, 0.07661567687988281, 0.07656460571289063, 0.07625727844238281, 0.07673036956787109, 0.07675897979736328, 0.07642733001708985, 0.08073145294189453, 0.07572102355957032, 0.07496310424804688, 0.0750758056640625, 0.07535206604003906, 0.0749117431640625, 0.07454924774169921, 0.07477452850341797, 0.07477788543701172, 0.08101366424560547, 0.07545353698730468, 0.0757441635131836, 0.08776294708251953, 0.07538893127441407, 0.07575961303710937, 0.07529267120361328, 0.07514316558837891, 0.07544319915771484, 0.07535513305664063, 0.07592569732666016, 0.07557679748535157, 0.07481993865966798, 0.0786803207397461, 0.07970764923095704, 0.07496320343017578, 0.07493631744384766, 0.07543603515625, 0.07623065948486328, 0.07613849639892578, 0.07595980834960937, 0.07590962982177735, 0.07626265716552734, 0.07584844970703125, 0.07740211486816406, 0.07689859008789063, 0.07635279846191406, 0.076153564453125, 0.07641673278808593, 0.07567769622802735, 0.07583657836914062, 0.07552025604248047, 0.07511510467529296, 0.07548313903808594, 0.07626547241210938, 0.0761629409790039, 0.07578636932373047, 0.07644569396972656, 0.07542694091796875, 0.07581526184082031, 0.07575103759765625, 0.07719004821777344, 0.07700479888916016, 0.07613235473632812, 0.07614463806152344, 0.0757760009765625, 0.07546988677978515, 0.07570323181152344, 0.07517183685302735, 0.07462297821044922, 0.07532889556884766, 0.0747874526977539, 0.07567702484130859, 0.07451510620117187, 0.07529267120361328, 0.07492966461181641, 0.07561183929443359, 0.07536109161376953, 0.07530086517333984, 0.0747171859741211, 0.07540940856933594, 0.07447756958007813, 0.07418032073974609, 0.07517798614501953, 0.0748128662109375, 0.07490518188476562, 0.07571145629882813, 
0.07582723236083984, 0.07430931091308594, 0.07502671813964844, 0.07434413146972656, 0.07464163208007812, 0.07533782196044922, 0.07731986999511718, 0.07565920257568359, 0.07518479919433593, 0.0757834243774414, 0.07524604797363281, 0.07556710052490234, 0.0760335693359375, 0.07564569854736328, 0.0749928970336914, 0.07494703674316407, 0.07482777404785156, 0.07704271697998047, 0.07537763214111329, 0.07578214263916015, 0.0758437728881836, 0.0758823013305664, 0.07486879730224609, 0.0782458267211914, 0.07600252532958984, 0.07578089904785157, 0.0752515869140625, 0.07481324768066407, 0.0743075180053711, 0.0742977294921875, 0.07809228515625, 0.07446527862548828, 0.07492800140380859, 0.07488662719726563, 0.07467689514160156, 0.07387750244140626, 0.07435084533691406, 0.07431526184082031, 0.07673677062988281, 0.07644774627685547, 0.07662387084960938, 0.07668736267089844, 0.07699977874755859, 0.07702566528320312, 0.0767834243774414, 0.07653040313720703, 0.0765849609375, 0.07618150329589844, 0.07550902557373047, 0.07554736328125, 0.075859619140625, 0.0754648666381836, 0.07618374633789063, 0.07578009796142578, 0.07519987487792969, 0.07511103820800781, 0.07543571472167969, 0.07472563171386719, 0.07430063629150391, 0.07807389068603515, 0.08598166656494141, 0.07677558135986329, 0.07643910217285156, 0.07592185974121093, 0.07591718292236328, 0.07541158294677734, 0.07439360046386718, 0.0739653778076172, 0.07533792114257812, 0.07610572814941406, 0.07587225341796874, 0.07466770935058593, 0.07487725067138672, 0.07493222045898437, 0.07546844482421874, 0.0752665252685547, 0.07559769439697266, 0.07745126342773437, 0.07579193878173827, 0.07595053100585937, 0.07561011505126954, 0.07554156494140625, 0.07636463928222656, 0.07582844543457032, 0.07577398681640625, 0.0756580810546875, 0.07475609588623047, 0.07512204742431641, 0.07546080017089844, 0.0752870101928711, 0.07497676849365234, 0.07449443054199219, 0.07396147155761719, 0.07370272064208984, 0.07472518157958985, 0.07547174072265625, 0.07617945861816407, 0.07550498962402344, 0.07651554870605469, 0.08008338928222657, 0.0759582748413086, 0.07649485015869141, 0.07685321807861328, 0.07592758178710937, 0.07700275421142579, 0.0771520004272461, 0.0759319076538086, 0.07630461120605468, 0.07663593292236329, 0.0759552993774414, 0.07498435211181641, 0.07444992065429687, 0.07575456237792969, 0.07535609436035157, 0.07581900787353515, 0.07629804992675782, 0.07671622467041016, 0.07724559783935547, 0.07648137664794921, 0.0762429428100586, 0.07684915161132813, 0.07691779327392578, 0.07699628448486329, 0.07814444732666016, 0.076623779296875, 0.07581295776367188, 0.07662754821777344, 0.07634182739257812, 0.07566015625, 0.07624553680419922, 0.07713030242919922, 0.07859801483154297, 0.07647026824951172, 0.07493993377685547, 0.07456610870361328, 0.07488716888427735, 0.07492972564697266, 0.07650492858886719, 0.07630089569091797, 0.07720038604736328, 0.07817318725585938, 0.07614598083496094, 0.0759548797607422, 0.0758345947265625, 0.07677212524414062, 0.07734819030761719, 0.07582991790771484, 0.07599871826171875, 0.07581858825683593, 0.0762070083618164, 0.07589174652099609, 0.0760555191040039, 0.07646524810791015, 0.07706716918945312, 0.07570191955566406, 0.07648422241210938, 0.07598973083496094, 0.07669964599609375, 0.07700233459472657, 0.07648912048339844, 0.07649801635742187, 0.07685212707519531, 0.07624908447265626, 0.07675279998779297, 0.07588275146484375, 0.0767421417236328, 0.07699696350097657, 0.07601136016845703, 0.07610179138183594, 0.07656857299804687, 0.07685318756103515, 
0.07684102630615235, 0.07648255920410156, 0.07667008209228515, 0.07619673919677734, 0.07693926239013672, 0.0762798080444336, 0.07623065948486328, 0.07592905426025391, 0.07604688262939453, 0.0762060775756836, 0.07614873504638672, 0.07568697357177734, 0.07643231964111329, 0.07661567687988281, 0.07718061065673829, 0.07673065948486328, 0.07725023651123047, 0.07759670257568359, 0.07642934417724609, 0.0756319351196289, 0.07562937927246094, 0.07742057800292969, 0.0773039321899414, 0.07587020874023437, 0.07796288299560547, 0.07578253173828126, 0.0764865951538086, 0.07628374481201172, 0.07559142303466797, 0.07566579437255859, 0.07581110382080078, 0.07566960144042968, 0.07584703826904297, 0.07565347290039062, 0.07508172607421874, 0.0765248031616211, 0.0748939208984375, 0.07533939361572266, 0.07409232330322266, 0.07438374328613281, 0.07549581146240235, 0.07499110412597657, 0.07443711853027343, 0.07489126586914062, 0.07653324890136719, 0.07479551696777344, 0.07515545654296875, 0.07553433227539062, 0.07544627380371094, 0.07643341064453125, 0.075446044921875, 0.0791226577758789, 0.07488041687011719, 0.0755568618774414, 0.07570006561279297, 0.07496985626220704, 0.07459561920166016, 0.07446720123291016, 0.07398041534423828, 0.07420358276367188, 0.07560921478271485, 0.07641577911376952, 0.07617734527587891, 0.07547289276123047, 0.07541561889648438, 0.07497904205322266, 0.07519055938720703, 0.0750940170288086, 0.07468956756591796, 0.07468879699707032, 0.074895263671875, 0.07412815856933594, 0.07450418853759766, 0.07454246520996094, 0.07407894134521484, 0.07406787109375, 0.07432806396484375, 0.07616028594970703, 0.0749838409423828, 0.0742557144165039, 0.07396867370605469, 0.07369926452636719, 0.07530604553222656, 0.07564998626708984, 0.0755077133178711, 0.07571647644042968, 0.07472252655029298, 0.0739234848022461, 0.07465727996826171, 0.07610585784912109, 0.07518236541748047, 0.07554825592041016, 0.07607084655761719, 0.07801062774658203, 0.07585004425048827, 0.07543398284912109, 0.07518412780761718, 0.07538595581054687, 0.07508470153808594, 0.07533526611328124, 0.07528224182128906, 0.07541232299804687, 0.07562242889404297, 0.07540911865234375, 0.07540560150146484, 0.07532943725585937, 0.07526751708984375, 0.07565760040283204, 0.07556505584716797, 0.07571775817871093, 0.07668329620361328, 0.07605538940429687, 0.07651532745361328, 0.07637126159667969, 0.07599171447753907, 0.07630850982666015, 0.07583280181884766, 0.07506374359130859, 0.07488317108154297, 0.07439974212646484, 0.07432192230224609, 0.07384268951416016, 0.07380172729492188, 0.07492396545410156, 0.0759682846069336, 0.07555506896972657, 0.07584732818603515, 0.07513279724121094, 0.07511090850830078, 0.0755384292602539, 0.07537459564208984, 0.07517593383789062, 0.07524972534179687, 0.07511443328857421, 0.0755077133178711, 0.07489126586914062, 0.07455129241943359, 0.07437312316894532, 0.07544217681884766, 0.07508582305908203, 0.07609513854980468, 0.07564921569824219, 0.07460265350341796, 0.07396761322021485, 0.07371932983398438, 0.08425107574462891, 0.07790332794189453, 0.07577247619628906, 0.07586956787109375, 0.0753202896118164, 0.07498870086669922, 0.07418931579589844, 0.07432147216796875, 0.07466028594970703, 0.0745403823852539, 0.074104736328125, 0.07392646026611328, 0.07370848083496094, 0.07458755493164063, 0.07501881408691406, 0.07505923461914063, 0.07594956970214843, 0.07566397094726562, 0.07521206665039062, 0.07500454711914062, 0.07444684600830079, 0.0754480972290039, 0.07599740600585937, 0.0754435806274414, 0.07429593658447266, 
0.07465570831298828, 0.07532508850097656, 0.07485292816162109, 0.07422752380371093, 0.07432806396484375, 0.07531753540039063, 0.07731171417236328, 0.07507113647460938, 0.07424240112304688, 0.07534150695800781, 0.07505133056640625, 0.07493631744384766, 0.07476223754882813, 0.07424205017089844, 0.07502438354492187, 0.07468851470947266, 0.07418470764160157, 0.07387312316894531, 0.07563292694091797, 0.07528425598144531, 0.07422783660888672, 0.07529071807861328, 0.07600947570800781, 0.07541321563720703, 0.07539740753173828, 0.07468377685546874, 0.07433280181884766, 0.07458799743652343, 0.07504422760009766, 0.07517263793945313, 0.07470899200439453, 0.07472946929931641, 0.07559168243408203, 0.07596832275390625, 0.07541011047363282, 0.07512268829345703, 0.07575276947021484, 0.07591343688964844, 0.07638066864013672, 0.07477040100097657, 0.07551558685302734, 0.07515888214111328, 0.07535868835449219, 0.07532173156738281, 0.07390025329589844, 0.07399935913085938, 0.07476854705810547, 0.07495053100585937, 0.07568185424804688, 0.07413353729248047, 0.07400323486328125, 0.07451033782958984, 0.07512608337402343, 0.07493497467041016, 0.07544790649414063, 0.07491407775878907, 0.07538700866699219, 0.07519641876220703, 0.0752168960571289, 0.07523356628417968, 0.07492374420166016, 0.07546265411376953, 0.07570636749267579, 0.0749546890258789, 0.0748524169921875, 0.07502028656005859, 0.07436902618408203, 0.07430143737792969, 0.0742764129638672, 0.07432614135742187, 0.07494016265869141, 0.07392467498779297, 0.0744616928100586, 0.07584153747558593, 0.07533363342285156, 0.07572617340087891, 0.07605260467529297, 0.07533827209472656, 0.07483586883544922, 0.07450572967529297, 0.07526979064941407, 0.07451929473876953, 0.0743896942138672, 0.07615692901611328, 0.07548876953125, 0.07467878723144532, 0.07507148742675782, 0.07616918182373048, 0.07553987121582031, 0.0756794204711914, 0.07526290893554688, 0.07552819061279296, 0.07509375762939453, 0.07494380950927734, 0.0749964828491211, 0.07646841430664063, 0.07601881408691406, 0.07513382720947266, 0.07466598510742188, 0.07501001739501953, 0.07480118560791016, 0.07780483245849609, 0.07518486022949218, 0.07511654663085937, 0.07633443450927735, 0.07534659576416015, 0.07535743713378906, 0.0757501449584961, 0.0760975341796875, 0.07674470520019532, 0.07653523254394531, 0.07649747467041015, 0.07625119781494141, 0.07646380615234374, 0.07618585968017579, 0.07577523040771485, 0.07530729675292969, 0.07503510284423828, 0.07480044555664063, 0.07430009460449219, 0.07455129241943359, 0.07506924438476563, 0.07532915496826172, 0.07460511779785156, 0.07418201446533203, 0.07382489776611328, 0.07350067138671874, 0.0742762222290039, 0.07632316589355469, 0.07586434936523437, 0.0753602523803711, 0.08367049407958985, 0.07522358703613281, 0.07663206481933593, 0.07598079681396484, 0.07614979553222656, 0.07564374542236328, 0.07524569702148437, 0.07521651458740235, 0.07486911773681641, 0.07465135955810547, 0.07496880340576172, 0.07560249328613282, 0.07539421081542969, 0.074775390625, 0.07489849853515625, 0.07479801940917968, 0.07557939147949219, 0.07584358215332031, 0.07554867553710938, 0.07563699340820312, 0.07495449829101562, 0.07563878631591797, 0.07451554870605469, 0.07378540802001952, 0.07720400238037109, 0.07429695892333985, 0.07407695770263673, 0.0743474578857422]",tokens/s,13.221578132633622,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,7436.88192,8041.463808,0.0,7646.216192,7627.584,s,1,13.0828779296875,13.0828779296875,0.0,13.0828779296875,13.0828779296875,13.0828779296875,13.0828779296875,[13.0828779296875],,kWh,0.00017310213232083242,1.908722760301612e-05,5.2883375640000105e-05,0.00024507273556384864,,MB,1765.060608,8687.386624,0.0,8277.458944,8199.8592,s,10,3.5461555175781245,0.35461555175781245,0.0007464210454196315,0.35487646484375,0.3554279327392578,0.35550009918212894,0.3555578323364258,"[0.35337371826171876, 0.35448025512695314, 0.3538238525390625, 0.3535220336914062, 0.354822509765625, 0.35504058837890623, 0.354930419921875, 0.35541189575195314, 0.355572265625, 0.355177978515625]",tokens/s,721.9085534489961,kWh,1.0389587062930928e-05,1.145673224278213e-06,6.860972921724143e-06,1.8396233208933282e-05,tokens/kWh,13915892.296673289,MB,1777.09056,9001.959424,0.0,8592.031744,8476.849152,s,10,29.368402099609373,2.9368402099609376,0.003935139654403692,2.9374609375,2.9412039794921876,2.9414275512695314,2.941606408691406,"[2.931014892578125, 2.928758544921875, 2.937771240234375, 2.941154296875, 2.937150634765625, 2.939466796875, 2.941651123046875, 2.935904541015625, 2.9364609375, 2.939069091796875]",tokens/s,21.451626747114698,kWh,8.633575688248555e-05,9.523114343253929e-06,5.7382856251075835e-05,0.00015324172747681528,tokens/kWh,411115.1775519601,,s,630,29.361263904571537,0.0466051808009072,0.0003901684382636195,0.04659067153930664,0.04708167533874512,0.04720060176849365,0.04764319000244141,"[0.04670352172851563, 0.046142688751220705, 0.045762462615966795, 0.046123905181884764, 0.045878944396972654, 0.04604118347167969, 0.04607001495361328, 0.04605324935913086, 0.04619046401977539, 0.045961471557617185, 0.04616191864013672, 0.04620470428466797, 0.04629718399047852, 0.04616220855712891, 0.046212959289550784, 0.046102527618408204, 0.04604288101196289, 0.04619651031494141, 0.04616649627685547, 0.04619878387451172, 0.04636262512207031, 0.046534656524658206, 0.04632912063598633, 0.04638496017456055, 0.046219966888427735, 0.047315166473388674, 0.0464496955871582, 0.04650902557373047, 0.04641321563720703, 0.046432865142822265, 0.046189697265625, 0.04632665634155273, 0.046351390838623045, 0.046349281311035155, 0.04656332778930664, 0.04650188827514649, 0.04632275390625, 0.04637382507324219, 0.046581153869628904, 0.046578559875488285, 0.04644015884399414, 0.046499839782714845, 0.04759500885009765, 0.048302593231201174, 0.046728511810302735, 0.046682815551757816, 0.04698316955566406, 0.04741308975219727, 0.04673961639404297, 0.04672512054443359, 0.04695964813232422, 0.0466728630065918, 0.046604286193847655, 0.046581760406494144, 0.04689420700073242, 0.04684275054931641, 0.04688076782226563, 0.04661043167114258, 0.04679270553588867, 0.04677632141113281, 0.04688803100585937, 0.04679315185546875, 0.04718384170532226, 
0.046684318542480466, 0.04623353576660156, 0.04602719879150391, 0.04585382461547852, 0.04574835205078125, 0.045943294525146484, 0.045917312622070314, 0.045853569030761716, 0.04598905563354492, 0.04589241409301758, 0.04598988723754883, 0.046040321350097654, 0.046279422760009764, 0.04622489547729492, 0.04590233612060547, 0.04646236801147461, 0.046211681365966796, 0.046137054443359374, 0.046270751953125, 0.04633804702758789, 0.04623686218261719, 0.04655724716186523, 0.046365440368652346, 0.046366367340087894, 0.04605987167358398, 0.046137344360351565, 0.046252033233642575, 0.046206878662109374, 0.046205024719238284, 0.04629913711547851, 0.046325759887695314, 0.04627241516113281, 0.04654499053955078, 0.04651193618774414, 0.04622518539428711, 0.04654655838012695, 0.04654569625854492, 0.04642611312866211, 0.046647296905517575, 0.04667747116088867, 0.04664579010009766, 0.04665139389038086, 0.04680681610107422, 0.04761008071899414, 0.04676198577880859, 0.046688255310058595, 0.04669164657592773, 0.04697875213623047, 0.046898174285888675, 0.046626399993896485, 0.04686064147949219, 0.04705900955200195, 0.04677427291870117, 0.04670259094238281, 0.04694015884399414, 0.047081024169921874, 0.04706963348388672, 0.04686000061035156, 0.046784801483154295, 0.04682547378540039, 0.0470200309753418, 0.04716080093383789, 0.047163681030273436, 0.04653670501708984, 0.04603481674194336, 0.04583817672729492, 0.04612739181518555, 0.045813758850097655, 0.04611072158813476, 0.04614144134521484, 0.04603289413452148, 0.0459521598815918, 0.046104896545410154, 0.04619318389892578, 0.04609843063354492, 0.04641177749633789, 0.04627190399169922, 0.046228065490722656, 0.04637286376953125, 0.04649369430541992, 0.04648527908325195, 0.04642627334594727, 0.046610496520996095, 0.04683699035644531, 0.04659027099609375, 0.046559680938720704, 0.04628844833374023, 0.04628688049316406, 0.0462459831237793, 0.04642438507080078, 0.04650982284545899, 0.046340351104736326, 0.0466143684387207, 0.046773536682128906, 0.046521278381347654, 0.04657497787475586, 0.04668454360961914, 0.048195137023925784, 0.04675443267822266, 0.04644454574584961, 0.046505630493164064, 0.046757568359375, 0.04736886215209961, 0.04674969482421875, 0.04678041458129883, 0.04675788879394531, 0.046886913299560545, 0.046718784332275394, 0.04657785415649414, 0.04682870483398437, 0.046926559448242186, 0.046752960205078124, 0.04687558364868164, 0.047249408721923826, 0.04696854400634766, 0.04701827239990235, 0.04686643218994141, 0.047010814666748044, 0.047010879516601566, 0.04689503860473633, 0.04681907272338867, 0.047368446350097654, 0.04724531173706055, 0.04713881683349609, 0.04701113510131836, 0.0470552978515625, 0.04673491287231445, 0.04633951950073242, 0.04594099044799805, 0.046297855377197265, 0.046011489868164064, 0.046110847473144534, 0.04614377593994141, 0.04618844985961914, 0.04608265686035156, 0.046083263397216793, 0.046218048095703124, 0.046478687286376955, 0.04647769546508789, 0.04623593521118164, 0.04634444808959961, 0.04639254379272461, 0.04627510452270508, 0.046292991638183595, 0.046342144012451174, 0.04647731018066406, 0.046696449279785154, 0.046712833404541014, 0.04652860641479492, 0.04643756866455078, 0.04649801635742187, 0.04648191833496094, 0.04670873641967774, 0.046804126739501954, 0.04661033630371094, 0.04668307113647461, 0.04679679870605469, 0.04669440078735351, 0.046671871185302735, 0.04676607894897461, 0.046650753021240235, 0.04669279861450195, 0.046561729431152346, 0.04669385528564453, 0.046874912261962894, 0.046854145050048826, 0.04686431884765625, 
0.046771358489990235, 0.04682640075683594, 0.04687865447998047, 0.04676409530639648, 0.047027488708496094, 0.047182464599609376, 0.047081409454345705, 0.046918846130371096, 0.04722787094116211, 0.04708051300048828, 0.04694931030273437, 0.04684799957275391, 0.046876129150390626, 0.04708377456665039, 0.04707731246948242, 0.0474579849243164, 0.046921470642089846, 0.04716432189941406, 0.04712038421630859, 0.0471615982055664, 0.047110942840576174, 0.04716003036499023, 0.04696604919433594, 0.04639104080200195, 0.04597414398193359, 0.046145313262939455, 0.04610924911499024, 0.046203937530517575, 0.04604412841796875, 0.04612668609619141, 0.046067615509033204, 0.045894142150878905, 0.046247390747070315, 0.04627920150756836, 0.04634009552001953, 0.04620646286010742, 0.04614809417724609, 0.04632748794555664, 0.046209152221679685, 0.04643420791625977, 0.0464117431640625, 0.046456897735595706, 0.04651375961303711, 0.04687094497680664, 0.04665164947509766, 0.04641791915893555, 0.04648531341552734, 0.046372928619384766, 0.04642214584350586, 0.04648294448852539, 0.04637270355224609, 0.046451038360595706, 0.04638956832885742, 0.04656060791015625, 0.04664591979980469, 0.04680729675292969, 0.04646271896362305, 0.04661372756958008, 0.04671551895141601, 0.04679030227661133, 0.04680755233764648, 0.0466778564453125, 0.04668771362304688, 0.04683436965942383, 0.04743167877197266, 0.04676607894897461, 0.04682710266113281, 0.04674959945678711, 0.047294975280761715, 0.04686617660522461, 0.04671641540527344, 0.046848766326904295, 0.046878719329833986, 0.04689894485473633, 0.046833919525146483, 0.046728382110595705, 0.047084095001220704, 0.0469854736328125, 0.046948158264160156, 0.04703251266479492, 0.0473105583190918, 0.047032608032226565, 0.047026176452636716, 0.04706304168701172, 0.04709894561767578, 0.046640640258789064, 0.04622079849243164, 0.046023681640625, 0.046086143493652344, 0.04595663833618164, 0.046072288513183596, 0.04617324829101563, 0.04613216018676758, 0.046033088684082034, 0.04609724807739258, 0.046373855590820315, 0.046225406646728515, 0.04653263854980469, 0.046393310546875, 0.04636876678466797, 0.04642201614379883, 0.04653875350952148, 0.04657561492919922, 0.046202880859375, 0.046440448760986325, 0.046451744079589845, 0.04651721572875977, 0.047470592498779295, 0.04656470489501953, 0.04640835189819336, 0.04631961441040039, 0.046392864227294925, 0.04652080154418945, 0.04652767944335937, 0.04652729415893555, 0.046481311798095705, 0.04655523300170898, 0.04655718231201172, 0.046581760406494144, 0.046383102416992186, 0.046712833404541014, 0.047923198699951174, 0.047924606323242185, 0.046795455932617185, 0.0466759033203125, 0.04659807968139648, 0.04667308807373047, 0.04684684753417969, 0.04672284698486328, 0.04677030563354492, 0.046739425659179684, 0.04686656188964844, 0.04693753433227539, 0.046914112091064455, 0.04689433670043945, 0.046936416625976564, 0.04683407974243164, 0.046798336029052735, 0.04676409530639648, 0.047061439514160155, 0.04708467102050781, 0.04716838455200195, 0.04692582321166992, 0.04698316955566406, 0.04707942581176758, 0.04713676834106445, 0.04700748825073242, 0.04720256042480469, 0.046647296905517575, 0.046260223388671876, 0.04600774383544922, 0.046145889282226564, 0.046028705596923826, 0.04609260940551758, 0.046061569213867185, 0.04615292739868164, 0.046320415496826174, 0.04627046585083008, 0.04628684616088867, 0.046290145874023435, 0.04635113525390625, 0.04628070449829102, 0.046283935546875, 0.04647932815551758, 0.046535552978515624, 0.0463699836730957, 0.04630352020263672, 
0.0464031982421875, 0.04647520065307617, 0.046695392608642576, 0.04640703964233398, 0.046391937255859376, 0.04637638473510742, 0.04637446212768555, 0.04663180923461914, 0.046623905181884764, 0.04644758224487305, 0.04659404754638672, 0.04646211242675781, 0.04658671951293945, 0.0468513298034668, 0.04685660934448242, 0.046559585571289065, 0.04684185409545898, 0.046630912780761716, 0.04664495849609375, 0.04692816162109375, 0.04699955368041992, 0.046845951080322266, 0.047099903106689454, 0.047265792846679686, 0.046951839447021484, 0.046840351104736326, 0.04687225723266602, 0.04700198364257813, 0.047175422668457034, 0.047728256225585936, 0.04702422332763672, 0.04697520065307617, 0.04754582214355469, 0.04709257507324219, 0.047032257080078126, 0.04721670532226562, 0.046947456359863284, 0.047030464172363284, 0.047104991912841794, 0.04699622344970703, 0.046926815032958986, 0.04720435333251953, 0.047034366607666016, 0.047083518981933595, 0.04696752166748047, 0.04640124893188476, 0.04600400161743164, 0.045973217010498044, 0.04587916946411133, 0.04609088134765625, 0.04597964859008789, 0.046053375244140625, 0.046061569213867185, 0.046002174377441404, 0.04613529586791992, 0.04616550445556641, 0.04633232116699219, 0.046137439727783204, 0.046142784118652344, 0.046416576385498044, 0.04632166290283203, 0.04624588775634766, 0.04617180633544922, 0.04643875122070312, 0.04659107208251953, 0.04672195053100586, 0.046374561309814454, 0.046317920684814454, 0.04638236618041992, 0.04622771072387695, 0.046631393432617185, 0.04638019180297852, 0.0463012466430664, 0.04651036834716797, 0.04645939254760742, 0.04668182373046875, 0.046771934509277344, 0.046633537292480466, 0.04657731246948242, 0.046784862518310544, 0.04666572952270508, 0.04679065704345703, 0.04689715194702149, 0.04702412796020508, 0.04693392181396484, 0.04694153594970703, 0.04687334442138672, 0.04679420852661133, 0.0467006721496582, 0.04686275100708008, 0.046920734405517577, 0.04684489440917969, 0.046884735107421874, 0.0480808334350586, 0.04663315200805664, 0.04676796722412109, 0.04676214218139649, 0.04666764831542969, 0.04681075286865234, 0.04692428970336914, 0.04685619354248047, 0.046746654510498045, 0.04680393600463867, 0.04716918563842774, 0.04715555191040039, 0.04708147048950195, 0.04725964736938477, 0.046662017822265624, 0.046229248046875, 0.04592051315307617, 0.04608943939208984, 0.046008735656738284, 0.046163551330566405, 0.04627536010742188, 0.04617340850830078, 0.045957920074462894, 0.04617580795288086, 0.04643260955810547, 0.04636476898193359, 0.046309024810791015, 0.046411136627197265, 0.04638614273071289, 0.04638719940185547, 0.046247905731201175, 0.04640163040161133, 0.04622438430786133, 0.046541152954101564, 0.04643900680541992, 0.04657513427734375, 0.046372638702392575, 0.04650259017944336, 0.04654079818725586, 0.04674969482421875, 0.046430206298828124, 0.04656332778930664, 0.04640553665161133, 0.04646255874633789, 0.04644009780883789, 0.04651430511474609, 0.0466091194152832, 0.04666572952270508, 0.0464793586730957, 0.046757823944091795, 0.04653472137451172, 0.04657503890991211, 0.046757984161376956, 0.04764854431152344, 0.046868961334228514, 0.046739681243896485, 0.046941665649414065, 0.04683622360229492, 0.046790496826171875, 0.04684000015258789, 0.046835712432861325, 0.04681836700439453, 0.04667692947387695, 0.046710945129394534, 0.04672700881958008, 0.04673126220703125, 0.046667007446289065, 0.04677228927612305, 0.046873153686523436, 0.047042911529541015, 0.04688873672485352, 0.046922046661376955, 0.047122112274169924, 
0.04707660675048828, 0.0471162223815918, 0.04718675231933594, 0.047158912658691404, 0.04683161544799805, 0.046196449279785154, 0.04594633483886719, 0.04615996932983398, 0.04591830444335938, 0.04616767883300781, 0.046099361419677735, 0.04613711929321289, 0.04616995239257812, 0.04611455917358399, 0.0462825927734375, 0.046340991973876956, 0.046330974578857424, 0.04618332672119141, 0.046159870147705076, 0.046368511199951175, 0.04638729476928711, 0.04652150344848633, 0.046410751342773435, 0.0466464958190918, 0.046478111267089846, 0.04660588836669922, 0.046452224731445314, 0.04651462554931641, 0.046421600341796876, 0.046303295135498045, 0.046357025146484376, 0.04622367858886719, 0.046467071533203126, 0.04689920043945312, 0.04649369430541992, 0.04648550415039063, 0.04674969482421875, 0.04659977722167969, 0.0464277114868164, 0.04715155029296875, 0.04659651184082031, 0.04755027389526367, 0.04680019378662109, 0.04687756729125977, 0.04674345779418945, 0.04670816040039062, 0.04681081771850586, 0.04692272186279297, 0.04688614273071289, 0.0468507194519043, 0.04671404647827149, 0.046701473236083986, 0.046675167083740234, 0.04666592025756836, 0.04733536148071289, 0.047630081176757814, 0.04695951843261719, 0.047007553100585936, 0.04720659255981445, 0.04687257766723633, 0.04715097427368164, 0.04707888031005859, 0.04702684783935547, 0.04708713531494141, 0.04719820785522461, 0.046985694885253906, 0.047286113739013674]",tokens/s,21.45684198226594,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,828.317696,551.419904,0.0,148.897792,141.633024,s,1,7.85617529296875,7.85617529296875,0.0,7.85617529296875,7.85617529296875,7.85617529296875,7.85617529296875,[7.85617529296875],,kWh,1.905248846669565e-05,2.0942869207233018e-06,6.0550048440050075e-06,2.720178023142396e-05,,MB,1346.813952,666.763264,0.0,249.561088,216.900608,s,18,0.22043075370788576,0.01224615298377143,0.00015460600347596269,0.012210927963256835,0.012412889862060546,0.012508708667755126,0.012602298679351807,"[0.011945792198181152, 0.01231766414642334, 0.012116864204406738, 0.012216447830200195, 0.012380672454833984, 0.012145855903625487, 0.012625696182250977, 0.012300640106201173, 0.012303263664245605, 0.012205408096313477, 0.012315456390380859, 0.012134528160095215, 0.01217580795288086, 0.01208409595489502, 0.01219808006286621, 0.012120320320129394, 0.012356096267700196, 0.012488063812255859]",tokens/s,20904.524085176017,kWh,3.558665168902106e-07,3.9244139023801684e-08,1.9255266081707006e-07,5.876633167310823e-07,tokens/kWh,435623583.6261103,MB,1380.401152,691.929088,0.0,274.726912,216.903168,s,18,10.265282165527344,0.570293453640408,0.00641641160208095,0.5675881958007813,0.5822632324218749,0.5840480255126953,0.5857986187744141,"[0.5680799560546875, 0.5646792602539062, 0.5665809326171874, 0.5862362670898438, 0.566578369140625, 0.572816162109375, 0.581663818359375, 0.583661865234375, 0.5663518676757813, 
0.5656045532226562, 0.5689368896484375, 0.5691878051757813, 0.566558837890625, 0.565134033203125, 0.5687664794921875, 0.5708627319335937, 0.5664859008789063, 0.567096435546875]",tokens/s,110.46944270155332,kWh,1.6321766202784838e-05,1.7998992873582238e-06,5.845569453627905e-06,2.3967234943770967e-05,tokens/kWh,2628588.577188941,,s,1134,10.256465497970575,0.009044502202795927,0.00020293997240955098,0.008980879783630371,0.009294595432281495,0.009384180593490601,0.00972926066398621,"[0.008744511604309082, 0.008953599929809571, 0.008907391548156738, 0.00898259162902832, 0.008966143608093263, 0.008921088218688965, 0.008943615913391113, 0.00894156837463379, 0.008930432319641112, 0.009023743629455567, 0.008941696166992188, 0.009140735626220703, 0.008887999534606934, 0.008859295845031739, 0.008873791694641113, 0.009102304458618165, 0.010477439880371094, 0.010010623931884765, 0.00899897575378418, 0.009011136054992677, 0.008947199821472167, 0.009408831596374512, 0.00954911994934082, 0.008962656021118164, 0.008917280197143555, 0.008907936096191406, 0.008889439582824708, 0.008933024406433105, 0.009043392181396484, 0.008870559692382813, 0.00889350414276123, 0.008925536155700683, 0.008911808013916016, 0.00893507194519043, 0.00892518424987793, 0.00898624038696289, 0.008929535865783692, 0.008916640281677246, 0.008882656097412109, 0.00890880012512207, 0.008890368461608887, 0.009037823677062988, 0.008911999702453613, 0.008960512161254883, 0.00894809627532959, 0.008939583778381348, 0.00892470359802246, 0.0088755521774292, 0.008935680389404297, 0.008935744285583497, 0.008941887855529785, 0.008914591789245606, 0.008878432273864745, 0.008886272430419923, 0.008992863655090331, 0.008896415710449218, 0.008888319969177246, 0.008920543670654297, 0.009017888069152833, 0.009170720100402832, 0.00946121597290039, 0.009027584075927735, 0.008985535621643066, 0.008941760063171387, 0.009068127632141113, 0.009130144119262695, 0.009016575813293458, 0.008989695549011231, 0.00897663974761963, 0.008978272438049316, 0.008943519592285155, 0.00894976043701172, 0.008959936141967774, 0.009169055938720702, 0.009012864112854005, 0.00895622444152832, 0.008939488410949707, 0.008955904006958008, 0.008947711944580078, 0.009029888153076171, 0.008935168266296386, 0.008924991607666015, 0.008959551811218262, 0.008872575759887695, 0.008873503684997558, 0.008892895698547364, 0.00888864040374756, 0.008852319717407226, 0.00913657569885254, 0.00891536045074463, 0.008957695960998536, 0.008902912139892577, 0.00889241600036621, 0.008917119979858399, 0.008906623840332032, 0.009004799842834473, 0.008912768363952637, 0.00890454387664795, 0.008947520256042481, 0.009163488388061523, 0.008920415878295899, 0.008907744407653809, 0.008913727760314941, 0.008901503562927246, 0.008888480186462402, 0.008876192092895507, 0.008912192344665528, 0.008996735572814942, 0.008893088340759277, 0.0089552001953125, 0.008976767539978028, 0.008925344467163086, 0.008850751876831054, 0.008889023780822753, 0.008916704177856446, 0.008903072357177735, 0.008872991561889648, 0.008884160041809083, 0.008924063682556152, 0.00898812770843506, 0.00901807975769043, 0.008992575645446777, 0.008943903923034667, 0.009037856101989747, 0.009168576240539551, 0.009025535583496093, 0.00880128002166748, 0.009116640090942383, 0.009047807693481446, 0.009007007598876952, 0.008959967613220215, 0.009001184463500976, 0.008986944198608399, 0.00892080020904541, 0.008971615791320801, 0.008978464126586915, 0.008982720375061036, 0.008906623840332032, 0.009037471771240235, 0.0089650239944458, 
0.008960000038146973, 0.009010815620422363, 0.009061087608337402, 0.009232031822204589, 0.00964572811126709, 0.00944921588897705, 0.009101920127868653, 0.009000351905822754, 0.009020031929016114, 0.008975584030151368, 0.009111680030822755, 0.008964096069335938, 0.00888435173034668, 0.008958687782287597, 0.008946911811828613, 0.008870623588562011, 0.008867679595947265, 0.009004544258117676, 0.008944224357604981, 0.008920991897583008, 0.008898176193237305, 0.008982912063598632, 0.008960000038146973, 0.008961600303649902, 0.008902815818786622, 0.0089334077835083, 0.008904959678649902, 0.008953151702880859, 0.008919199943542481, 0.009074399948120117, 0.008921279907226562, 0.009027839660644531, 0.008922528266906739, 0.008915936470031739, 0.008927167892456055, 0.008966015815734864, 0.008918944358825684, 0.008947263717651368, 0.008935135841369629, 0.008974495887756348, 0.009001215934753419, 0.008970848083496094, 0.00887564754486084, 0.008953408241271972, 0.008964415550231933, 0.00890726375579834, 0.008931327819824218, 0.008986144065856934, 0.00895638370513916, 0.00874112033843994, 0.009015040397644043, 0.009016544342041015, 0.009000767707824707, 0.009001728057861327, 0.009010944366455078, 0.009013152122497559, 0.008976287841796875, 0.009032575607299805, 0.008927295684814453, 0.008973664283752441, 0.008993151664733887, 0.008951807975769043, 0.008931455612182617, 0.008957216262817383, 0.008972160339355469, 0.0090098876953125, 0.009000960350036622, 0.01014748764038086, 0.00921452808380127, 0.009180000305175781, 0.009382847785949708, 0.009559776306152344, 0.00936518383026123, 0.009363871574401855, 0.009291968345642089, 0.009299967765808105, 0.009315744400024414, 0.009256832122802735, 0.009234560012817384, 0.009259615898132324, 0.009254912376403808, 0.00921395206451416, 0.009304127693176269, 0.009207263946533203, 0.009193792343139648, 0.009183391571044922, 0.009232416152954101, 0.009192480087280273, 0.009267840385437012, 0.009224512100219727, 0.009168671607971192, 0.009229951858520508, 0.009527839660644532, 0.009313952445983887, 0.009621631622314454, 0.009439231872558594, 0.009559359550476074, 0.0093787841796875, 0.009835519790649415, 0.009690303802490235, 0.00947385597229004, 0.00950614356994629, 0.00957916831970215, 0.009477472305297852, 0.009463744163513184, 0.011122847557067871, 0.010090880393981934, 0.009414591789245606, 0.009402624130249023, 0.009348352432250977, 0.009122015953063965, 0.009248640060424805, 0.009003168106079101, 0.009285056114196777, 0.0092227201461792, 0.009319456100463867, 0.00917427158355713, 0.00909273624420166, 0.009108672142028809, 0.009101759910583497, 0.00908745574951172, 0.009066495895385742, 0.009072416305541993, 0.00901251220703125, 0.009022015571594239, 0.009003711700439452, 0.00902956771850586, 0.008990176200866699, 0.008976672172546387, 0.008976479530334473, 0.009234335899353028, 0.009046015739440917, 0.00901036834716797, 0.009123711585998536, 0.00903388786315918, 0.009190239906311036, 0.00900499153137207, 0.008976415634155273, 0.008980575561523438, 0.008920415878295899, 0.008901056289672851, 0.008924960136413574, 0.008936063766479492, 0.008894399642944336, 0.008895520210266114, 0.00891766357421875, 0.009005120277404785, 0.008997119903564454, 0.008922752380371093, 0.00887769603729248, 0.008913408279418946, 0.008886048316955567, 0.00890287971496582, 0.008848383903503418, 0.008891200065612793, 0.008921279907226562, 0.008894463539123536, 0.008882143974304198, 0.008895551681518555, 0.009040512084960938, 0.008894335746765137, 0.008868191719055176, 0.00888652801513672, 
0.009028736114501954, 0.008948479652404785, 0.00899612808227539, 0.008915679931640625, 0.008884320259094238, 0.008904607772827148, 0.00888435173034668, 0.008899456024169921, 0.008857728004455567, 0.008925663948059081, 0.008896927833557128, 0.008902655601501466, 0.009251968383789062, 0.009206656455993653, 0.009748448371887207, 0.009644096374511718, 0.009084992408752442, 0.009075743675231934, 0.009069151878356933, 0.00906380844116211, 0.009069472312927247, 0.00902121639251709, 0.009080320358276368, 0.00900476837158203, 0.008905695915222169, 0.00897436809539795, 0.00903974437713623, 0.009011327743530273, 0.008996416091918945, 0.008902336120605469, 0.00898863983154297, 0.009016096115112305, 0.0089169921875, 0.008947711944580078, 0.008953280448913574, 0.008976192474365234, 0.00896281623840332, 0.009016608238220215, 0.00894819164276123, 0.008900863647460937, 0.009073887825012207, 0.008956704139709473, 0.008971936225891113, 0.009031935691833496, 0.009052255630493163, 0.008937600135803223, 0.009183327674865722, 0.00936729621887207, 0.009170975685119629, 0.009100447654724121, 0.009089887619018555, 0.00911359977722168, 0.009151743888854981, 0.009042719841003417, 0.009066271781921386, 0.008980352401733399, 0.008958271980285645, 0.009013376235961913, 0.009016832351684571, 0.008954239845275878, 0.008959168434143067, 0.00899142360687256, 0.009023200035095215, 0.009021759986877442, 0.008988767623901368, 0.008998271942138671, 0.00895587158203125, 0.009026207923889161, 0.00914575958251953, 0.00922646427154541, 0.00931388759613037, 0.009453439712524414, 0.009427871704101563, 0.009326399803161621, 0.009375935554504394, 0.009025376319885254, 0.009351167678833008, 0.009291775703430176, 0.00933683204650879, 0.009234432220458985, 0.009414655685424805, 0.009320128440856933, 0.009290047645568848, 0.009330944061279297, 0.009352959632873536, 0.009271583557128905, 0.00928508758544922, 0.009337087631225587, 0.00931760025024414, 0.009444128036499024, 0.00930940818786621, 0.009294624328613282, 0.009267200469970703, 0.0093306884765625, 0.009320639610290527, 0.009284992218017579, 0.009300479888916016, 0.009233344078063964, 0.009427488327026367, 0.009322015762329102, 0.009265472412109375, 0.009273728370666505, 0.009271455764770507, 0.009244288444519043, 0.0091911039352417, 0.009250720024108887, 0.00912172794342041, 0.009114080429077149, 0.009251296043395996, 0.009256959915161133, 0.009260224342346192, 0.0093721923828125, 0.0093405122756958, 0.009294528007507324, 0.009182656288146972, 0.00909779167175293, 0.009099040031433105, 0.009097599983215332, 0.009152352333068848, 0.009132032394409179, 0.009340928077697755, 0.009084063529968262, 0.00921183967590332, 0.009147135734558106, 0.009135647773742676, 0.009182880401611328, 0.009067487716674805, 0.009076736450195312, 0.009117695808410644, 0.00919753646850586, 0.009109536170959472, 0.00909721565246582, 0.009011199951171875, 0.009276864051818847, 0.00911411190032959, 0.00902143955230713, 0.009039487838745117, 0.00897715187072754, 0.008824895858764649, 0.00921395206451416, 0.009160160064697265, 0.009336735725402832, 0.00952188777923584, 0.00932585620880127, 0.009296256065368652, 0.00933017635345459, 0.00932038402557373, 0.009300352096557617, 0.009373951911926269, 0.009292223930358886, 0.00923151969909668, 0.009229120254516601, 0.009287455558776855, 0.009316351890563965, 0.009312255859375, 0.009261055946350098, 0.009232224464416504, 0.00947804832458496, 0.0094901123046875, 0.009429727554321289, 0.009426783561706543, 0.009363455772399902, 0.009332639694213867, 0.00933283233642578, 
0.009371616363525391, 0.00935324764251709, 0.00934659194946289, 0.009468576431274415, 0.009349247932434083, 0.009277119636535644, 0.009242783546447754, 0.009306976318359375, 0.00950169563293457, 0.009266559600830077, 0.009239168167114257, 0.009476320266723633, 0.009305343627929688, 0.009314240455627442, 0.009327199935913086, 0.009267295837402344, 0.00917846393585205, 0.009113856315612793, 0.009038144111633301, 0.009035776138305664, 0.009064512252807617, 0.009420543670654298, 0.009120063781738281, 0.009358559608459473, 0.009143327713012695, 0.0091277437210083, 0.00909222412109375, 0.009220319747924805, 0.009107904434204101, 0.009184415817260743, 0.009194368362426758, 0.009192768096923829, 0.009052672386169434, 0.00904364776611328, 0.008974847793579101, 0.009029631614685058, 0.009054143905639649, 0.008765376091003418, 0.00903987216949463, 0.009017215728759766, 0.00905235195159912, 0.009009087562561035, 0.008978015899658203, 0.009014016151428222, 0.009006208419799804, 0.00904041576385498, 0.008945311546325684, 0.00902291202545166, 0.008999839782714843, 0.008959136009216308, 0.008910719871520995, 0.009056991577148437, 0.008986880302429199, 0.009013407707214356, 0.00898953628540039, 0.008952351570129394, 0.008910880088806153, 0.009078240394592286, 0.008983712196350097, 0.009006912231445313, 0.008983551979064941, 0.008888544082641602, 0.00897103977203369, 0.009257087707519531, 0.008972160339355469, 0.009062399864196777, 0.008932767868041992, 0.009001888275146485, 0.009014975547790528, 0.008988672256469727, 0.008964351654052734, 0.008935296058654785, 0.008934528350830077, 0.008967071533203126, 0.008972127914428711, 0.009035840034484863, 0.008935615539550782, 0.008955936431884765, 0.009078432083129882, 0.009048128128051757, 0.00894976043701172, 0.008920127868652344, 0.00891327953338623, 0.008992671966552734, 0.008957663536071777, 0.008942463874816894, 0.00891648006439209, 0.008935168266296386, 0.00897926425933838, 0.00896025562286377, 0.009017087936401368, 0.009006464004516601, 0.008891039848327637, 0.008946911811828613, 0.008966655731201171, 0.009014719963073731, 0.00906937599182129, 0.008936927795410157, 0.008924768447875977, 0.008975296020507813, 0.008783359527587891, 0.008978688240051269, 0.008972319602966308, 0.008934656143188476, 0.008958784103393555, 0.008953760147094727, 0.009000479698181152, 0.008987104415893555, 0.008904352188110351, 0.008945599555969238, 0.009046624183654785, 0.009029439926147461, 0.00897433567047119, 0.008912256240844726, 0.0089335355758667, 0.009005760192871094, 0.008992447853088378, 0.008941663742065429, 0.00893337631225586, 0.00899891185760498, 0.00900432014465332, 0.008935615539550782, 0.008973088264465332, 0.008949151992797852, 0.009057984352111817, 0.008965855598449706, 0.00903264045715332, 0.008931327819824218, 0.008966143608093263, 0.00889241600036621, 0.00901734447479248, 0.008932448387145997, 0.009218976020812989, 0.008927200317382812, 0.008882207870483399, 0.008996416091918945, 0.008973919868469238, 0.008940735816955566, 0.00894883155822754, 0.008854080200195312, 0.008947327613830566, 0.00893894386291504, 0.008927328109741211, 0.00897110366821289, 0.008914463996887207, 0.009054688453674317, 0.008892127990722656, 0.009031968116760254, 0.00897433567047119, 0.00896121597290039, 0.008940447807312011, 0.00903286361694336, 0.009027935981750489, 0.009007360458374023, 0.008974495887756348, 0.008955519676208496, 0.008989055633544922, 0.00903382396697998, 0.008937088012695313, 0.0089684476852417, 0.008943296432495118, 0.009074687957763672, 0.009037311553955079, 
0.008779775619506837, 0.0090250883102417, 0.00907260799407959, 0.009032159805297852, 0.009003007888793945, 0.009025312423706055, 0.009251040458679199, 0.009063424110412598, 0.009055232048034668, 0.0090316801071167, 0.009015071868896484, 0.009052127838134765, 0.009256352424621582, 0.009104031562805176, 0.009066975593566895, 0.009070303916931153, 0.009052160263061524, 0.009003168106079101, 0.008945376396179198, 0.009457792282104492, 0.009411871910095215, 0.009106271743774414, 0.009103232383728028, 0.009074687957763672, 0.008994112014770508, 0.00898528003692627, 0.009059455871582031, 0.00902025604248047, 0.009026880264282227, 0.009005087852478028, 0.00892304039001465, 0.00898089599609375, 0.00893990421295166, 0.00891862392425537, 0.008963871955871582, 0.00889510440826416, 0.008916319847106934, 0.008954463958740234, 0.008924351692199708, 0.009155712127685546, 0.009075839996337891, 0.008944479942321778, 0.008997983932495117, 0.009138879776000976, 0.00891859245300293, 0.008948160171508788, 0.008910847663879394, 0.008923328399658204, 0.008885215759277344, 0.009301952362060547, 0.008932031631469727, 0.009013471603393554, 0.008978431701660156, 0.008886272430419923, 0.0089169921875, 0.008943615913391113, 0.008916000366210937, 0.008981472015380859, 0.00889027214050293, 0.008992575645446777, 0.009027872085571289, 0.009183263778686524, 0.009023296356201171, 0.00871833610534668, 0.008939328193664551, 0.008906944274902344, 0.008929280281066895, 0.008945664405822755, 0.008869888305664063, 0.009308128356933594, 0.00904435157775879, 0.009031328201293945, 0.009134079933166504, 0.008963904380798339, 0.009275456428527833, 0.008982624053955078, 0.008888352394104003, 0.009125344276428223, 0.009044511795043945, 0.008957951545715333, 0.008887904167175293, 0.008943136215209962, 0.008980863571166993, 0.00923481559753418, 0.009031807899475098, 0.009202719688415528, 0.009075807571411134, 0.009537023544311523, 0.009015168190002441, 0.008980992317199708, 0.00900710391998291, 0.00892518424987793, 0.008932352066040039, 0.009044992446899413, 0.009048064231872559, 0.008906880378723144, 0.008877951622009278, 0.00890390396118164, 0.008920096397399902, 0.008936927795410157, 0.008937184333801269, 0.008918911933898925, 0.008966848373413086, 0.008931327819824218, 0.008903936386108398, 0.00892512035369873, 0.008972864151000976, 0.009044223785400391, 0.008931232452392577, 0.009032064437866211, 0.008974047660827636, 0.008938688278198242, 0.008909279823303223, 0.008884575843811035, 0.008939871788024903, 0.00904742431640625, 0.009107711791992188, 0.009374879837036132, 0.00939907169342041, 0.009270943641662598, 0.009342720031738281, 0.009204607963562011, 0.009121600151062012, 0.009086112022399902, 0.009022303581237793, 0.009064640045166016, 0.008760224342346192, 0.008918848037719727, 0.009062399864196777, 0.009063520431518556, 0.008918047904968262, 0.008920607566833496, 0.008910911560058594, 0.00894320011138916, 0.00894643211364746, 0.009028544425964355, 0.008952735900878906, 0.008937631607055664, 0.008965056419372558, 0.00889139175415039, 0.008926655769348145, 0.009083392143249512, 0.009041024208068847, 0.010453951835632325, 0.009019295692443847, 0.008962240219116211, 0.008943488121032715, 0.009011232376098632, 0.008972096443176269, 0.009152704238891602, 0.00890060806274414, 0.00905731201171875, 0.009468255996704101, 0.009087615966796875, 0.008996864318847657, 0.009019071578979493, 0.00891321563720703, 0.008965408325195313, 0.008897120475769044, 0.008911328315734863, 0.009046815872192382, 0.008878975868225098, 0.008942879676818848, 
0.00891766357421875, 0.008910112380981446, 0.008839967727661133, 0.008869728088378907, 0.008865951538085937, 0.008939519882202148, 0.008887999534606934, 0.0088722562789917, 0.008931327819824218, 0.00889459228515625, 0.00892460823059082, 0.008862144470214843, 0.00898252773284912, 0.008963616371154785, 0.008943391799926757, 0.008885120391845703, 0.008865280151367188, 0.009015456199645996, 0.008972160339355469, 0.008972576141357422, 0.0088985595703125, 0.008879136085510254, 0.00906339168548584, 0.008905983924865723, 0.00923519992828369, 0.00890675163269043, 0.008789504051208496, 0.008903200149536133, 0.008904191970825195, 0.009003680229187011, 0.008957695960998536, 0.0088985595703125, 0.008931327819824218, 0.008931327819824218, 0.00899443244934082, 0.00893785572052002, 0.008973535537719726, 0.008902688026428223, 0.009425663948059082, 0.00942080020904541, 0.009029631614685058, 0.008955904006958008, 0.008971936225891113, 0.008919296264648437, 0.008939616203308106, 0.008884511947631835, 0.008987648010253906, 0.009005791664123535, 0.00894483184814453, 0.008960831642150878, 0.00892518424987793, 0.009054271697998047, 0.008977888107299804, 0.008910816192626953, 0.008964608192443848, 0.008926752090454102, 0.008872415542602539, 0.008853823661804198, 0.00894115161895752, 0.008939616203308106, 0.00891321563720703, 0.009130847930908204, 0.008915103912353516, 0.008951647758483887, 0.008936320304870606, 0.008935392379760742, 0.008908127784729004, 0.009070303916931153, 0.00889913558959961, 0.00893337631225586, 0.008919424057006835, 0.008954879760742187, 0.008978528022766113, 0.009075615882873535, 0.008996800422668457, 0.008953696250915527, 0.008892640113830567, 0.008916095733642578, 0.008950655937194824, 0.008921088218688965, 0.00890675163269043, 0.008914943695068359, 0.008975680351257325, 0.008992992401123046, 0.008890336036682128, 0.009032064437866211, 0.008907103538513183, 0.008935199737548829, 0.00901529598236084, 0.008711487770080566, 0.008962752342224121, 0.008973407745361327, 0.008958463668823241, 0.0089967679977417, 0.008958016395568848, 0.009042367935180665, 0.008972288131713867, 0.008961536407470704, 0.008926912307739258, 0.009036031723022461, 0.008944191932678223, 0.008924480438232421, 0.008905216217041016, 0.008963456153869629, 0.008974464416503906, 0.00895628833770752, 0.009013567924499512, 0.008898048400878907, 0.008966719627380371, 0.00893945598602295, 0.008925408363342284, 0.008918751716613769, 0.008931072235107421, 0.008887968063354491, 0.008956576347351074, 0.008955295562744141, 0.009091232299804687, 0.009257408142089843, 0.009027584075927735, 0.009217023849487305, 0.009022463798522949, 0.008945631980895995, 0.009179167747497559, 0.008966336250305176, 0.008977727890014648, 0.008964159965515136, 0.009050368309020996, 0.009023455619812012, 0.008951647758483887, 0.00912831974029541, 0.008957951545715333, 0.008996895790100098, 0.009002976417541504, 0.009074111938476563, 0.00891532802581787, 0.008909215927124023, 0.009000320434570313, 0.008901056289672851, 0.009533408164978028, 0.009303456306457519, 0.009564800262451172, 0.009824224472045898, 0.009009344100952148, 0.008959808349609376, 0.00897593593597412, 0.008991104125976562, 0.008963680267333985, 0.008892895698547364, 0.00921993637084961, 0.008997023582458496, 0.008946816444396972, 0.008992799758911133, 0.008693696022033692, 0.00901411247253418, 0.008976479530334473, 0.009038687705993653, 0.008971199989318848, 0.009017279624938965, 0.008980544090270996, 0.009056608200073242, 0.009118464469909668, 0.009069343566894532, 0.009209983825683594, 
0.00898374366760254, 0.00891478443145752, 0.008922240257263184, 0.008973728179931641, 0.00894979190826416, 0.008939935684204101, 0.008996864318847657, 0.009388031959533692, 0.009024895668029786, 0.008978272438049316, 0.008956704139709473, 0.008906463623046874, 0.008942015647888183, 0.008886112213134765, 0.008887840270996093, 0.009097344398498535, 0.009130335807800294, 0.008893471717834472, 0.008913887977600097, 0.009010944366455078, 0.009147839546203613, 0.00893939208984375, 0.008931520462036133, 0.008952608108520508, 0.008964287757873534, 0.008975808143615722, 0.0091112642288208, 0.00929145622253418, 0.009233344078063964, 0.009260095596313477, 0.009171168327331542, 0.009386655807495117, 0.009281215667724609, 0.00916220760345459, 0.009153440475463867, 0.009114720344543458, 0.009163007736206055, 0.009056832313537598, 0.0090032958984375, 0.009289536476135254, 0.00908083152770996, 0.008985856056213378, 0.009048704147338868, 0.00905023956298828, 0.009112768173217774, 0.009270367622375488, 0.009049344062805175, 0.008980959892272948, 0.009103360176086426, 0.00910854434967041, 0.008990752220153808, 0.009128416061401367, 0.008825471878051758, 0.009175040245056153, 0.009314432144165039, 0.00907260799407959, 0.00902086353302002, 0.009107808113098144, 0.00910870361328125, 0.009076767921447754, 0.009066495895385742, 0.00903872013092041, 0.009015071868896484, 0.008941791534423827, 0.008949695587158203, 0.008939583778381348, 0.008951807975769043, 0.008939231872558594, 0.00891318416595459, 0.008929280281066895, 0.008899744033813477, 0.008923999786376953, 0.008894463539123536, 0.009043968200683594, 0.009012255668640137, 0.008963040351867676, 0.00901244831085205, 0.009191328048706055, 0.009020319938659668, 0.008921088218688965, 0.008927007675170899, 0.00890236759185791, 0.008931520462036133, 0.008888799667358399, 0.008896608352661133, 0.009027551651000977, 0.00903548812866211, 0.00891318416595459, 0.008907679557800294, 0.008921216011047364, 0.008978400230407715, 0.00890988826751709, 0.008892191886901855, 0.009004639625549317, 0.008976991653442384, 0.009076224327087403, 0.008972512245178223, 0.008871935844421386, 0.008885567665100097, 0.008937952041625976, 0.008896736145019532, 0.008894463539123536, 0.0089169921875, 0.008980480194091797, 0.008920767784118652, 0.008898880004882813, 0.009584159851074218, 0.009062272071838379, 0.008979040145874024, 0.008976032257080078, 0.008933728218078613, 0.009021632194519043, 0.009000767707824707, 0.0089169921875, 0.00889260768890381, 0.008711935997009278, 0.008950400352478027, 0.00901696014404297, 0.008934656143188476, 0.00897267246246338, 0.008892800331115723, 0.008922847747802734, 0.008915007591247559, 0.008938976287841797, 0.008887040138244628, 0.008906432151794434, 0.008892736434936523, 0.008937567710876464, 0.008951168060302734, 0.00887600040435791, 0.008927807807922364, 0.008931327819824218, 0.008910143852233886, 0.008899456024169921, 0.008904191970825195, 0.008925056457519532, 0.00891043186187744, 0.008908703804016113, 0.00886246395111084, 0.00887827205657959, 0.008959808349609376, 0.008917344093322755, 0.00886732769012451, 0.008849760055541993, 0.008927231788635253, 0.009876704216003418, 0.010515583992004394, 0.01078883171081543, 0.00928326416015625, 0.009111552238464356, 0.008986144065856934, 0.008899040222167968, 0.008904095649719238, 0.00893398380279541, 0.00889241600036621, 0.008888064384460449, 0.008916704177856446, 0.008933728218078613, 0.008919487953186036, 0.008893728256225586, 0.008882656097412109, 0.008859999656677246, 0.009010432243347168, 
0.008892831802368164, 0.008901951789855957, 0.00900153636932373, 0.008926400184631348, 0.008897407531738281, 0.008874272346496582, 0.008898112297058105, 0.008990943908691406, 0.008884223937988281, 0.008867487907409669, 0.008917344093322755, 0.00886348819732666, 0.008879424095153809, 0.009102272033691407, 0.008935423851013183]",tokens/s,110.56440449435348,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4404.502528,4976.410624,0.0,4573.888512,4568.93696,s,1,11.1963173828125,11.1963173828125,0.0,11.1963173828125,11.1963173828125,11.1963173828125,11.1963173828125,[11.1963173828125],,kWh,0.00011957140402082585,1.318214900981315e-05,3.518030592199878e-05,0.0001679338589526378,,MB,2192.95744,5303.566336,0.0,4886.36416,4814.97344,s,10,2.093231201171875,0.2093231201171875,0.0007797172315558181,0.2089561462402344,0.21011659088134765,0.21054148025512695,0.21088139175415038,"[0.20884967041015626, 0.20860928344726562, 0.20850743103027344, 0.20868092346191405, 0.2087398986816406, 0.21096636962890625, 0.2090626220703125, 0.20997955322265624, 0.2098132781982422, 0.21002217102050783]",tokens/s,1222.9896050502252,kWh,6.164865391319394e-06,6.798674849971117e-07,4.066982420249949e-06,1.0911715296566454e-05,tokens/kWh,23461022.675376665,MB,2198.2208,5408.423936,0.0,4991.22176,4947.963904,s,10,25.41122998046875,2.541122998046875,0.019529103375626543,2.5415146484375,2.562885791015625,2.567808984375,2.5717475390625,"[2.515795166015625, 2.51910693359375, 2.524144287109375, 2.52001611328125, 2.540578857421875, 2.558209228515625, 2.542450439453125, 2.572732177734375, 2.556405029296875, 2.561791748046875]",tokens/s,24.792188354685013,kWh,7.462294721243101e-05,8.230870861251594e-06,4.549702945315009e-05,0.00012835084752683273,tokens/kWh,490842.1036084656,,s,630,25.40759236907958,0.040329511696951724,0.0006311088888960408,0.04027203178405762,0.04090074157714844,0.041167752838134765,0.042666635360717776,"[0.04029849624633789, 0.04018179321289062, 0.040214496612548827, 0.04018806457519531, 0.03997065734863281, 0.03990323257446289, 0.03958108901977539, 0.039676513671875, 0.039790687561035154, 0.03973654556274414, 0.04009024047851562, 0.03983366394042969, 0.03982534408569336, 0.03974111938476563, 0.03980758285522461, 0.039560958862304686, 0.03975161743164062, 0.03970198440551758, 0.03963510513305664, 0.03958534240722656, 0.039713088989257815, 0.03973798370361328, 0.0396044807434082, 0.03965923309326172, 0.03969638442993164, 0.03964083099365234, 0.0397949104309082, 0.03974934387207031, 0.03965679931640625, 0.03988131332397461, 0.039591487884521485, 0.039743934631347656, 0.039706302642822267, 0.03961008071899414, 0.03972367858886719, 0.039970718383789065, 0.039755680084228515, 0.03975638580322265, 0.04039779281616211, 0.04015923309326172, 0.0402050552368164, 0.04036198425292969, 0.04067478561401367, 0.04032380676269531, 0.04036198425292969, 0.04019209671020508, 
0.04043366241455078, 0.040246337890625, 0.040180545806884765, 0.04028400039672852, 0.03994047927856445, 0.0399189453125, 0.03996425628662109, 0.03993484878540039, 0.039742752075195314, 0.03995107269287109, 0.03999129486083984, 0.03993395233154297, 0.039867424011230466, 0.03990563201904297, 0.04019673538208008, 0.04003190231323242, 0.04020668792724609, 0.042608062744140626, 0.0401611213684082, 0.03988723373413086, 0.04080428695678711, 0.039828960418701174, 0.04088812637329101, 0.039790592193603515, 0.039869182586669924, 0.03999375915527344, 0.03967574310302734, 0.03971878433227539, 0.03997449493408203, 0.03999772644042969, 0.04007846450805664, 0.03981999969482422, 0.03984783935546875, 0.039952320098876955, 0.040425918579101563, 0.043878623962402344, 0.039959423065185545, 0.04154665756225586, 0.040083358764648434, 0.03960022354125976, 0.03965542221069336, 0.03958963012695312, 0.03974777603149414, 0.03970822525024414, 0.03971916961669922, 0.0397130241394043, 0.04006070327758789, 0.04080665588378906, 0.03979481506347656, 0.0395750732421875, 0.039669471740722655, 0.03961088180541992, 0.03953587341308594, 0.0395948486328125, 0.03965043258666992, 0.0397108154296875, 0.039416446685791015, 0.03954108810424805, 0.03977606582641602, 0.03983708953857422, 0.03973795318603516, 0.03960396957397461, 0.039747840881347654, 0.039569408416748046, 0.039485439300537106, 0.039613697052001955, 0.039370784759521486, 0.04039148712158203, 0.03967350387573242, 0.03935891342163086, 0.03939014434814453, 0.03963382339477539, 0.040548641204833986, 0.04002511978149414, 0.039790462493896485, 0.03986307144165039, 0.03977011108398437, 0.0400129280090332, 0.04004473495483398, 0.04001801681518555, 0.04023401641845703, 0.04007158279418945, 0.03955654525756836, 0.039641918182373045, 0.03979257583618164, 0.03974764633178711, 0.03993766403198242, 0.040268032073974606, 0.0411096305847168, 0.040408737182617185, 0.04154403305053711, 0.04038860702514648, 0.04118486404418945, 0.04026403045654297, 0.039936065673828125, 0.03981721496582031, 0.039642688751220706, 0.03981151962280274, 0.03962422561645508, 0.040048385620117186, 0.03977699279785156, 0.03962300872802734, 0.03964886474609375, 0.04018796920776367, 0.03998336029052734, 0.04038630294799805, 0.040594593048095706, 0.040445793151855466, 0.04022502517700195, 0.04019686508178711, 0.04000755310058594, 0.039886878967285155, 0.040127937316894534, 0.0398419189453125, 0.03979699325561523, 0.039928096771240235, 0.0397946891784668, 0.04070604705810547, 0.04012384033203125, 0.04002422332763672, 0.04002435302734375, 0.03985801696777344, 0.04068963241577148, 0.0409455680847168, 0.040062591552734374, 0.03986547088623047, 0.039618209838867186, 0.0399420166015625, 0.03982137680053711, 0.04006099319458008, 0.03973529434204102, 0.039763904571533205, 0.04005263900756836, 0.039870624542236326, 0.03969020843505859, 0.039769824981689454, 0.03973500823974609, 0.03989891052246094, 0.03982156753540039, 0.040007518768310546, 0.039834400177001954, 0.03978790283203125, 0.040632705688476566, 0.039981887817382815, 0.03988396835327149, 0.03976275253295898, 0.03975372695922851, 0.039717086791992186, 0.040982017517089846, 0.04337420654296875, 0.040229534149169924, 0.04027715301513672, 0.04016624069213867, 0.03982876968383789, 0.03982614517211914, 0.03968582534790039, 0.03957587051391601, 0.03980035018920899, 0.039841503143310544, 0.0395968017578125, 0.03970873641967773, 0.039746910095214846, 0.040589920043945314, 0.039664958953857424, 0.039685951232910154, 0.039895103454589846, 0.03992559814453125, 
0.03965171051025391, 0.03932995223999024, 0.039608352661132815, 0.03957612609863281, 0.039489376068115235, 0.03964092636108398, 0.0400549430847168, 0.03974553680419922, 0.03969023895263672, 0.039743679046630856, 0.03973305511474609, 0.03976166534423828, 0.03986198425292969, 0.04016182327270508, 0.04008883285522461, 0.039997344970703126, 0.03978652954101562, 0.0397496337890625, 0.039857311248779295, 0.04022441482543945, 0.040820735931396485, 0.040007648468017576, 0.03977606582641602, 0.03986054229736328, 0.039898399353027345, 0.040172000885009766, 0.04027571105957031, 0.04029276657104492, 0.04011391830444336, 0.03990143966674805, 0.04004249572753906, 0.039929855346679685, 0.03987830352783203, 0.04017596817016601, 0.04017692947387695, 0.040271678924560544, 0.04016038513183594, 0.04033884811401367, 0.040280223846435544, 0.04065513610839844, 0.041412479400634764, 0.04080640029907227, 0.0400849609375, 0.04034819030761719, 0.040134654998779294, 0.04005635070800781, 0.040226943969726564, 0.04019564819335938, 0.04000374221801758, 0.03996940612792969, 0.040218177795410155, 0.04041593551635742, 0.0402529296875, 0.04011983871459961, 0.04032806396484375, 0.04015520095825195, 0.040029983520507816, 0.03994646453857422, 0.04003142547607422, 0.040188865661621095, 0.040099742889404294, 0.04010367965698242, 0.039944255828857425, 0.04009088134765625, 0.040298526763916015, 0.04114089584350586, 0.03988275146484375, 0.04269055938720703, 0.04044822311401367, 0.040036384582519534, 0.04038025665283203, 0.040191455841064455, 0.04028460693359375, 0.04035379028320313, 0.040204288482666016, 0.04004249572753906, 0.04056063842773437, 0.040260929107666016, 0.04042822265625, 0.04041878509521484, 0.04031900787353516, 0.040269790649414064, 0.04030028915405273, 0.03991632080078125, 0.040152862548828126, 0.04040095901489258, 0.04018355178833008, 0.040349376678466796, 0.04111801528930664, 0.04022313690185547, 0.0411583366394043, 0.04049324798583984, 0.040110462188720705, 0.03999308776855469, 0.04031692886352539, 0.03996809768676758, 0.04011894226074219, 0.04026889419555664, 0.03998758316040039, 0.04007526397705078, 0.04056937789916992, 0.040459999084472655, 0.04104198455810547, 0.04078118515014648, 0.04163033676147461, 0.04068374252319336, 0.0409486083984375, 0.04088441467285156, 0.040556800842285155, 0.04153334426879883, 0.04078649520874023, 0.0404188461303711, 0.04063075256347656, 0.04084044647216797, 0.04053241729736328, 0.040976032257080075, 0.04099683380126953, 0.040805183410644534, 0.0407834243774414, 0.040640159606933596, 0.040753185272216795, 0.04073040008544922, 0.0412591667175293, 0.040787841796875, 0.040750110626220706, 0.04095564651489258, 0.04050492858886719, 0.04049555206298828, 0.04053740692138672, 0.0404384651184082, 0.04046255874633789, 0.04063209533691406, 0.04046847915649414, 0.04033542251586914, 0.041672672271728516, 0.04043529510498047, 0.04065478515625, 0.04041689682006836, 0.0403628158569336, 0.04044758224487305, 0.04046473693847656, 0.04044319915771484, 0.04058915328979492, 0.040835361480712894, 0.04060140609741211, 0.04099769592285156, 0.040513534545898434, 0.041040897369384766, 0.0405794563293457, 0.04045644760131836, 0.040409473419189455, 0.040514881134033204, 0.040393409729003904, 0.04033744049072266, 0.040228832244873045, 0.04038159942626953, 0.04162031936645508, 0.04028188705444336, 0.04054447937011719, 0.040441856384277344, 0.039725055694580076, 0.03943862533569336, 0.039424991607666014, 0.0392704963684082, 0.0397441291809082, 0.03992015838623047, 0.03988515090942383, 0.04043907165527344, 
0.03986812973022461, 0.03962777709960937, 0.039851486206054686, 0.03966934585571289, 0.03991584014892578, 0.04047526550292969, 0.04041523361206055, 0.04024729537963867, 0.040032257080078126, 0.040099327087402346, 0.03992374420166016, 0.03993443298339844, 0.039800830841064457, 0.03982684707641602, 0.039680606842041014, 0.040112033843994144, 0.041424991607666016, 0.04046851348876953, 0.040185985565185545, 0.040459518432617185, 0.04026015853881836, 0.04015756988525391, 0.03987401580810547, 0.039843551635742186, 0.04043414306640625, 0.04005683135986328, 0.03984384155273438, 0.039927806854248044, 0.04015513610839844, 0.040392257690429687, 0.040659393310546875, 0.04068796920776367, 0.04037801742553711, 0.040458240509033204, 0.040394016265869144, 0.04060211181640625, 0.040394977569580076, 0.04036377716064453, 0.040161537170410155, 0.04047872161865235, 0.04030035018920899, 0.040309951782226565, 0.0403507194519043, 0.0402815055847168, 0.04033145523071289, 0.040472606658935546, 0.04384969711303711, 0.0407628173828125, 0.0406945915222168, 0.04087033462524414, 0.0404804801940918, 0.040382366180419925, 0.04047081756591797, 0.040300670623779296, 0.04054547119140625, 0.040595966339111327, 0.04167241668701172, 0.04069721603393555, 0.040387489318847655, 0.04053926467895508, 0.040777599334716794, 0.04063126373291016, 0.0405436782836914, 0.04037609481811524, 0.040460769653320315, 0.04162591934204102, 0.041084449768066404, 0.04067951965332031, 0.04081039810180664, 0.04133116912841797, 0.040900543212890626, 0.041293888092041015, 0.04066204833984375, 0.04057964706420898, 0.041006912231445314, 0.041049793243408204, 0.04080940628051758, 0.040611553192138675, 0.04043356704711914, 0.04111142349243164, 0.04052425765991211, 0.04071571350097656, 0.04099334335327148, 0.040548351287841795, 0.040973472595214847, 0.040962913513183596, 0.04095948791503906, 0.04067935943603516, 0.04066550445556641, 0.040827041625976564, 0.04079411315917969, 0.040839168548583986, 0.040925182342529294, 0.04087603378295898, 0.041246177673339844, 0.040865951538085935, 0.04086412811279297, 0.04067734527587891, 0.040861438751220704, 0.04062966537475586, 0.04101824188232422, 0.04082825469970703, 0.040589599609375, 0.04072022247314453, 0.04108921432495117, 0.04076579284667969, 0.04068131256103515, 0.04063644790649414, 0.04068723297119141, 0.04081132888793945, 0.04087763214111328, 0.04068889617919922, 0.04082720184326172, 0.04069462585449219, 0.04156099319458008, 0.04125526428222656, 0.04088675308227539, 0.04060992050170899, 0.040820606231689455, 0.04084735870361328, 0.04076748657226562, 0.04075273513793945, 0.04070646286010742, 0.040804481506347655, 0.04064985656738281, 0.04147289657592773, 0.04081049728393555, 0.040850654602050784, 0.04076828765869141, 0.04069580841064453, 0.04094566345214844, 0.040984737396240235, 0.040746337890625, 0.04070041656494141, 0.040645790100097653, 0.040794017791748044, 0.040870849609375, 0.040833343505859376, 0.04106172943115234, 0.04070230484008789, 0.04084870529174805, 0.04065555191040039, 0.040784896850585936, 0.0403702392578125, 0.04012108612060547, 0.04043798446655274, 0.03979183959960937, 0.04044217681884766, 0.04019449615478515, 0.04034156799316406, 0.040389728546142575, 0.04019801712036133, 0.040143840789794924, 0.04041535949707031, 0.04029859161376953, 0.03999107360839844, 0.040207584381103514, 0.03997980880737305, 0.04076688003540039, 0.04023971176147461, 0.04043529510498047, 0.04665340805053711, 0.04117545700073242, 0.04053097534179687, 0.0405015983581543, 0.04071615982055664, 0.04006991958618164, 
0.039964672088623046, 0.03986227035522461, 0.04020016098022461, 0.04042150497436523, 0.04055440139770508, 0.04047872161865235, 0.04065689468383789, 0.03985513687133789, 0.040220993041992184, 0.04001449584960937, 0.039913471221923826, 0.03976595306396485, 0.03984419250488281, 0.04048406219482422, 0.040349952697753905, 0.04042982482910156, 0.04071187210083008, 0.04059552001953125, 0.04063462448120117, 0.04139414215087891, 0.04068451309204101, 0.04041830444335937, 0.040529918670654294, 0.04042252731323242, 0.040422401428222655, 0.04090252685546875, 0.04047679901123047, 0.040267681121826174, 0.04046793746948242, 0.04027238464355469, 0.04061183929443359, 0.04014009475708008, 0.040495777130126955, 0.040392223358154296, 0.040570945739746095, 0.04128607940673828, 0.0407039680480957, 0.04120284652709961, 0.040373470306396486, 0.04009884643554688, 0.04029718399047852, 0.04081449508666992, 0.04046454238891602, 0.040336288452148435, 0.040704959869384764, 0.040243198394775394, 0.04026367950439453, 0.042000385284423826, 0.040463935852050784, 0.040288318634033204, 0.04056921768188477, 0.04026582336425781, 0.04070556640625, 0.040431999206542966, 0.04079001617431641, 0.04107622528076172, 0.04029516983032227, 0.0402347526550293, 0.0401715202331543, 0.04013983917236328, 0.040498111724853514, 0.040172798156738285, 0.0408358383178711, 0.04012764739990234, 0.04040697479248047, 0.04099164962768555, 0.041062400817871096, 0.040417278289794925, 0.04002422332763672, 0.040195934295654295, 0.040504894256591796, 0.04031123352050781, 0.04031475067138672, 0.04040879821777344, 0.04073081588745117, 0.043383007049560544, 0.04082716751098633, 0.04056649780273437, 0.04118899154663086, 0.04451068878173828, 0.04067414474487305, 0.04060150527954102]",tokens/s,24.79573785852667,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1039.089664,904.855552,0.0,509.607936,491.434496,s,1,7.7753291015625,7.7753291015625,0.0,7.7753291015625,7.7753291015625,7.7753291015625,7.7753291015625,[7.7753291015625],,kWh,2.3769561345824284e-05,2.613634343838651e-06,8.089450916001772e-06,3.447264660566471e-05,,MB,1393.004544,1039.07328,0.0,629.1456,592.24832,s,10,0.2588756771087647,0.025887567710876462,0.00012938341568561115,0.025884959220886232,0.026041721725463866,0.026084604454040528,0.026118910636901856,"[0.026127487182617187, 0.025942752838134766, 0.02589206314086914, 0.02603219223022461, 0.02582262420654297, 0.02577039909362793, 0.025944799423217774, 0.025641632080078126, 0.02587785530090332, 0.02582387161254883]",tokens/s,9888.916674564354,kWh,7.569353342724582e-07,8.347716594352991e-08,4.6696856586011393e-07,1.307381066076102e-06,tokens/kWh,195811310.59848037,MB,1433.088,1053.753344,0.0,643.825664,605.085696,s,10,13.554338012695311,1.355433801269531,0.008772509112208286,1.3541259765625,1.3627170166015623,1.3704699584960938,1.3766723120117188,"[1.378222900390625, 1.355204833984375, 
1.34927197265625, 1.356851318359375, 1.354110107421875, 1.354141845703125, 1.3519068603515625, 1.360994140625, 1.34954833984375, 1.344085693359375]",tokens/s,46.479584573582805,kWh,3.870019445531075e-05,4.268203905542278e-06,1.4181515375140097e-05,5.714991373599313e-05,tokens/kWh,1102363.8686670926,,s,630,13.548963447570797,0.021506291186620317,0.0005965968022652301,0.021377023696899415,0.02176020202636719,0.022067100620269776,0.024917819614410433,"[0.021605600357055665, 0.021792800903320312, 0.021680896759033202, 0.021649280548095704, 0.022975807189941407, 0.021523263931274413, 0.021261823654174804, 0.02147177505493164, 0.021694303512573242, 0.022093952178955076, 0.02187900733947754, 0.021880640029907226, 0.02219004821777344, 0.022295583724975587, 0.022047712326049806, 0.02183590316772461, 0.02166156768798828, 0.021579967498779298, 0.0215897274017334, 0.022064319610595705, 0.021475839614868163, 0.02144483184814453, 0.021657440185546876, 0.022732288360595702, 0.02273161506652832, 0.021560991287231445, 0.02210771179199219, 0.021524415969848634, 0.021602367401123045, 0.021600255966186522, 0.022296831130981444, 0.022540128707885743, 0.022129344940185546, 0.021938528060913086, 0.022011552810668945, 0.021779680252075197, 0.021781280517578126, 0.02230681610107422, 0.02163711929321289, 0.021552928924560545, 0.021431615829467773, 0.02162371253967285, 0.021443872451782225, 0.021514976501464844, 0.021552831649780273, 0.021574047088623045, 0.021441728591918945, 0.02137571144104004, 0.021816864013671874, 0.026312959671020507, 0.022155744552612305, 0.021871871948242187, 0.02197747230529785, 0.02200998306274414, 0.021794815063476563, 0.021606399536132814, 0.02148761558532715, 0.021831167221069335, 0.021527040481567384, 0.021514144897460938, 0.0215, 0.021485567092895508, 0.021611616134643553, 0.020992416381835938, 0.021509727478027343, 0.02124595260620117, 0.02125209617614746, 0.02175814437866211, 0.021278528213500975, 0.02127667236328125, 0.021362688064575194, 0.021270719528198243, 0.021317087173461913, 0.021282432556152343, 0.021279455184936524, 0.021603904724121093, 0.021422527313232423, 0.02117046356201172, 0.02134601593017578, 0.02123161506652832, 0.021356864929199217, 0.021398975372314454, 0.021328128814697266, 0.021354303359985352, 0.021288288116455077, 0.021238624572753908, 0.021284128189086916, 0.021654144287109375, 0.02170889663696289, 0.02143395233154297, 0.021449119567871093, 0.02128691291809082, 0.021299039840698242, 0.021321887969970702, 0.02126233673095703, 0.023619264602661134, 0.023195968627929688, 0.02149488067626953, 0.021391519546508787, 0.02424083137512207, 0.02174086380004883, 0.021441280364990236, 0.0214835205078125, 0.021497856140136717, 0.021260480880737304, 0.02150102424621582, 0.021502368927001952, 0.021519968032836914, 0.02140438461303711, 0.021376256942749024, 0.021631744384765624, 0.02128281593322754, 0.021331968307495116, 0.02154924774169922, 0.021354272842407228, 0.021288991928100586, 0.021342111587524415, 0.021262432098388673, 0.02150409507751465, 0.021641120910644532, 0.02143756866455078, 0.02126969528198242, 0.02166524887084961, 0.02130556869506836, 0.02138422393798828, 0.021494752883911134, 0.020882368087768555, 0.021180383682250975, 0.021178400039672852, 0.02126755142211914, 0.02129158401489258, 0.021375423431396486, 0.021245248794555666, 0.021242431640625, 0.021190687179565428, 0.021315872192382814, 0.021313247680664064, 0.0212992000579834, 0.02124595260620117, 0.02121238327026367, 0.02147225570678711, 0.021624191284179688, 0.02530950355529785, 0.021676416397094726, 
0.021705951690673828, 0.021457408905029295, 0.021397247314453124, 0.02128211212158203, 0.021379903793334962, 0.02166387176513672, 0.021587968826293946, 0.021583871841430666, 0.021295103073120117, 0.02134809684753418, 0.021139711380004884, 0.021501951217651367, 0.021531999588012694, 0.02132649612426758, 0.021348320007324218, 0.021368671417236328, 0.021274816513061522, 0.021432319641113282, 0.02144816017150879, 0.021560895919799806, 0.021347232818603516, 0.021299264907836915, 0.021551103591918946, 0.021518335342407227, 0.02125619125366211, 0.021331968307495116, 0.02122956848144531, 0.02122480010986328, 0.02142064094543457, 0.021229631423950197, 0.02124569511413574, 0.02132953643798828, 0.021461631774902342, 0.021179647445678712, 0.021197568893432616, 0.021178367614746094, 0.021180320739746093, 0.021125215530395508, 0.0214466552734375, 0.021338144302368165, 0.021380735397338868, 0.021356063842773436, 0.021359424591064453, 0.02122137641906738, 0.021368831634521485, 0.0251943359375, 0.021628095626831056, 0.021242847442626955, 0.021303295135498047, 0.02133718490600586, 0.02122230339050293, 0.021299072265625, 0.02125632095336914, 0.021159423828125, 0.02126825523376465, 0.021248735427856446, 0.02126848030090332, 0.021622783660888673, 0.021434207916259766, 0.021233823776245116, 0.021264352798461915, 0.021231647491455077, 0.021300991058349608, 0.021405088424682618, 0.02131030464172363, 0.02126646423339844, 0.02140771293640137, 0.02134022331237793, 0.02125721549987793, 0.02172163200378418, 0.021404064178466797, 0.021243904113769533, 0.021431968688964843, 0.021358943939208983, 0.021323776245117186, 0.021370847702026366, 0.02231839942932129, 0.021557056427001953, 0.021398048400878906, 0.021262720108032228, 0.021308799743652344, 0.02149407958984375, 0.02175382423400879, 0.02163484764099121, 0.021381696701049804, 0.021356351852416994, 0.021370912551879884, 0.0214466552734375, 0.021483295440673827, 0.021275007247924804, 0.02123776054382324, 0.021448703765869142, 0.021542783737182616, 0.025877983093261718, 0.021785247802734376, 0.021552160263061525, 0.021607744216918946, 0.021421728134155275, 0.021355520248413085, 0.021430784225463868, 0.021395103454589844, 0.02133078384399414, 0.021299264907836915, 0.0213370246887207, 0.02130636787414551, 0.02127462387084961, 0.02131059265136719, 0.021405599594116212, 0.020952192306518555, 0.02130624008178711, 0.021290912628173828, 0.021399648666381835, 0.021413888931274414, 0.02151356887817383, 0.02130169677734375, 0.02143254470825195, 0.02122547149658203, 0.021215232849121093, 0.021376096725463867, 0.021252960205078126, 0.021239871978759765, 0.021413440704345702, 0.02124435234069824, 0.02112512016296387, 0.021212160110473634, 0.021206016540527343, 0.021235071182250976, 0.021238399505615235, 0.021321727752685548, 0.02134121513366699, 0.02202524757385254, 0.02237843132019043, 0.021837472915649414, 0.021713247299194337, 0.021507551193237304, 0.02140332794189453, 0.021338111877441408, 0.0215797119140625, 0.021490591049194336, 0.02143177604675293, 0.025245824813842774, 0.02139792060852051, 0.021419551849365233, 0.02140208053588867, 0.021377023696899415, 0.02124595260620117, 0.02127872085571289, 0.021243904113769533, 0.021208576202392578, 0.02150169563293457, 0.02126006317138672, 0.021238752365112305, 0.021665599822998045, 0.02137660789489746, 0.021264991760253905, 0.021379072189331053, 0.021213184356689452, 0.021204992294311522, 0.02124595260620117, 0.021178304672241213, 0.021965919494628908, 0.02206937599182129, 0.022513952255249024, 0.02148204803466797, 
0.021298879623413085, 0.021582143783569336, 0.021303295135498047, 0.021302976608276368, 0.02194384002685547, 0.021410591125488283, 0.021354496002197267, 0.02090598487854004, 0.021147647857666017, 0.021131263732910157, 0.021311296463012695, 0.021257408142089845, 0.02118966484069824, 0.021187616348266602, 0.021095359802246094, 0.021296735763549804, 0.021409791946411134, 0.021262144088745116, 0.021362815856933594, 0.02134614372253418, 0.021114879608154297, 0.021236352920532227, 0.021571584701538086, 0.026343360900878906, 0.02183788871765137, 0.021646751403808593, 0.021422592163085938, 0.021389408111572264, 0.021722335815429688, 0.02177872085571289, 0.021418495178222655, 0.021377023696899415, 0.02129100799560547, 0.021243904113769533, 0.021339744567871095, 0.021247520446777343, 0.021220224380493164, 0.021319679260253906, 0.021243743896484375, 0.02117238426208496, 0.021356544494628905, 0.0225218563079834, 0.021720895767211913, 0.021468416213989257, 0.02156025505065918, 0.0213723201751709, 0.021313663482666015, 0.021328351974487306, 0.021699871063232422, 0.021481472015380858, 0.021301984786987305, 0.021317632675170898, 0.02130496025085449, 0.02161712074279785, 0.021510047912597655, 0.021297088623046877, 0.021435455322265626, 0.02135321617126465, 0.021383424758911133, 0.02144256019592285, 0.021313343048095703, 0.021229759216308593, 0.021493696212768556, 0.021548255920410157, 0.021646175384521484, 0.021510015487670897, 0.021411968231201173, 0.021720287322998046, 0.02158252716064453, 0.02148182487487793, 0.02103091239929199, 0.02153654479980469, 0.021522655487060546, 0.021663808822631837, 0.021450687408447265, 0.02155708885192871, 0.021665952682495118, 0.021436416625976562, 0.021952384948730468, 0.02170857620239258, 0.021691936492919922, 0.021551296234130858, 0.021505727767944335, 0.021633695602416993, 0.02150831985473633, 0.021268543243408204, 0.02205683135986328, 0.02187788772583008, 0.02141900825500488, 0.021336063385009766, 0.021448415756225588, 0.021436704635620116, 0.02132374382019043, 0.02122979164123535, 0.021370687484741212, 0.021608448028564452, 0.02186057662963867, 0.021749088287353516, 0.021592512130737304, 0.021597312927246093, 0.021597055435180663, 0.021651456832885742, 0.02145280075073242, 0.021195968627929686, 0.021359424591064453, 0.02122956848144531, 0.02122547149658203, 0.021208351135253906, 0.021267168045043944, 0.02124185562133789, 0.02143596839904785, 0.021629695892333985, 0.02132054328918457, 0.02133087921142578, 0.021356000900268554, 0.021438623428344728, 0.021339584350585937, 0.021453216552734376, 0.021377471923828124, 0.021698144912719725, 0.021485984802246092, 0.021315584182739256, 0.021380416870117186, 0.021342912673950196, 0.021306400299072267, 0.021269472122192382, 0.02127020835876465, 0.021225183486938477, 0.02124220848083496, 0.02125644874572754, 0.021321727752685548, 0.021312704086303712, 0.021277376174926758, 0.02118492889404297, 0.021561344146728514, 0.021549055099487305, 0.021307743072509766, 0.021263872146606445, 0.02130668830871582, 0.021815967559814454, 0.02876025581359863, 0.02145039939880371, 0.021473535537719725, 0.021507328033447265, 0.021248863220214843, 0.02122889518737793, 0.02126892852783203, 0.021532896041870118, 0.02151628875732422, 0.021327871322631836, 0.021396991729736328, 0.021278528213500975, 0.022399551391601564, 0.021446399688720703, 0.021229759216308593, 0.021256128311157228, 0.02143257522583008, 0.02136515235900879, 0.02139299201965332, 0.021372928619384765, 0.02126028823852539, 0.02136636734008789, 0.021350816726684572, 
0.021327871322631836, 0.021336063385009766, 0.021410911560058594, 0.02144534492492676, 0.021442176818847657, 0.02145337677001953, 0.02133795166015625, 0.021279232025146484, 0.022124191284179688, 0.02256924819946289, 0.02160611152648926, 0.021407743453979493, 0.021309440612792968, 0.021204992294311522, 0.021528543472290038, 0.021796607971191408, 0.022412832260131837, 0.021838623046875, 0.021879871368408202, 0.021707679748535155, 0.02140771293640137, 0.021536800384521486, 0.02149616050720215, 0.021366432189941408, 0.021336063385009766, 0.021193727493286133, 0.02129622459411621, 0.021338016510009765, 0.02147327995300293, 0.021661056518554687, 0.021492351531982423, 0.02130732727050781, 0.021278783798217772, 0.02151219177246094, 0.021647232055664063, 0.021543039321899413, 0.02147942352294922, 0.021395456314086913, 0.021596160888671875, 0.021420032501220702, 0.021321727752685548, 0.021310815811157225, 0.02123638343811035, 0.021198848724365234, 0.021168127059936523, 0.021462432861328123, 0.02162508773803711, 0.021428575515747072, 0.02161769676208496, 0.021523199081420898, 0.021730623245239257, 0.021402399063110353, 0.021304864883422852, 0.02126860809326172, 0.02131769561767578, 0.021249631881713867, 0.02128339195251465, 0.021258432388305663, 0.02125212860107422, 0.021361888885498045, 0.021441696166992187, 0.02145449638366699, 0.021415807723999022, 0.02125222396850586, 0.02117750358581543, 0.02134841537475586, 0.02145359992980957, 0.02122547149658203, 0.021327199935913085, 0.021357215881347657, 0.021188959121704102, 0.021443296432495117, 0.02119980812072754, 0.021230720520019532, 0.021437311172485353, 0.021334016799926758, 0.021301248550415038, 0.021204992294311522, 0.021347360610961916, 0.021293695449829102, 0.02139904022216797, 0.0213656005859375, 0.021335391998291015, 0.0213592643737793, 0.021369951248168945, 0.02143734359741211, 0.021372928619384765, 0.02390425682067871, 0.02165760040283203, 0.02142207908630371, 0.021411008834838867, 0.02127881622314453, 0.02142076873779297, 0.02136003112792969, 0.02129680061340332, 0.02125715255737305, 0.021032960891723632, 0.021559328079223634, 0.02142633628845215, 0.0211777286529541, 0.02126892852783203, 0.0212541446685791, 0.021552255630493164, 0.02146329689025879, 0.021227359771728516, 0.02118934440612793, 0.02126582336425781, 0.021340160369873046, 0.021207712173461915, 0.021180416107177736, 0.021187583923339845, 0.021139936447143556, 0.021217567443847656, 0.021160192489624023, 0.021229248046875, 0.021233983993530273, 0.021309440612792968, 0.02123481559753418, 0.021245920181274414, 0.02121334457397461, 0.021309856414794923, 0.021303647994995116, 0.021565376281738283, 0.021469247817993163, 0.02142207908630371, 0.02134966468811035, 0.021412448883056642, 0.021150848388671876, 0.021238784790039062, 0.021182464599609374, 0.021202272415161132, 0.021325855255126952, 0.021299840927124024, 0.021219327926635743, 0.021472383499145507, 0.02142838478088379, 0.021471935272216795, 0.021546144485473633, 0.021367679595947264, 0.021387264251708983, 0.021481472015380858, 0.021450143814086914, 0.021316192626953126, 0.021307392120361326, 0.021729183197021485, 0.02168022346496582, 0.021317632675170898, 0.02138697624206543, 0.021297439575195313, 0.02121891212463379, 0.021203359603881835, 0.02125209617614746, 0.021301248550415038, 0.021526496887207033, 0.021347551345825194, 0.021371711730957033, 0.021265472412109375, 0.02141484832763672, 0.02129715156555176]",tokens/s,46.49802196587614,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,846.299136,538.836992,0.0,136.31488,130.303488,s,1,10.16727734375,10.16727734375,0.0,10.16727734375,10.16727734375,10.16727734375,10.16727734375,[10.16727734375],,kWh,1.4653379708335971e-05,1.6086978044745782e-06,4.57222588000003e-06,2.083430339281058e-05,,MB,1321.955328,643.694592,0.0,226.492416,199.948288,s,18,0.22027567958831787,0.012237537754906548,0.0002552080311013855,0.012170192241668702,0.012458918476104737,0.012567841815948486,0.013026694736480712,"[0.012455615997314453, 0.01246662425994873, 0.013141407966613769, 0.012359071731567382, 0.012101856231689453, 0.01218172836303711, 0.012148127555847169, 0.012034751892089844, 0.012222399711608887, 0.012089823722839356, 0.012102144241333008, 0.012196672439575196, 0.01200870418548584, 0.011990367889404297, 0.012147551536560058, 0.012158656120300293, 0.012228384017944335, 0.012241791725158692]",tokens/s,20919.240874036015,kWh,3.5513980893653594e-07,3.916544445361539e-08,1.8755894972262903e-07,5.818642031127804e-07,tokens/kWh,439965199.14867586,MB,1355.341824,658.374656,0.0,241.17248,199.950848,s,18,9.825886962890625,0.545882609049479,0.0036158385687006995,0.5462534484863282,0.549705224609375,0.5508181335449218,0.5534387731933593,"[0.543361083984375, 0.5438079223632812, 0.5502400512695312, 0.5494760131835937, 0.5486055297851562, 0.5469168701171875, 0.5432039794921875, 0.5461586303710938, 0.5475557861328125, 0.5391224365234375, 0.5485118408203125, 0.5400864868164063, 0.5433206176757812, 0.5467012329101563, 0.5422628784179687, 0.5463482666015625, 0.5461134033203126, 0.5540939331054687]",tokens/s,115.40942861268118,kWh,1.5806746993037305e-05,1.7432181828343251e-06,6.037285211435228e-06,2.358725038730686e-05,tokens/kWh,2670934.4652525736,,s,1134,9.8160063199997,0.008656090229276625,0.0001746867009826967,0.008621520042419433,0.00883907175064087,0.008922691297531128,0.009238346557617188,"[0.008560640335083008, 0.008852640151977538, 0.008713055610656738, 0.008783871650695801, 0.008712191581726075, 0.008666239738464355, 0.008621983528137207, 0.008629471778869628, 0.008590463638305665, 0.0085797119140625, 0.008535391807556152, 0.008561663627624512, 0.008555999755859375, 0.008534432411193848, 0.008701855659484864, 0.00858460807800293, 0.008573408126831055, 0.008588831901550293, 0.008671711921691895, 0.008619296073913573, 0.008563520431518555, 0.008555744171142578, 0.008682047843933106, 0.008642687797546386, 0.008566847801208496, 0.008822719573974609, 0.008645855903625488, 0.008540960311889648, 0.008499391555786133, 0.008505151748657227, 0.008482784271240234, 0.008513567924499511, 0.008517696380615234, 0.00850937557220459, 0.008531007766723633, 0.00862217617034912, 0.0085696964263916, 0.008507040023803711, 0.008758975982666015, 0.008551136016845703, 0.008699295997619629, 
0.008582880020141601, 0.008536895751953126, 0.008534015655517578, 0.008572671890258789, 0.008650367736816406, 0.008559040069580078, 0.008499103546142578, 0.00852847957611084, 0.008567487716674805, 0.008604991912841796, 0.008537792205810546, 0.00862326431274414, 0.008545248031616211, 0.008576895713806152, 0.008597248077392578, 0.008726911544799805, 0.008816703796386719, 0.008895456314086914, 0.00874995231628418, 0.008755167961120606, 0.008681471824645997, 0.008700991630554199, 0.008419487953186035, 0.008662079811096192, 0.008651552200317382, 0.00867459201812744, 0.008735456466674806, 0.008765503883361817, 0.008759231567382812, 0.00873459243774414, 0.008787167549133302, 0.008764320373535157, 0.008739968299865723, 0.00861683177947998, 0.008621248245239257, 0.008694111824035644, 0.008562399864196777, 0.008620800018310547, 0.008692992210388184, 0.008614591598510742, 0.008582624435424804, 0.008617823600769042, 0.008633088111877442, 0.008523776054382324, 0.008563776016235352, 0.008674240112304688, 0.00852406406402588, 0.008564607620239258, 0.008539679527282715, 0.008552000045776368, 0.008493984222412109, 0.00848851203918457, 0.008530048370361329, 0.008511648178100586, 0.008536160469055176, 0.008738719940185546, 0.008574624061584473, 0.008560256004333496, 0.009104031562805176, 0.008689184188842774, 0.008595392227172852, 0.008638208389282226, 0.008536928176879883, 0.008905728340148926, 0.008587424278259276, 0.008637280464172363, 0.008564831733703614, 0.008812447547912598, 0.008552000045776368, 0.008595583915710449, 0.008577343940734863, 0.008591360092163085, 0.008506591796875, 0.008530719757080078, 0.00862822437286377, 0.008646240234375, 0.008534239768981934, 0.008595647811889649, 0.008544320106506348, 0.008527008056640625, 0.008596256256103515, 0.008570879936218261, 0.008568832397460938, 0.00855568027496338, 0.008704863548278808, 0.008346719741821289, 0.008917759895324708, 0.008687583923339844, 0.008792256355285645, 0.008759296417236329, 0.008713631629943848, 0.008690272331237793, 0.00875887966156006, 0.008921504020690918, 0.008878399848937988, 0.008924896240234376, 0.008904191970825195, 0.009009856224060059, 0.008959775924682617, 0.008863743782043456, 0.00882259178161621, 0.008818880081176757, 0.008787263870239257, 0.008694592475891114, 0.008638208389282226, 0.008660703659057618, 0.008591775894165038, 0.00861184024810791, 0.00872447967529297, 0.009220095634460449, 0.009299712181091309, 0.00891539192199707, 0.00890556812286377, 0.009243616104125977, 0.008736767768859864, 0.008689663887023925, 0.009061823844909668, 0.008705920219421386, 0.008699808120727539, 0.008797120094299317, 0.008613568305969238, 0.008650912284851073, 0.008632160186767579, 0.00862019157409668, 0.008574975967407226, 0.008658944129943847, 0.008595264434814454, 0.008613632202148438, 0.008571328163146973, 0.008590399742126465, 0.008561023712158203, 0.008614272117614745, 0.00858233642578125, 0.008715264320373535, 0.00873692798614502, 0.008666175842285156, 0.00859449577331543, 0.008565567970275878, 0.008512191772460938, 0.008520095825195313, 0.008560447692871094, 0.008505311965942383, 0.00856873607635498, 0.008638912200927735, 0.008586943626403809, 0.008658944129943847, 0.008586560249328613, 0.00864352035522461, 0.008296064376831055, 0.008618399620056153, 0.008554911613464356, 0.009745535850524902, 0.008689663887023925, 0.008564672470092773, 0.00873583984375, 0.008903712272644042, 0.008688447952270507, 0.008560959815979003, 0.008527359962463378, 0.008534015655517578, 0.009941184043884277, 0.00859990406036377, 0.008672736167907714, 
0.008644800186157226, 0.008591168403625489, 0.008515775680541992, 0.0086364164352417, 0.008625791549682618, 0.008520031929016113, 0.008491040229797363, 0.00850534439086914, 0.00853321647644043, 0.00852070426940918, 0.008621791839599609, 0.008576767921447754, 0.008716608047485351, 0.008689855575561524, 0.008603455543518067, 0.008699456214904784, 0.008581567764282226, 0.008559935569763184, 0.00856447982788086, 0.008567680358886718, 0.009179200172424316, 0.008803775787353516, 0.00909779167175293, 0.009379839897155762, 0.008712191581726075, 0.008666303634643555, 0.008670016288757324, 0.008753151893615722, 0.008588992118835449, 0.008771903991699219, 0.00859545612335205, 0.00857910442352295, 0.008607711791992188, 0.008625375747680663, 0.008809247970581054, 0.008660608291625977, 0.00909171199798584, 0.00865664005279541, 0.008803968429565429, 0.0086779203414917, 0.008621600151062012, 0.008943936347961427, 0.008665087699890137, 0.008844927787780761, 0.008837504386901856, 0.008615936279296875, 0.008642271995544433, 0.008642560005187988, 0.008397760391235352, 0.00853763198852539, 0.008507391929626466, 0.008532256126403808, 0.008579392433166504, 0.008586655616760254, 0.008491519927978516, 0.008561887741088868, 0.008495455741882324, 0.008515999794006348, 0.008521535873413085, 0.008678751945495606, 0.008678239822387695, 0.008617664337158203, 0.008651071548461915, 0.00861952018737793, 0.00879257583618164, 0.00887551975250244, 0.008880640029907226, 0.0089366397857666, 0.008901439666748046, 0.008865376472473145, 0.008773856163024903, 0.009027423858642579, 0.008833375930786133, 0.00871132755279541, 0.008716959953308106, 0.008649920463562012, 0.008558879852294922, 0.008618304252624512, 0.008605279922485352, 0.00860591983795166, 0.008733280181884765, 0.008820735931396484, 0.008832639694213867, 0.00910358428955078, 0.008913056373596191, 0.009041919708251953, 0.008671232223510742, 0.008699104309082032, 0.008647808074951172, 0.00860534381866455, 0.008598848342895508, 0.00859222412109375, 0.008802240371704101, 0.008811871528625488, 0.008898880004882813, 0.00882044792175293, 0.008802847862243652, 0.008719776153564453, 0.008809056282043457, 0.008650303840637207, 0.00863484764099121, 0.008848511695861816, 0.008696288108825683, 0.008591135978698731, 0.008611680030822754, 0.008696576118469239, 0.00859273624420166, 0.00863484764099121, 0.008566847801208496, 0.008652447700500488, 0.00861996841430664, 0.008351584434509278, 0.008663200378417968, 0.008851455688476563, 0.008835136413574219, 0.008826720237731933, 0.008794207572937012, 0.008705887794494629, 0.008697919845581055, 0.008642656326293945, 0.008613632202148438, 0.008591520309448241, 0.008534111976623536, 0.008556192398071288, 0.008843744277954101, 0.008560799598693847, 0.00860643196105957, 0.008638848304748535, 0.00863811206817627, 0.008547295570373535, 0.008534175872802734, 0.008578880310058594, 0.008531007766723633, 0.008509920120239258, 0.008582847595214844, 0.008574048042297363, 0.008505056381225586, 0.008521727561950684, 0.00856169605255127, 0.008699935913085937, 0.008580191612243653, 0.008529151916503906, 0.008636063575744628, 0.008565471649169923, 0.008546784400939941, 0.008572159767150879, 0.008566271781921387, 0.008508416175842285, 0.008669183731079102, 0.008821887969970703, 0.008547200202941894, 0.008573087692260742, 0.008709983825683594, 0.008660832405090331, 0.008616415977478028, 0.008611680030822754, 0.00862326431274414, 0.008828991889953614, 0.008929920196533204, 0.008906847953796386, 0.009038047790527344, 0.008934240341186523, 0.008960895538330078, 
0.008953184127807616, 0.008733311653137206, 0.00868284797668457, 0.008671903610229492, 0.008763263702392578, 0.008837247848510742, 0.008896063804626465, 0.008788415908813477, 0.008675328254699707, 0.008697312355041504, 0.008630592346191407, 0.008256992340087891, 0.008624544143676758, 0.008591679573059082, 0.008609184265136719, 0.008749055862426757, 0.008669599533081055, 0.008668671607971192, 0.008681471824645997, 0.008655648231506347, 0.008652511596679688, 0.008691712379455567, 0.008676575660705567, 0.008676287651062011, 0.008701791763305664, 0.00871628761291504, 0.008691712379455567, 0.008672767639160157, 0.008628352165222168, 0.00855679988861084, 0.008607872009277344, 0.008675456047058105, 0.00861580753326416, 0.008552448272705078, 0.008640735626220704, 0.008580575942993164, 0.008569408416748046, 0.008533760070800782, 0.00854412841796875, 0.008499327659606934, 0.00859545612335205, 0.008520832061767578, 0.00857817554473877, 0.008729536056518555, 0.008541312217712402, 0.008721471786499024, 0.008608639717102051, 0.00877507209777832, 0.008759455680847168, 0.008689855575561524, 0.008616991996765137, 0.00927625560760498, 0.008577088356018066, 0.008560959815979003, 0.008661791801452637, 0.008524767875671387, 0.008529567718505859, 0.008647007942199707, 0.008542207717895508, 0.008523776054382324, 0.008534015655517578, 0.008564736366271973, 0.008552255630493165, 0.00855843162536621, 0.008648192405700684, 0.008588128089904785, 0.008517631530761719, 0.00853110408782959, 0.008606143951416016, 0.00850278377532959, 0.008536992073059082, 0.008546303749084473, 0.008493056297302246, 0.008527775764465333, 0.008159999847412109, 0.008482912063598632, 0.008548064231872559, 0.008546303749084473, 0.008567071914672852, 0.008572928428649903, 0.008541728019714355, 0.008538559913635254, 0.008541407585144043, 0.008544384002685547, 0.008516287803649903, 0.008525983810424805, 0.008646495819091797, 0.008820735931396484, 0.008802016258239747, 0.00878825569152832, 0.0087326717376709, 0.008648703575134278, 0.008638463973999023, 0.008566880226135254, 0.0085698881149292, 0.008602784156799317, 0.008556256294250488, 0.008542112350463867, 0.00856822395324707, 0.008624256134033203, 0.00862012767791748, 0.008701760292053223, 0.008794719696044923, 0.009934240341186524, 0.010820256233215333, 0.008756319999694824, 0.008719167709350585, 0.008647007942199707, 0.008591103553771972, 0.008630080223083496, 0.0088536958694458, 0.008742464065551759, 0.008665504455566407, 0.008628479957580567, 0.008559679985046387, 0.008592096328735352, 0.008560352325439453, 0.008546943664550782, 0.008603424072265624, 0.00870911979675293, 0.008571743965148926, 0.00856668758392334, 0.008501695632934571, 0.008509023666381836, 0.008571264266967773, 0.008801535606384277, 0.008614336013793945, 0.008595487594604493, 0.008605695724487305, 0.008544256210327148, 0.008517631530761719, 0.008572704315185547, 0.008560064315795899, 0.008588064193725585, 0.008552607536315917, 0.00852569580078125, 0.008548319816589356, 0.008245247840881348, 0.008639488220214844, 0.008576000213623047, 0.008604000091552735, 0.008824480056762696, 0.008841216087341308, 0.008756640434265137, 0.008720319747924804, 0.00915113639831543, 0.008672736167907714, 0.008744832038879394, 0.008655520439147949, 0.00872447967529297, 0.008656767845153809, 0.008712127685546875, 0.008683039665222167, 0.00866812801361084, 0.008684927940368653, 0.008578495979309082, 0.008557439804077149, 0.008539872169494629, 0.008659232139587402, 0.008540032386779785, 0.008507007598876954, 0.00889241600036621, 
0.008755776405334472, 0.008568767547607423, 0.008605695724487305, 0.009048064231872559, 0.008747072219848632, 0.008624064445495605, 0.008580960273742676, 0.008580384254455566, 0.008737664222717285, 0.008550399780273438, 0.008538240432739258, 0.008615679740905761, 0.008513664245605469, 0.008525312423706055, 0.008514047622680664, 0.008521504402160644, 0.008501343727111817, 0.00849523162841797, 0.008488256454467774, 0.00867801570892334, 0.008664192199707032, 0.008667807579040527, 0.00853763198852539, 0.008575743675231933, 0.008576671600341796, 0.008604000091552735, 0.009117535591125488, 0.008679519653320313, 0.00885155200958252, 0.008809663772583009, 0.009091744422912598, 0.009041600227355957, 0.008956640243530274, 0.008788928031921387, 0.008735520362854004, 0.00890675163269043, 0.00873305606842041, 0.008641887664794921, 0.00820019245147705, 0.008534015655517578, 0.008542207717895508, 0.008625151634216309, 0.008544511795043945, 0.008567551612854003, 0.008513728141784667, 0.00854201602935791, 0.008575008392333985, 0.008564703941345215, 0.008564672470092773, 0.008591423988342286, 0.008613823890686035, 0.008546400070190429, 0.008542176246643066, 0.008549951553344726, 0.008554176330566406, 0.008501376152038574, 0.0084650239944458, 0.008525343894958496, 0.00851417636871338, 0.008521568298339844, 0.008623231887817382, 0.008604543685913086, 0.008541407585144043, 0.008563424110412598, 0.008484095573425292, 0.008503616333007813, 0.008511455535888671, 0.008431584358215331, 0.008491519927978516, 0.008548416137695313, 0.008455936431884765, 0.008499456405639648, 0.008523776054382324, 0.008588352203369141, 0.008588224411010743, 0.008612895965576172, 0.008567968368530273, 0.008572064399719238, 0.008561311721801758, 0.00865449619293213, 0.008600992202758789, 0.008520416259765625, 0.00853388786315918, 0.008458592414855958, 0.008508480072021485, 0.008502047538757324, 0.008511648178100586, 0.008566783905029298, 0.008559807777404786, 0.008554368019104005, 0.008491968154907227, 0.008527872085571288, 0.008533247947692871, 0.008530688285827637, 0.008511487960815429, 0.00860483169555664, 0.008728511810302734, 0.008627424240112305, 0.008718015670776368, 0.008712191581726075, 0.0086364164352417, 0.008528832435607911, 0.008927103996276855, 0.008917087554931641, 0.009138175964355469, 0.008998016357421874, 0.008962752342224121, 0.008992416381835937, 0.0088787841796875, 0.008890527725219727, 0.00878166389465332, 0.008767647743225098, 0.00868934440612793, 0.008896512031555176, 0.008849408149719238, 0.009107423782348632, 0.00892140769958496, 0.008791775703430175, 0.008839167594909669, 0.008789088249206543, 0.008696736335754395, 0.008855327606201172, 0.008687168121337891, 0.008623040199279786, 0.00869315242767334, 0.0087225923538208, 0.0087041597366333, 0.008757087707519531, 0.008648927688598634, 0.00870364761352539, 0.008626463890075684, 0.008627488136291503, 0.008641183853149414, 0.00867948818206787, 0.008589183807373047, 0.008644736289978028, 0.008671135902404785, 0.008603743553161621, 0.008689408302307128, 0.008718303680419922, 0.008658464431762696, 0.008546688079833985, 0.008554304122924804, 0.008483231544494629, 0.008488415718078613, 0.008487903594970703, 0.008572640419006348, 0.008550144195556641, 0.008611680030822754, 0.008604063987731933, 0.00857919979095459, 0.00850931167602539, 0.008558112144470215, 0.008595168113708496, 0.008563455581665038, 0.008562687873840333, 0.008714176177978516, 0.008630335807800293, 0.008549856185913087, 0.008521984100341797, 0.008540672302246094, 0.008668416023254395, 
0.008565279960632324, 0.008597824096679687, 0.008549344062805176, 0.008840352058410645, 0.008708928108215332, 0.008656928062438965, 0.008660127639770508, 0.008610655784606933, 0.008525823593139649, 0.008568832397460938, 0.008669343948364258, 0.008672575950622558, 0.008740639686584473, 0.008708864212036134, 0.008675264358520509, 0.008671327590942383, 0.008581151962280273, 0.008615872383117677, 0.008608063697814942, 0.008642271995544433, 0.008566368103027343, 0.008515999794006348, 0.00854531192779541, 0.00854956817626953, 0.008538047790527343, 0.008554207801818847, 0.00852780818939209, 0.008521408081054687, 0.00849731159210205, 0.008540351867675782, 0.008476287841796874, 0.008523712158203125, 0.008587840080261231, 0.008572671890258789, 0.0084967041015625, 0.008513664245605469, 0.008468255996704102, 0.008555328369140625, 0.008507391929626466, 0.0085065279006958, 0.008522303581237792, 0.00848911952972412, 0.008483967781066894, 0.008490176200866699, 0.008429439544677734, 0.008499103546142578, 0.008433664321899414, 0.008443903923034669, 0.008455807685852051, 0.008516063690185547, 0.008578751564025879, 0.008538335800170898, 0.008490367889404297, 0.008655488014221191, 0.0086179838180542, 0.00868556785583496, 0.00858460807800293, 0.008546015739440919, 0.008509984016418457, 0.008622207641601563, 0.008607135772705079, 0.008545215606689454, 0.008476223945617676, 0.008522047996520996, 0.008538111686706543, 0.008202239990234375, 0.009005375862121582, 0.008838848114013672, 0.008704000473022461, 0.008678624153137207, 0.008751392364501953, 0.008649056434631347, 0.008648799896240235, 0.008630335807800293, 0.008551648139953613, 0.008569631576538086, 0.008570560455322266, 0.008692031860351562, 0.008867839813232421, 0.008767583847045898, 0.008722335815429687, 0.008706368446350098, 0.008668895721435546, 0.008660832405090331, 0.008608192443847655, 0.008527551651000977, 0.008593184471130371, 0.008596768379211426, 0.008720959663391113, 0.008664671897888183, 0.009057056427001954, 0.0086278076171875, 0.008649120330810547, 0.008630271911621093, 0.008627615928649902, 0.0085283203125, 0.008585375785827637, 0.008601663589477539, 0.00857260799407959, 0.008613375663757325, 0.008701855659484864, 0.008628064155578614, 0.00864998435974121, 0.008616895675659179, 0.008573760032653808, 0.008579168319702148, 0.008533344268798829, 0.0085830078125, 0.008708895683288575, 0.008583104133605957, 0.008533439636230468, 0.008545120239257813, 0.008548064231872559, 0.008493056297302246, 0.008517631530761719, 0.00854252815246582, 0.008558143615722656, 0.008515711784362792, 0.008474623680114746, 0.00849715232849121, 0.008540063858032227, 0.008511808395385742, 0.008461600303649902, 0.00847321605682373, 0.00855027198791504, 0.008613599777221679, 0.008618271827697754, 0.008545696258544922, 0.008259967803955078, 0.008582207679748535, 0.008721343994140625, 0.008548095703125, 0.008497407913208008, 0.008556544303894043, 0.008560735702514649, 0.008558336257934571, 0.008601759910583496, 0.008559871673583984, 0.008587519645690917, 0.008547967910766601, 0.00851625633239746, 0.008581343650817872, 0.00865609645843506, 0.008596256256103515, 0.00862822437286377, 0.008560640335083008, 0.008548352241516113, 0.00848844814300537, 0.00852620792388916, 0.008503583908081055, 0.008480832099914551, 0.008523679733276368, 0.008539936065673829, 0.00846009635925293, 0.008605728149414063, 0.00905577564239502, 0.008512319564819336, 0.008554400444030762, 0.00858518409729004, 0.00849830436706543, 0.008618783950805665, 0.008640640258789063, 0.008685440063476562, 
0.008666879653930664, 0.008563008308410645, 0.00868329620361328, 0.008618271827697754, 0.008757247924804687, 0.008745023727416992, 0.008745216369628906, 0.008660672187805175, 0.008852736473083496, 0.008678144454956054, 0.00873641586303711, 0.008767871856689454, 0.008682815551757813, 0.008762016296386718, 0.008894463539123536, 0.008723872184753418, 0.008665568351745605, 0.008976479530334473, 0.008885631561279297, 0.009263775825500488, 0.009211903572082519, 0.00912326431274414, 0.008949503898620606, 0.008841055870056153, 0.00873366355895996, 0.008744447708129884, 0.008671744346618653, 0.008609184265136719, 0.008273728370666504, 0.0086244478225708, 0.008591360092163085, 0.008683327674865723, 0.008638655662536621, 0.008660639762878418, 0.008605664253234863, 0.00874739170074463, 0.008600671768188477, 0.008536959648132324, 0.008611200332641601, 0.008573599815368652, 0.008550399780273438, 0.00854368019104004, 0.008557120323181152, 0.008642784118652344, 0.008592191696166993, 0.00858022403717041, 0.008562527656555175, 0.008636544227600098, 0.008636287689208984, 0.008660991668701172, 0.008613696098327637, 0.008618464469909668, 0.00860540771484375, 0.008515744209289551, 0.00849523162841797, 0.008530879974365234, 0.008492992401123048, 0.00858579158782959, 0.008538399696350098, 0.008730624198913574, 0.008796159744262694, 0.008689663887023925, 0.008700223922729492, 0.008670080184936523, 0.00863929557800293, 0.008627455711364746, 0.009017824172973633, 0.008611935615539551, 0.00853651237487793, 0.008605024337768555, 0.008595871925354003, 0.00855628776550293, 0.008516896247863769, 0.008526399612426757, 0.008472991943359374, 0.008510944366455078, 0.008497023582458496, 0.00849392032623291, 0.008463711738586425, 0.008548831939697266, 0.008502976417541504, 0.008513855934143066, 0.008605695724487305, 0.008499199867248536, 0.008722271919250488, 0.008652959823608398, 0.008814240455627441, 0.008621439933776856, 0.008630720138549805, 0.008618464469909668, 0.008634655952453614, 0.008165375709533691, 0.00855241584777832, 0.008493247985839844, 0.0085830078125, 0.00853769588470459, 0.008536576271057129, 0.00865395164489746, 0.008847871780395507, 0.008783807754516601, 0.008720735549926757, 0.008712096214294434, 0.008755295753479005, 0.008814592361450196, 0.008605695724487305, 0.008624128341674805, 0.00854582405090332, 0.008573408126831055, 0.00851097583770752, 0.00849289608001709, 0.008520352363586425, 0.00862435245513916, 0.00863385581970215, 0.008657183647155762, 0.008558496475219727, 0.00872867202758789, 0.00862559986114502, 0.008556735992431641, 0.008530303955078125, 0.008559967994689941, 0.008672256469726563, 0.008728416442871093, 0.008681280136108398, 0.008671232223510742, 0.008663040161132812, 0.008723520278930664, 0.008678208351135254, 0.008629664421081543, 0.008589599609375, 0.008570367813110352, 0.00866972827911377, 0.008755647659301757, 0.008720352172851562, 0.008904416084289551, 0.008675616264343261, 0.008852671623229981, 0.008762463569641114, 0.008693663597106934, 0.008650400161743165, 0.008672415733337402, 0.008689855575561524, 0.008614944458007813, 0.008615679740905761, 0.008732192039489746, 0.008824511528015137, 0.008944448471069335, 0.008910655975341797, 0.008853823661804198, 0.00893734359741211, 0.008619135856628417, 0.008620351791381835, 0.008671072006225586, 0.008558496475219727, 0.00873964786529541, 0.008291872024536132, 0.008588288307189941, 0.008523967742919922, 0.00864790439605713, 0.008583680152893066, 0.008583456039428712, 0.008597472190856933, 0.00857049560546875, 0.008544384002685547, 
0.008581055641174316, 0.008544032096862792, 0.008558879852294922, 0.008563872337341308, 0.008606559753417968, 0.008555520057678222, 0.008558752059936524, 0.008645471572875976, 0.008804351806640624, 0.008780927658081054, 0.008739071846008301, 0.00868166446685791, 0.00866761589050293, 0.008646559715270997, 0.008671584129333497, 0.008697919845581055, 0.008971936225891113, 0.008644063949584961, 0.00872707176208496, 0.008674912452697754, 0.00870032024383545, 0.008683520317077637, 0.008638463973999023, 0.008666624069213867, 0.008679936408996582, 0.00857907199859619, 0.00860102367401123, 0.008686271667480469, 0.008697664260864258, 0.008656160354614258, 0.00865766429901123, 0.00877999973297119, 0.008680480003356934, 0.008694560050964355, 0.008711232185363769, 0.008645312309265137, 0.008685152053833007, 0.00861628818511963, 0.008852031707763672, 0.008701696395874023, 0.008671232223510742, 0.00862822437286377, 0.008793312072753906, 0.00883135986328125, 0.008815008163452149, 0.008781824111938476, 0.008659104347229003, 0.00862019157409668, 0.008626976013183595, 0.008575903892517089, 0.008660287857055663, 0.008719039916992187, 0.008632063865661621, 0.008677311897277832, 0.00830463981628418, 0.008845503807067872, 0.008867648124694824, 0.008802304267883301, 0.00922764778137207, 0.008999551773071289, 0.009854687690734863, 0.009468192100524903, 0.008749055862426757, 0.008656928062438965, 0.008654111862182618, 0.00868012809753418, 0.008606975555419921, 0.008648927688598634, 0.008670047760009765, 0.008722111701965332, 0.008651103973388672, 0.008892064094543456, 0.008631360054016114, 0.008596416473388671, 0.008617183685302735, 0.008674271583557129, 0.008618080139160156, 0.00868118381500244, 0.008639871597290039, 0.008847040176391601, 0.00869164752960205, 0.008755552291870116, 0.008798463821411133, 0.00878326416015625, 0.008887136459350586, 0.008773792266845703, 0.00869375991821289, 0.00889468765258789, 0.008904512405395509, 0.009377984046936036, 0.008831999778747558, 0.008751903533935547, 0.0087326717376709, 0.008702048301696777, 0.009037728309631348, 0.008717727661132813, 0.008675583839416504, 0.008822272300720215, 0.008770367622375489, 0.008704256057739258, 0.008708191871643066, 0.008696991920471192, 0.008878751754760742, 0.008791135787963868, 0.00873356819152832, 0.008705727577209473, 0.008801728248596192, 0.008809087753295898, 0.00881884765625, 0.008741056442260742, 0.008673088073730469, 0.008744288444519042, 0.008655520439147949, 0.008716192245483399, 0.008650879859924316, 0.008843232154846191, 0.008652511596679688]",tokens/s,115.52559799085739,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in 
launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,839.897088,538.836992,0.0,136.31488,130.303488,s,1,9.65812890625,9.65812890625,0.0,9.65812890625,9.65812890625,9.65812890625,9.65812890625,[9.65812890625],,kWh,1.4996775529167412e-05,1.6452502984010335e-06,4.631948150000086e-06,2.127397397756853e-05,,MB,1326.563328,643.694592,0.0,226.492416,199.948288,s,18,0.2241654071807861,0.012453633732265896,8.80499140778804e-05,0.012435808181762695,0.012543759727478027,0.012572836446762084,0.012638138303756713,"[0.012537471771240235, 0.012310912132263183, 0.01240556812286377, 0.01252236843109131, 0.012434016227722168, 0.012398112297058105, 0.012309951782226563, 0.01238211154937744, 0.012437600135803223, 0.012529855728149413, 0.01248303985595703, 0.01255843162536621, 0.012420415878295898, 0.01265446376800537, 0.012490688323974609, 0.012516415596008301, 0.012352767944335938, 0.012421216011047363]",tokens/s,20556.249324784156,kWh,3.6212558629032065e-07,3.993570233231424e-08,1.8878461477171275e-07,5.908459033943476e-07,tokens/kWh,433277100.72847575,MB,1359.679488,658.374656,0.0,241.17248,199.950848,s,18,10.15314276123047,0.564063486735026,0.0027230221726733533,0.5634424438476562,0.5674551696777344,0.5690417907714844,0.5705391687011718,"[0.5636718139648438, 0.5611249389648437, 0.563481201171875, 0.5656438598632813, 0.560795654296875, 0.5634036865234375, 0.5687114868164063, 0.566916748046875, 0.5658809204101563, 0.562343505859375, 0.5603230590820313, 0.5709135131835937, 0.5632256469726562, 0.5654956665039063, 0.5644441528320312, 0.5614124145507813, 0.5625384521484375, 0.5628160400390625]",tokens/s,111.68955530992352,kWh,1.6341924386394827e-05,1.8022211027407102e-06,6.248381028783827e-06,2.439252651791936e-05,tokens/kWh,2582758.286794056,,s,1134,10.143010853767395,0.008944454015667897,0.00019918955233984295,0.00890780782699585,0.009072828674316406,0.009191447877883912,0.009858987846374513,"[0.008783967971801757, 0.00905731201171875, 0.009265888214111328, 0.008980480194091797, 0.00898252773284912, 0.00891868782043457, 0.008917344093322755, 0.008882111549377441, 0.008852864265441894, 0.008859904289245605, 0.008794560432434081, 0.008798399925231933, 0.008795968055725098, 0.008895775794982911, 0.008853599548339844, 0.008923744201660156, 0.008889984130859375, 0.008882847785949707, 0.009009183883666992, 0.008914912223815918, 0.008863136291503907, 0.008947808265686035, 0.008986847877502441, 0.008882080078125, 0.008890239715576172, 0.008990976333618164, 0.009008511543273926, 0.008970944404602051, 0.009002400398254394, 0.008919487953186036, 0.008924832344055176, 0.0089682559967041, 0.008980192184448243, 0.008970911979675294, 0.008910847663879394, 0.008902655601501466, 0.008848671913146972, 
0.008905440330505372, 0.00889027214050293, 0.008958047866821289, 0.008961440086364746, 0.008944224357604981, 0.009004735946655273, 0.00888259220123291, 0.008846591949462891, 0.00894223976135254, 0.009799391746520995, 0.008996447563171387, 0.009069536209106445, 0.009019392013549805, 0.008949472427368163, 0.008875455856323242, 0.008880703926086427, 0.008978655815124512, 0.00891267204284668, 0.008842623710632325, 0.008807135581970214, 0.008893983840942383, 0.00887337589263916, 0.008962847709655762, 0.00896019172668457, 0.008845312118530273, 0.008893888473510742, 0.008505311965942383, 0.008841216087341308, 0.008960224151611329, 0.008998687744140626, 0.009031200408935547, 0.008783616065979005, 0.008825599670410156, 0.008861663818359376, 0.008844799995422363, 0.008938240051269532, 0.008930815696716308, 0.0090600004196167, 0.008910655975341797, 0.008940544128417969, 0.00890006446838379, 0.00885750389099121, 0.008929696083068848, 0.00900710391998291, 0.008827008247375489, 0.008847007751464844, 0.0089617919921875, 0.008909600257873535, 0.00890550422668457, 0.008823871612548828, 0.008803487777709961, 0.008819392204284668, 0.008777728080749512, 0.00878985595703125, 0.008788319587707519, 0.009125696182250977, 0.008921312332153321, 0.009012160301208496, 0.008780351638793945, 0.008821023941040038, 0.008783871650695801, 0.00876966381072998, 0.009405823707580566, 0.008886112213134765, 0.008953856468200684, 0.00884598445892334, 0.008910847663879394, 0.008790016174316406, 0.008805600166320801, 0.008865920066833496, 0.008778112411499023, 0.008820320129394531, 0.00880617618560791, 0.008839615821838379, 0.008810144424438476, 0.00886070442199707, 0.009077792167663573, 0.008964096069335938, 0.009000864028930664, 0.008938143730163575, 0.009252127647399902, 0.008910880088806153, 0.00895702362060547, 0.008902463912963867, 0.008905759811401367, 0.008925663948059081, 0.008929759979248046, 0.008895968437194824, 0.008927328109741211, 0.00854377555847168, 0.0089071683883667, 0.00883516788482666, 0.008854784011840821, 0.008833791732788086, 0.00898863983154297, 0.008781855583190917, 0.008822496414184571, 0.008841055870056153, 0.008840959548950195, 0.009095840454101563, 0.00885155200958252, 0.008834976196289063, 0.008885472297668458, 0.008849823951721191, 0.008817024230957031, 0.008812191963195801, 0.008835424423217773, 0.008839424133300782, 0.008884063720703125, 0.008833056449890136, 0.008844160079956054, 0.008846336364746094, 0.008835071563720704, 0.00890060806274414, 0.008851679801940918, 0.00887337589263916, 0.008876416206359863, 0.008919039726257324, 0.008806528091430664, 0.009053855895996095, 0.008916447639465332, 0.008878975868225098, 0.008871808052062988, 0.008910207748413087, 0.008829888343811034, 0.008881855964660644, 0.008976384162902832, 0.008837120056152344, 0.008839167594909669, 0.008900351524353027, 0.00892460823059082, 0.008984416007995606, 0.008897503852844239, 0.008931327819824218, 0.009023584365844727, 0.008920991897583008, 0.00890060806274414, 0.008912511825561523, 0.00887564754486084, 0.008890687942504883, 0.008909536361694335, 0.008871328353881837, 0.008878496170043946, 0.008865216255187987, 0.008942303657531738, 0.0089617919921875, 0.009174207687377929, 0.009767519950866698, 0.009923871994018554, 0.00984160041809082, 0.009053407669067383, 0.00902444839477539, 0.008630592346191407, 0.008973600387573243, 0.008903231620788574, 0.008914048194885255, 0.008884287834167481, 0.008897055625915527, 0.009014719963073731, 0.009034624099731445, 0.009009152412414552, 0.009037152290344238, 0.008892064094543456, 
0.008946368217468262, 0.008902976036071777, 0.009162688255310058, 0.008949824333190918, 0.009012384414672852, 0.009100128173828125, 0.008953887939453125, 0.008916704177856446, 0.008861568450927735, 0.008849408149719238, 0.009122176170349121, 0.00890447998046875, 0.008926527976989746, 0.009415583610534668, 0.009150464057922364, 0.009019392013549805, 0.008946975708007813, 0.009005791664123535, 0.008961824417114258, 0.008958175659179687, 0.009029248237609862, 0.009496959686279297, 0.009040960311889648, 0.00902444839477539, 0.00916592025756836, 0.008995840072631836, 0.008933568000793457, 0.008971615791320801, 0.008925567626953125, 0.008925408363342284, 0.008906047821044922, 0.008906399726867676, 0.00885638427734375, 0.008955519676208496, 0.008984992027282715, 0.00895587158203125, 0.008939807891845702, 0.009004863739013672, 0.008941247940063476, 0.008925408363342284, 0.008857600212097168, 0.008971936225891113, 0.008919455528259278, 0.008826560020446777, 0.008850848197937012, 0.008840319633483886, 0.00892080020904541, 0.008943391799926757, 0.008898783683776855, 0.00902956771850586, 0.00889452838897705, 0.009005151748657226, 0.008557727813720703, 0.00891881561279297, 0.008853504180908203, 0.008881664276123047, 0.008988991737365722, 0.008900799751281739, 0.008822784423828126, 0.008855648040771484, 0.008759424209594727, 0.008758399963378906, 0.008907423973083497, 0.00894115161895752, 0.008853280067443847, 0.009241056442260743, 0.008914079666137695, 0.008936448097229004, 0.008916000366210937, 0.008794400215148926, 0.008806719779968261, 0.008790399551391602, 0.008809760093688964, 0.008863455772399902, 0.008775744438171387, 0.008811264038085938, 0.00880620765686035, 0.008855423927307128, 0.008948224067687988, 0.008859647750854491, 0.008849408149719238, 0.008816736221313477, 0.008837311744689942, 0.008838879585266112, 0.008839296340942384, 0.008913056373596191, 0.008851167678833007, 0.008885567665100097, 0.008944064140319825, 0.009046272277832031, 0.00899465560913086, 0.009031519889831544, 0.009107168197631837, 0.009005087852478028, 0.0091911678314209, 0.009198368072509766, 0.008945695877075195, 0.008958080291748046, 0.00892300796508789, 0.008947711944580078, 0.008945664405822755, 0.008956159591674805, 0.008996031761169433, 0.008851103782653809, 0.008797183990478515, 0.00890675163269043, 0.008847264289855957, 0.008871935844421386, 0.008879199981689453, 0.008810848236083984, 0.008850144386291505, 0.008850720405578614, 0.008833600044250487, 0.008811840057373047, 0.008777600288391113, 0.008498047828674317, 0.00879753589630127, 0.008845184326171875, 0.008950688362121583, 0.008994815826416015, 0.008923135757446288, 0.008955904006958008, 0.00885580825805664, 0.008890111923217774, 0.00881049633026123, 0.008912896156311035, 0.008877408027648926, 0.00884006404876709, 0.00886086368560791, 0.008870495796203613, 0.008898143768310546, 0.008960000038146973, 0.008878016471862793, 0.008860320091247558, 0.008931232452392577, 0.009019295692443847, 0.008816639900207519, 0.008908672332763672, 0.008959199905395508, 0.00889948844909668, 0.008859583854675293, 0.008865216255187987, 0.008837984085083008, 0.008940832138061523, 0.009063232421875, 0.009169856071472167, 0.009104448318481445, 0.00915833568572998, 0.00929302406311035, 0.009011424064636231, 0.009011327743530273, 0.008956352233886718, 0.008896672248840332, 0.008912927627563476, 0.008902751922607421, 0.009071552276611328, 0.009021535873413086, 0.009001664161682129, 0.009046079635620118, 0.0089335355758667, 0.009471360206604005, 0.00888700771331787, 
0.008907967567443848, 0.008900287628173829, 0.008844096183776856, 0.008887968063354491, 0.008884575843811035, 0.008870112419128417, 0.00891881561279297, 0.008853599548339844, 0.00887183952331543, 0.008849408149719238, 0.008882176399230958, 0.008853407859802246, 0.008833087921142579, 0.008805407524108886, 0.009191967964172364, 0.009059040069580079, 0.008761664390563965, 0.0100480318069458, 0.00938742446899414, 0.009710240364074706, 0.009046015739440917, 0.009967616081237793, 0.009328736305236816, 0.008943072319030762, 0.008964544296264648, 0.009011455535888672, 0.009064191818237305, 0.009051487922668458, 0.008956576347351074, 0.008916864395141602, 0.008925312042236328, 0.008871583938598633, 0.008905407905578613, 0.008903615951538086, 0.009087712287902831, 0.00891859245300293, 0.008933664321899414, 0.00888646411895752, 0.008863712310791015, 0.008912704467773437, 0.00882425594329834, 0.008833791732788086, 0.009187295913696289, 0.009103167533874511, 0.008939776420593262, 0.008898528099060058, 0.008876031875610351, 0.008865823745727539, 0.008819968223571777, 0.00884182357788086, 0.008824640274047852, 0.008823167800903321, 0.008905759811401367, 0.008862815856933593, 0.009008959770202636, 0.008955904006958008, 0.008991904258728027, 0.008917856216430663, 0.008892736434936523, 0.009022751808166503, 0.009294495582580567, 0.008972031593322754, 0.008918463706970215, 0.008839808464050292, 0.008843199729919433, 0.009053183555603026, 0.00889139175415039, 0.008833151817321777, 0.00882585620880127, 0.009215167999267578, 0.009995967864990234, 0.008935168266296386, 0.008917247772216797, 0.009027584075927735, 0.008983903884887696, 0.008932160377502442, 0.009039775848388672, 0.008931167602539062, 0.008904607772827148, 0.008548416137695313, 0.008865407943725586, 0.008843584060668945, 0.008841279983520509, 0.00887782382965088, 0.008881535530090332, 0.0089486083984375, 0.008939616203308106, 0.009004896163940429, 0.008933440208435058, 0.009162752151489258, 0.008982432365417481, 0.011307104110717774, 0.009969599723815918, 0.009244735717773438, 0.009076704025268554, 0.009107168197631837, 0.009034399986267089, 0.008993951797485352, 0.00900972843170166, 0.008986751556396484, 0.009069567680358886, 0.008933695793151855, 0.00893008041381836, 0.00895088005065918, 0.00895254421234131, 0.008930656433105469, 0.008903231620788574, 0.008882176399230958, 0.008896512031555176, 0.008883392333984375, 0.008839743614196777, 0.008879424095153809, 0.00886025619506836, 0.008819040298461913, 0.008936448097229004, 0.008829952239990235, 0.008869536399841308, 0.008855615615844727, 0.008845600128173828, 0.008824831962585449, 0.008847359657287598, 0.0091212158203125, 0.009071264266967773, 0.00892092800140381, 0.008856863975524902, 0.008868639945983886, 0.00888419246673584, 0.008924896240234376, 0.009051712036132812, 0.008909567832946778, 0.009013248443603515, 0.00913372802734375, 0.008954527854919434, 0.008980159759521484, 0.008951935768127441, 0.008905695915222169, 0.008909728050231934, 0.008901727676391602, 0.00891808032989502, 0.008875871658325195, 0.009008480072021484, 0.008880800247192382, 0.00849500846862793, 0.0089050235748291, 0.008815679550170898, 0.008877344131469726, 0.00883852767944336, 0.008821023941040038, 0.008849408149719238, 0.008942784309387207, 0.009171775817871094, 0.009117695808410644, 0.009555135726928711, 0.00939475154876709, 0.009320704460144043, 0.009145983695983887, 0.009055808067321777, 0.008942463874816894, 0.008920191764831543, 0.008845855712890625, 0.008827103614807128, 0.008980544090270996, 
0.008820799827575684, 0.0089169282913208, 0.008976384162902832, 0.008977408409118653, 0.008944640159606934, 0.008923232078552246, 0.008939423561096192, 0.008989791870117187, 0.008937472343444825, 0.008940447807312011, 0.00893945598602295, 0.00894489574432373, 0.008962176322937011, 0.009031776428222657, 0.00905891227722168, 0.009188544273376465, 0.008983360290527344, 0.00900499153137207, 0.00892073631286621, 0.008972288131713867, 0.008933759689331055, 0.008865344047546386, 0.008896896362304688, 0.008959391593933105, 0.008947423934936524, 0.009087103843688965, 0.008982815742492677, 0.00895680046081543, 0.009008831977844238, 0.009230239868164063, 0.00909324836730957, 0.009070207595825195, 0.00898863983154297, 0.008948287963867187, 0.008901472091674805, 0.008889280319213867, 0.008885631561279297, 0.008880607604980469, 0.008887999534606934, 0.008888832092285156, 0.00900710391998291, 0.008902463912963867, 0.008912447929382325, 0.008630528450012206, 0.008863424301147461, 0.008865344047546386, 0.008923839569091797, 0.008873087882995605, 0.008893376350402831, 0.008923135757446288, 0.008939519882202148, 0.008949184417724609, 0.008933600425720214, 0.008933216094970704, 0.008882687568664551, 0.008919072151184083, 0.008869183540344239, 0.008893247604370116, 0.008959263801574706, 0.008915519714355469, 0.0089169921875, 0.008945887565612792, 0.008921055793762208, 0.008936256408691405, 0.008911871910095214, 0.008914624214172363, 0.008958271980285645, 0.00889408016204834, 0.008886655807495118, 0.009176095962524415, 0.008977375984191894, 0.008971648216247559, 0.008939999580383301, 0.008884384155273437, 0.0088853759765625, 0.009032032012939454, 0.008989215850830078, 0.00898252773284912, 0.009045408248901368, 0.008884096145629882, 0.008958527565002442, 0.008989919662475586, 0.008948287963867187, 0.009084896087646484, 0.008941984176635743, 0.00894092845916748, 0.008886688232421875, 0.008882399559020995, 0.008919136047363281, 0.008851712226867675, 0.008844960212707519, 0.008845312118530273, 0.008865280151367188, 0.008851231575012207, 0.008798591613769531, 0.008824928283691406, 0.008822848320007324, 0.008828831672668456, 0.008952095985412597, 0.008809760093688964, 0.00899721622467041, 0.00917734432220459, 0.009037343978881836, 0.008847968101501465, 0.008844287872314453, 0.00883356761932373, 0.00860102367401123, 0.008894368171691895, 0.008911295890808106, 0.00885091209411621, 0.008867712020874023, 0.008837887763977051, 0.008865280151367188, 0.008845503807067872, 0.008872480392456054, 0.009061599731445312, 0.00881926441192627, 0.00888374423980713, 0.008800800323486329, 0.00888374423980713, 0.008853919982910156, 0.008859647750854491, 0.008967904090881348, 0.008864031791687012, 0.008835071563720704, 0.00880851173400879, 0.008815679550170898, 0.008784031867980957, 0.008769311904907227, 0.00879916763305664, 0.00880844783782959, 0.008824831962585449, 0.008816831588745118, 0.008776800155639648, 0.008815360069274903, 0.008771552085876464, 0.008802047729492187, 0.008786016464233399, 0.008806816101074219, 0.00884505558013916, 0.008832127571105956, 0.008829919815063477, 0.008814080238342285, 0.00884931182861328, 0.008872447967529297, 0.008873984336853028, 0.008977536201477052, 0.008880160331726073, 0.008885343551635743, 0.009036767959594726, 0.008968992233276367, 0.00890675163269043, 0.00892467212677002, 0.008901120185852051, 0.008966079711914062, 0.00889452838897705, 0.008992159843444824, 0.009002943992614747, 0.009148544311523437, 0.009009696006774903, 0.009017056465148925, 0.008974623680114747, 0.00899068832397461, 
0.008899776458740235, 0.008878944396972656, 0.008924256324768067, 0.009052127838134765, 0.009052895545959472, 0.008960224151611329, 0.008795743942260742, 0.008989343643188477, 0.008972288131713867, 0.008992511749267578, 0.008941503524780273, 0.008945088386535644, 0.009015935897827149, 0.009197279930114747, 0.009172991752624511, 0.009344896316528321, 0.009314399719238281, 0.009056575775146484, 0.008981663703918457, 0.009027647972106933, 0.009048864364624024, 0.008957951545715333, 0.009058303833007812, 0.008829952239990235, 0.008952832221984864, 0.00886143970489502, 0.008868096351623535, 0.008884223937988281, 0.008960224151611329, 0.008927328109741211, 0.009975008010864258, 0.011098591804504395, 0.009867551803588868, 0.009083711624145507, 0.009057184219360352, 0.008966015815734864, 0.008947839736938476, 0.008947423934936524, 0.008942015647888183, 0.008995871543884277, 0.008855648040771484, 0.008919679641723632, 0.008875967979431153, 0.008949919700622559, 0.008920384407043456, 0.008917695999145507, 0.008918751716613769, 0.00892137622833252, 0.009214015960693359, 0.009134016036987304, 0.009091072082519532, 0.008955007553100585, 0.008880543708801269, 0.008909279823303223, 0.008924639701843262, 0.009417247772216798, 0.009082880020141602, 0.009018943786621094, 0.009100959777832032, 0.008892224311828614, 0.008882911682128906, 0.008851712226867675, 0.008792063713073731, 0.009775103569030762, 0.008921088218688965, 0.008852895736694335, 0.008813119888305665, 0.008792192459106445, 0.008795519828796387, 0.008576319694519044, 0.00890336036682129, 0.008852640151977538, 0.009206624031066894, 0.008870944023132325, 0.008827872276306152, 0.008820735931396484, 0.008785920143127441, 0.00890873622894287, 0.008798272132873534, 0.00881049633026123, 0.008839424133300782, 0.008898367881774902, 0.008912832260131835, 0.008984576225280762, 0.00912179183959961, 0.009049632072448731, 0.00891164779663086, 0.008916671752929688, 0.008882176399230958, 0.008829952239990235, 0.008842368125915528, 0.008914303779602051, 0.008997088432312011, 0.008943615913391113, 0.008923232078552246, 0.008939711570739747, 0.009004511833190919, 0.009063072204589843, 0.00894159984588623, 0.008949600219726562, 0.008950912475585937, 0.00892403221130371, 0.008912575721740723, 0.008960448265075683, 0.009044063568115235, 0.009111231803894042, 0.00905840015411377, 0.009056256294250489, 0.009018912315368652, 0.008931679725646973, 0.008900544166564942, 0.00889583969116211, 0.008867712020874023, 0.00886678409576416, 0.008854656219482422, 0.008864640235900879, 0.008847359657287598, 0.008905823707580567, 0.008841983795166016, 0.008920255661010743, 0.008855999946594239, 0.008802847862243652, 0.009035072326660155, 0.008972864151000976, 0.008941823959350587, 0.009104479789733886, 0.008934176445007325, 0.008924480438232421, 0.008921055793762208, 0.008995552062988282, 0.008957951545715333, 0.00924009609222412, 0.009008671760559082, 0.009152992248535156, 0.009017696380615234, 0.008953856468200684, 0.00894156837463379, 0.008871616363525391, 0.008913567543029785, 0.008855392456054687, 0.009106495857238769, 0.008940287590026855, 0.008951583862304687, 0.008990943908691406, 0.009043968200683594, 0.008986111640930175, 0.009007935523986816, 0.00903545570373535, 0.00896560001373291, 0.008899104118347169, 0.00887337589263916, 0.008949600219726562, 0.00884812831878662, 0.008853504180908203, 0.008873727798461914, 0.0088570556640625, 0.008820575714111328, 0.008850367546081542, 0.009173248291015625, 0.009008895874023438, 0.008806400299072266, 0.008873727798461914, 
0.009005215644836426, 0.008982336044311524, 0.008935711860656738, 0.00891651153564453, 0.008878560066223144, 0.008893856048583984, 0.008868096351623535, 0.00887228775024414, 0.009107456207275391, 0.009750528335571289, 0.00935321617126465, 0.009189375877380371, 0.008879872322082519, 0.008829248428344727, 0.008835007667541504, 0.008767840385437011, 0.008826432228088378, 0.008791135787963868, 0.008841983795166016, 0.009044223785400391, 0.009158656120300293, 0.009193471908569336, 0.00919961643218994, 0.009187264442443848, 0.00901734447479248, 0.008957632064819336, 0.008939295768737794, 0.008872351646423339, 0.008976096153259278, 0.008914912223815918, 0.00884598445892334, 0.00882595157623291, 0.008855903625488281, 0.008640576362609863, 0.008822496414184571, 0.008834624290466308, 0.00881321620941162, 0.00885750389099121, 0.008779775619506837, 0.008775775909423827, 0.008810527801513672, 0.008787872314453125, 0.008751071929931641, 0.008775679588317872, 0.00879593563079834, 0.008816864013671875, 0.008904704093933105, 0.008970239639282226, 0.008881440162658691, 0.008889056205749512, 0.008825087547302246, 0.008760543823242187, 0.010648096084594726, 0.010707296371459962, 0.009039520263671875, 0.008953856468200684, 0.008869888305664063, 0.008839167594909669, 0.00880355167388916, 0.008778528213500976, 0.00877945613861084, 0.008819007873535156, 0.008822784423828126, 0.00880844783782959, 0.008791775703430175, 0.008774144172668457, 0.00888809585571289, 0.008767487525939942, 0.008755200386047364, 0.00877295970916748, 0.008986592292785645, 0.008881888389587402, 0.009073375701904297, 0.00895206356048584, 0.008843263626098634, 0.008979616165161132, 0.009105536460876465, 0.009366239547729492, 0.009162303924560546, 0.00925875186920166, 0.00921455955505371, 0.009090880393981933, 0.009025952339172364, 0.008974207878112792, 0.008992863655090331, 0.008896160125732421, 0.00888038444519043, 0.008824319839477538, 0.008848992347717285, 0.008932415962219237, 0.008855423927307128, 0.008902655601501466, 0.00893564796447754, 0.009028639793395996, 0.008857664108276367, 0.008905376434326172, 0.008648768424987793, 0.008950976371765137, 0.008886431694030762, 0.008808032035827636, 0.008870783805847169, 0.008877216339111328, 0.008874143600463867, 0.00882748794555664, 0.008912896156311035, 0.008834752082824707, 0.009161087989807129, 0.00901136016845703, 0.008927231788635253, 0.008953184127807616, 0.009042592048645019, 0.008986144065856934, 0.008933856010437011, 0.008867839813232421, 0.008879520416259766, 0.008891136169433593, 0.008908639907836914, 0.009527615547180176, 0.008963871955871582, 0.009006496429443359, 0.008880127906799316, 0.008880640029907226, 0.008901920318603516, 0.008835807800292968, 0.008851231575012207, 0.00898419189453125, 0.008941472053527832, 0.008819392204284668, 0.008807647705078124, 0.008796959877014161, 0.008820256233215333, 0.008882176399230958, 0.008806879997253419, 0.008847359657287598, 0.008816639900207519, 0.00880025577545166, 0.008833024024963379, 0.008896544456481933, 0.008818304061889649, 0.008757632255554199, 0.008831968307495117, 0.009211008071899413, 0.008822879791259765, 0.0088340482711792, 0.00888092803955078, 0.009000960350036622, 0.008996416091918945, 0.008962656021118164, 0.00890595245361328, 0.008896639823913574, 0.008790271759033203, 0.009064031600952148, 0.008886015892028809, 0.008890751838684083, 0.008867584228515624, 0.008863936424255371, 0.008907648086547851, 0.008834239959716796, 0.008870047569274902, 0.008939743995666504, 0.009431743621826172, 0.00994927978515625, 
0.009061920166015624, 0.008902688026428223, 0.00891478443145752, 0.008865376472473145, 0.008889535903930663, 0.00888419246673584, 0.008868000030517579, 0.008885248184204102, 0.008891072273254395, 0.008889408111572265, 0.008826047897338866, 0.008850303649902344, 0.008821184158325196, 0.008827327728271484, 0.008841216087341308, 0.00876144027709961, 0.008820639610290528, 0.008738816261291504, 0.008906847953796386, 0.008845120429992677, 0.009007455825805664, 0.008840928077697754, 0.008841183662414551, 0.008832544326782226, 0.00895206356048584, 0.008953472137451172, 0.008816736221313477, 0.008835391998291016, 0.008919232368469239, 0.008853568077087402, 0.00888982391357422, 0.00884995174407959, 0.008855775833129883, 0.008803711891174317, 0.008821151733398437, 0.008838496208190917, 0.008911680221557618, 0.008929408073425293, 0.008834783554077148, 0.008820096015930175, 0.008919903755187988, 0.00885756778717041, 0.008873696327209473, 0.008839103698730469, 0.008945504188537597, 0.008909119606018066, 0.009060064315795898, 0.008902848243713379, 0.009021535873413086, 0.008890368461608887, 0.008896608352661133, 0.008863295555114746, 0.008885663986206055, 0.009035967826843261, 0.00893785572052002, 0.008829024314880371, 0.008988960266113281, 0.009286751747131347, 0.009032511711120605, 0.008976096153259278, 0.008620608329772949, 0.008955264091491699, 0.008882368087768554, 0.008933823585510254, 0.009000960350036622, 0.008998527526855468, 0.008868415832519532, 0.008908608436584472, 0.008933343887329102, 0.008902688026428223, 0.008927231788635253, 0.008843008041381837, 0.008855135917663574, 0.008870559692382813, 0.008871168136596679, 0.008899680137634277, 0.00892073631286621, 0.008936863899230957, 0.008870495796203613, 0.008828607559204102, 0.008803903579711914, 0.00885148811340332, 0.008876064300537109, 0.008836864471435547, 0.008858559608459472, 0.008836992263793945, 0.008789983749389649, 0.008808608055114747, 0.00880844783782959, 0.008797247886657715, 0.008823360443115234, 0.008833536148071289, 0.008896384239196777, 0.008869024276733398, 0.008880960464477538, 0.008968352317810058, 0.008943488121032715, 0.009084256172180175, 0.008913567543029785, 0.008876255989074707, 0.008853471755981446, 0.00884819221496582, 0.008836095809936523, 0.008886272430419923, 0.008875904083251953, 0.008861824035644532, 0.008955424308776855, 0.009183808326721192, 0.009304096221923828, 0.009276479721069335, 0.009395039558410645, 0.009178432464599609, 0.009191072463989258, 0.009059328079223633, 0.009029919624328614, 0.008996224403381348, 0.008962112426757813, 0.008909088134765625, 0.008894463539123536, 0.008894463539123536, 0.00887827205657959, 0.008875200271606445, 0.00892144012451172]",tokens/s,111.80112260047528,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in 
execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4412.592128,4707.975168,0.0,4305.453056,4304.491008,s,1,10.4071025390625,10.4071025390625,0.0,10.4071025390625,10.4071025390625,10.4071025390625,10.4071025390625,[10.4071025390625],,kWh,9.888320817079451e-05,1.0900108380933137e-05,3.075863571799786e-05,0.0001405419522697255,,MB,4447.096832,4829.609984,0.0,4412.407808,4373.842432,s,10,1.9709659881591797,0.19709659881591796,0.0009136969979040734,0.1969654235839844,0.19806541290283203,0.19866162033081053,0.19913858627319336,"[0.19614093017578124, 0.19593606567382812, 0.19677565002441405, 0.19731097412109375, 0.19689251708984376, 0.197038330078125, 0.19639730834960936, 0.19793292236328125, 0.19925782775878906, 0.19728346252441406]",tokens/s,1298.8554928798947,kWh,5.804917726633897e-06,6.401814714006342e-07,3.830460580705801e-06,1.0275559778740331e-05,tokens/kWh,24913484.570412643,MB,4457.79968,4844.290048,0.0,4429.185024,4373.844992,s,10,18.095225463867187,1.809522546386719,0.009216393917116204,1.8056864013671876,1.8201615112304688,1.8234003356933592,1.825991395263672,"[1.8029951171875, 1.7980750732421875, 1.8045372314453125, 1.799474853515625, 1.8036649169921875, 1.82663916015625, 1.8194417724609375, 1.81436328125, 1.819198486328125, 1.8068355712890625]",tokens/s,34.81581377684368,kWh,5.268680201961281e-05,5.809376723954254e-06,3.465118131569406e-05,9.314736005926111e-05,tokens/kWh,676347.6706148074,,s,630,18.09113057518008,0.028716080278063577,0.0004462183527656419,0.028671728134155274,0.029064616775512697,0.029322348499298094,0.030622859077453622,"[0.02946393585205078, 0.028597375869750977, 0.028632736206054686, 0.028476608276367187, 0.028307424545288087, 0.028375648498535157, 0.02865567970275879, 0.0285546875, 0.028598976135253907, 0.02841334342956543, 0.02822547149658203, 0.028336959838867186, 0.02817433547973633, 0.028209375381469726, 0.02847427177429199, 0.02879372787475586, 0.028568927764892577, 0.028551136016845703, 0.028836511611938478, 0.028641536712646486, 0.0288305606842041, 0.028593088150024416, 0.028494976043701173, 0.02848761558532715, 0.028427200317382814, 0.029197471618652344, 0.02913190460205078, 0.02892361640930176, 0.028579200744628906, 0.028725183486938477, 0.02864303970336914, 0.02860700798034668, 0.029428159713745117, 0.028729343414306642, 0.029278207778930664, 0.028614656448364258, 0.02866156768798828, 0.028747648239135743, 0.02881977653503418, 0.029207807540893554, 0.02878335952758789, 0.02889727973937988, 0.028888639450073243, 0.02876255989074707, 0.028511520385742187, 0.028545759201049806, 0.02853887939453125, 0.028360383987426758, 0.02836307144165039, 0.028266496658325195, 0.028262367248535158, 0.028227615356445312, 0.028294303894042968, 0.02825916862487793, 0.028317888259887694, 0.028462688446044923, 0.02836092758178711, 0.02865670394897461, 0.0290129280090332, 0.02864134407043457, 0.02837411117553711, 
0.028326751708984375, 0.028309503555297853, 0.03039289665222168, 0.028694528579711914, 0.028463104248046874, 0.02839049530029297, 0.028502527236938476, 0.028312063217163085, 0.028385183334350587, 0.028317792892456055, 0.028370431900024414, 0.028424575805664064, 0.02823129653930664, 0.028213663101196287, 0.02826406478881836, 0.02833036804199219, 0.028399072647094726, 0.028469791412353517, 0.0285614070892334, 0.028442623138427735, 0.02832383918762207, 0.028264448165893553, 0.02838118362426758, 0.028246015548706056, 0.02838118362426758, 0.0286167049407959, 0.031107072830200196, 0.029613183975219726, 0.029070175170898438, 0.02862918472290039, 0.02831955146789551, 0.028676000595092774, 0.02851443290710449, 0.028721151351928712, 0.028803071975708007, 0.02850201606750488, 0.028917760848999025, 0.02864681625366211, 0.028389055252075194, 0.02851468849182129, 0.02830771255493164, 0.028247392654418946, 0.028416959762573243, 0.028494943618774415, 0.02839369583129883, 0.02845977592468262, 0.028514015197753907, 0.028352319717407228, 0.028303775787353515, 0.02845280075073242, 0.028495935440063475, 0.028293119430541993, 0.02828825569152832, 0.028271360397338866, 0.028430335998535155, 0.02816409683227539, 0.028172128677368163, 0.028340608596801757, 0.028309280395507813, 0.028880704879760744, 0.02835196876525879, 0.028601055145263673, 0.028315071105957032, 0.02835103988647461, 0.028338176727294922, 0.0283907527923584, 0.02832057571411133, 0.0284117431640625, 0.028726879119873046, 0.028288543701171873, 0.02821209526062012, 0.028274879455566407, 0.028531776428222657, 0.028467615127563475, 0.02839353561401367, 0.0282258243560791, 0.028450111389160156, 0.028340991973876954, 0.02824287986755371, 0.028247039794921876, 0.028216320037841795, 0.0283637752532959, 0.02853887939453125, 0.028434431076049805, 0.028397535324096678, 0.02870582389831543, 0.02851737594604492, 0.02879897689819336, 0.028670976638793946, 0.028803712844848634, 0.028600032806396485, 0.028760480880737304, 0.029258207321166994, 0.028673824310302735, 0.028509504318237306, 0.028517248153686524, 0.028446527481079103, 0.02856755256652832, 0.028437728881835937, 0.028658464431762697, 0.0290119686126709, 0.02841596794128418, 0.028635103225708006, 0.02850003242492676, 0.028516351699829103, 0.03160883140563965, 0.02870444869995117, 0.029512351989746093, 0.028698272705078125, 0.028882272720336916, 0.029406015396118163, 0.029026496887207032, 0.02852739143371582, 0.028696575164794923, 0.02853977584838867, 0.028505311965942384, 0.02843507194519043, 0.028551071166992188, 0.028824960708618164, 0.028486528396606446, 0.02876006317138672, 0.029655040740966795, 0.02857164764404297, 0.028524415969848633, 0.028538591384887697, 0.02835830307006836, 0.0284003849029541, 0.02852835273742676, 0.029204544067382814, 0.02853071975708008, 0.028554271697998047, 0.02846774482727051, 0.02958582305908203, 0.02838025665283203, 0.02850864028930664, 0.028477888107299804, 0.02855731201171875, 0.028595455169677736, 0.02876406478881836, 0.028566368103027345, 0.028516319274902342, 0.028624927520751953, 0.028360992431640624, 0.028357887268066408, 0.028391071319580078, 0.028435264587402344, 0.028445920944213866, 0.028449567794799804, 0.02860032081604004, 0.028438528060913085, 0.02835251235961914, 0.028803071975708007, 0.028645376205444335, 0.028493824005126952, 0.028485343933105468, 0.028391712188720702, 0.028266496658325195, 0.028317695617675782, 0.028227584838867188, 0.0282739200592041, 0.028555103302001953, 0.02833500862121582, 0.028684288024902343, 0.028478944778442383, 
0.029341344833374024, 0.028711519241333007, 0.02865385627746582, 0.02831155204772949, 0.02836275291442871, 0.02840707206726074, 0.028357343673706056, 0.02859769630432129, 0.028717119216918944, 0.028579744338989257, 0.02871766471862793, 0.028405567169189454, 0.0286680965423584, 0.02852457618713379, 0.02841609573364258, 0.02840972709655762, 0.028483583450317384, 0.028345439910888674, 0.028418560028076172, 0.028495296478271485, 0.028635744094848634, 0.028541311264038086, 0.0287127685546875, 0.02874998474121094, 0.028667488098144532, 0.028762432098388673, 0.029046911239624024, 0.029714431762695313, 0.028680192947387696, 0.028563360214233398, 0.02875014305114746, 0.02873107147216797, 0.02875811195373535, 0.028270591735839845, 0.028360000610351564, 0.028693119049072267, 0.02865567970275879, 0.028365888595581056, 0.028490688323974608, 0.028629024505615233, 0.028375072479248045, 0.02844281578063965, 0.028294912338256838, 0.028276063919067382, 0.028395904541015624, 0.02830364799499512, 0.028328224182128905, 0.0284498233795166, 0.02837936019897461, 0.028239519119262695, 0.02841254425048828, 0.02866921615600586, 0.02868899154663086, 0.029253952026367186, 0.029099872589111328, 0.02871721649169922, 0.028919807434082033, 0.028645376205444335, 0.028588031768798827, 0.02857164764404297, 0.02919424057006836, 0.029081472396850584, 0.02876006317138672, 0.02868176078796387, 0.028612831115722655, 0.0285166072845459, 0.028661888122558595, 0.028618751525878908, 0.028641376495361328, 0.028471200942993165, 0.02851430320739746, 0.02884739112854004, 0.02885215950012207, 0.028936479568481447, 0.028941919326782226, 0.028930976867675783, 0.028676095962524413, 0.028491584777832032, 0.028354719161987306, 0.0283156795501709, 0.028254207611083985, 0.028252063751220705, 0.0283853759765625, 0.028294431686401368, 0.028334815979003905, 0.028254207611083985, 0.02821286392211914, 0.028268896102905273, 0.030709632873535155, 0.028592287063598634, 0.02954745674133301, 0.02903366470336914, 0.029096799850463866, 0.028909536361694337, 0.028846080780029298, 0.028722463607788087, 0.028732128143310547, 0.02898044776916504, 0.028936895370483398, 0.028847328186035158, 0.028789344787597655, 0.028790271759033204, 0.028865312576293944, 0.028915712356567383, 0.02892799949645996, 0.02905404853820801, 0.028973983764648437, 0.029097984313964844, 0.028870176315307618, 0.03042937660217285, 0.0290982723236084, 0.02897667121887207, 0.028738016128540038, 0.02886649513244629, 0.028741695404052733, 0.029265920639038087, 0.030373888015747072, 0.028976991653442384, 0.028884864807128905, 0.028916000366210937, 0.028855327606201173, 0.028844255447387696, 0.029031167984008788, 0.02911027145385742, 0.028948480606079102, 0.029654048919677736, 0.028988384246826173, 0.028959808349609376, 0.028928640365600587, 0.02877471923828125, 0.028839935302734376, 0.028734975814819336, 0.02876025581359863, 0.028672319412231445, 0.028821504592895508, 0.02919628715515137, 0.028880863189697265, 0.028960800170898436, 0.028829696655273438, 0.0288372802734375, 0.029014303207397462, 0.029053247451782227, 0.0288985595703125, 0.028684480667114258, 0.028813888549804687, 0.029005823135375978, 0.028770303726196288, 0.028886688232421874, 0.029356351852416994, 0.029074495315551757, 0.029002944946289064, 0.028837663650512695, 0.02875596809387207, 0.029176959991455077, 0.0289102725982666, 0.028779775619506835, 0.028867328643798828, 0.028852224349975586, 0.028702720642089844, 0.028662879943847655, 0.02873027229309082, 0.028682239532470705, 0.02931622314453125, 0.028733888626098634, 
0.028671743392944336, 0.028705184936523437, 0.02878499221801758, 0.028729280471801757, 0.028719104766845704, 0.028821504592895508, 0.028762111663818358, 0.029040639877319335, 0.028870655059814454, 0.028637184143066406, 0.028683296203613283, 0.028627935409545897, 0.03236207962036133, 0.029163936614990234, 0.029327360153198243, 0.0289300479888916, 0.028856512069702148, 0.029063999176025392, 0.028876991271972657, 0.028853183746337892, 0.02893948745727539, 0.028967039108276367, 0.029137439727783203, 0.028686336517333984, 0.02874083137512207, 0.02873219108581543, 0.028763328552246094, 0.029532991409301757, 0.028386976242065428, 0.028381280899047852, 0.028379392623901368, 0.028348159790039063, 0.02875315284729004, 0.02858880043029785, 0.028797183990478516, 0.028788288116455077, 0.028793119430541993, 0.028801183700561523, 0.029014015197753908, 0.02892720031738281, 0.028853023529052734, 0.028848127365112306, 0.028845535278320313, 0.028959007263183595, 0.028701120376586915, 0.028827072143554688, 0.028774784088134765, 0.028867807388305664, 0.028730144500732423, 0.02875801658630371, 0.02878873634338379, 0.028788415908813477, 0.02926019287109375, 0.028880895614624022, 0.028729120254516603, 0.028689952850341798, 0.028791263580322267, 0.028688608169555666, 0.02876006317138672, 0.028657663345336915, 0.028661760330200195, 0.02875961685180664, 0.02863542366027832, 0.028410015106201173, 0.02829516792297363, 0.02838937568664551, 0.02851020812988281, 0.028624895095825196, 0.028722368240356445, 0.028822336196899414, 0.02885139274597168, 0.028838720321655274, 0.02876825523376465, 0.028825439453125, 0.028763551712036133, 0.028822240829467775, 0.02870275115966797, 0.029476863861083984, 0.02872319984436035, 0.028741600036621094, 0.028785919189453123, 0.028705568313598634, 0.02876620864868164, 0.028850080490112305, 0.028966527938842773, 0.028754400253295898, 0.02874777603149414, 0.028751840591430666, 0.028723232269287108, 0.028725248336791992, 0.02967724800109863, 0.030701887130737304, 0.02896281623840332, 0.028759391784667968, 0.028846752166748046, 0.02853273582458496, 0.028602367401123048, 0.028948320388793945, 0.028815488815307617, 0.028778528213500975, 0.028675104141235353, 0.028726112365722655, 0.029071487426757813, 0.02862460708618164, 0.028618816375732423, 0.028627168655395507, 0.028843488693237305, 0.02865206336975098, 0.028712352752685546, 0.02881760025024414, 0.028676511764526368, 0.028690431594848635, 0.028755680084228515, 0.028671712875366212, 0.028666431427001954, 0.02939072036743164, 0.02877903938293457, 0.028631040573120117, 0.028655359268188477, 0.028734848022460936, 0.028542112350463868, 0.02852412796020508, 0.028585792541503906, 0.028514495849609377, 0.028547199249267578, 0.028675680160522462, 0.028690847396850586, 0.028649023056030273, 0.028584384918212892, 0.02855526351928711, 0.028479488372802734, 0.02856345558166504, 0.0285467529296875, 0.028719327926635743, 0.028517663955688475, 0.028737375259399414, 0.028750591278076172, 0.029046079635620118, 0.02876883125305176, 0.02876652717590332, 0.029056800842285156, 0.028755264282226564, 0.02867910385131836, 0.0287825927734375, 0.02875187110900879, 0.02878054428100586, 0.02910380744934082, 0.029097471237182617, 0.033148929595947264, 0.030184255599975587, 0.0293287353515625, 0.029296384811401368, 0.029313919067382812, 0.030017248153686525, 0.02878054428100586, 0.02880143928527832, 0.02874972724914551, 0.028779903411865235, 0.028807680130004884, 0.028630912780761717, 0.02880460739135742, 0.028766687393188477, 0.028808544158935547, 0.028727872848510742, 
0.02877894401550293, 0.02861248016357422, 0.028763391494750976, 0.028775232315063477, 0.028738784790039062, 0.028592927932739258, 0.02857779121398926, 0.028839807510375976, 0.028700799942016603, 0.028690431594848635, 0.028761632919311525, 0.02891823959350586, 0.028639232635498047, 0.028590080261230468, 0.029496448516845703, 0.029002111434936525, 0.02890729522705078, 0.02914179229736328, 0.028856576919555663, 0.028772031784057617, 0.02856755256652832, 0.028886911392211913, 0.028852256774902343, 0.028967008590698243, 0.029059328079223633, 0.02930240058898926, 0.028999807357788086, 0.028893503189086914, 0.028724128723144532, 0.028580640792846678, 0.028507808685302734, 0.02856585693359375, 0.02837299156188965, 0.028329984664916992, 0.02833612823486328, 0.028392704010009765, 0.028525312423706053, 0.028495199203491212, 0.028719743728637694, 0.028446752548217772, 0.028688480377197265, 0.028498111724853517, 0.02846870422363281, 0.028242176055908202, 0.028245023727416992, 0.02863532829284668, 0.02858255958557129, 0.028450431823730468, 0.028386911392211913, 0.028334911346435548, 0.028359039306640625, 0.028300287246704102, 0.029175968170166017, 0.028461631774902345, 0.02844198417663574, 0.02838387107849121, 0.028848127365112306, 0.02879283142089844, 0.028903423309326173, 0.029051935195922852, 0.028879615783691408, 0.028528863906860352, 0.028431392669677733, 0.028400224685668947, 0.028399999618530274, 0.028471296310424804, 0.028379135131835938, 0.02832383918762207, 0.028290143966674806, 0.02826684761047363, 0.028320320129394533, 0.028413951873779295, 0.030315967559814454, 0.030834335327148438, 0.028619680404663086, 0.028395647048950194, 0.028331775665283204]",tokens/s,34.82369426177943,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4410.59328,4707.975168,0.0,4305.453056,4304.491008,s,1,10.421193359375,10.421193359375,0.0,10.421193359375,10.421193359375,10.421193359375,10.421193359375,[10.421193359375],,kWh,9.875442427084332e-05,1.0885916664281946e-05,3.108863598200173e-05,0.000140728976917127,,MB,4271.702016,4829.609984,0.0,4412.407808,4373.842432,s,10,1.9635379180908201,0.19635379180908202,0.0005258069885088617,0.1964731216430664,0.1968551834106445,0.19693533554077147,0.19699945724487306,"[0.19682847595214845, 0.1953652801513672, 0.19582838439941405, 0.19660870361328125, 0.19615023803710938, 0.19683737182617186, 0.19633753967285156, 0.1967821502685547, 0.19578428649902344, 0.19701548767089844]",tokens/s,1303.7690672605547,kWh,5.797203101552838e-06,6.393265815337422e-07,3.8559889235683555e-06,1.0292518606654937e-05,tokens/kWh,24872434.99705461,MB,4281.012224,4844.290048,0.0,4429.185024,4373.844992,s,10,20.827313964843754,2.082731396484375,0.29721883058390564,2.089891540527344,2.3863911132812503,2.3877281494140625,2.3887977783203125,"[2.370843505859375, 2.37403466796875, 2.386093994140625, 2.378898193359375, 2.389065185546875, 1.8089395751953126, 1.783486572265625, 1.78332177734375, 1.775119140625, 1.7775113525390625]",tokens/s,30.248739758925815,kWh,6.056855987594817e-05,6.680083376092295e-06,3.893257308503296e-05,0.00010618121633707344,tokens/kWh,593325.2808105514,,s,630,20.824298826217653,0.03305444258129786,0.004756487799541903,0.03701318359375,0.03809029388427734,0.03827253875732422,0.03943698825836183,"[0.03786547088623047, 0.03763814544677734, 0.03753779220581055, 0.03745177459716797, 0.037787647247314454, 0.037314239501953124, 0.03743388748168945, 0.03732601547241211, 0.03728140640258789, 0.03735446548461914, 0.037294078826904296, 0.03783683013916016, 0.03791254425048828, 0.03805759811401367, 0.03802355194091797, 0.037967201232910155, 0.03782624053955078, 0.03864675140380859, 0.03816812896728516, 0.038021568298339845, 0.03821526336669922, 0.03919692611694336, 0.03794291305541992, 0.03755062484741211, 0.03750883102416992, 0.037251232147216796, 0.037400703430175784, 0.03747443389892578, 0.03750428771972656, 0.037706336975097655, 0.03740585708618164, 0.037619873046875, 0.03742380905151367, 0.03741491317749023, 0.03749478530883789, 0.037455646514892575, 0.03750048065185547, 0.03780207824707031, 0.037298465728759764, 0.037488929748535155, 0.03743743896484375, 0.03781596755981445, 0.03772041702270508, 0.037466110229492186, 0.03760108947753906, 0.037437633514404295, 0.03728339385986328, 0.03953504180908203, 0.039732673645019534, 0.037795841217041014, 0.0372374382019043, 0.03716080093383789, 0.037027168273925784, 
0.03711660766601563, 0.03717529678344727, 0.03730979156494141, 0.0370797119140625, 0.037292030334472655, 0.03717529678344727, 0.037107681274414064, 0.03742723083496094, 0.037197662353515626, 0.03699919891357422, 0.03759462356567383, 0.03716339111328125, 0.037406177520751954, 0.03751103973388672, 0.037048286437988284, 0.03732342529296875, 0.03714678573608399, 0.037101566314697264, 0.03742323303222656, 0.0371671028137207, 0.0372017936706543, 0.03714572906494141, 0.0374257926940918, 0.037290271759033204, 0.03736163330078125, 0.037410655975341794, 0.037226657867431644, 0.03742281723022461, 0.0373455696105957, 0.037209407806396484, 0.03733369445800781, 0.03722256088256836, 0.03708671951293945, 0.037196128845214844, 0.037104736328125, 0.03716188812255859, 0.03739033508300781, 0.038136993408203125, 0.037731040954589845, 0.038056095123291014, 0.03779955291748047, 0.03779619216918945, 0.03809280014038086, 0.03817881774902344, 0.03790419387817383, 0.03824415969848633, 0.03805427169799805, 0.03787980651855469, 0.03783852767944336, 0.03782451248168945, 0.03813558578491211, 0.03821622467041016, 0.037953536987304685, 0.038182849884033206, 0.038260799407958984, 0.03827711868286133, 0.03814604949951172, 0.03805184173583984, 0.03781024169921875, 0.03765580749511719, 0.03762041473388672, 0.0376360969543457, 0.03781017684936523, 0.037522750854492186, 0.03754207992553711, 0.04001228713989258, 0.03809001541137695, 0.03794515228271484, 0.03779062271118164, 0.037978111267089845, 0.03769651031494141, 0.03752576065063477, 0.03782118225097656, 0.037974689483642576, 0.037806079864501956, 0.03781619262695313, 0.037602432250976564, 0.037663806915283204, 0.038295745849609375, 0.0383377914428711, 0.03789875030517578, 0.03797372817993164, 0.038018943786621094, 0.037775295257568356, 0.03768368148803711, 0.03836502456665039, 0.039542465209960936, 0.041674686431884767, 0.037959327697753904, 0.03765321731567383, 0.0377017936706543, 0.037676288604736326, 0.03810153579711914, 0.037928256988525394, 0.037553054809570316, 0.0377341423034668, 0.0375048942565918, 0.03885055923461914, 0.03757696151733399, 0.03793916702270508, 0.038058143615722656, 0.03823321533203125, 0.038088768005371094, 0.03808748626708985, 0.03768320083618164, 0.037713920593261716, 0.03814329528808594, 0.038129375457763674, 0.03881833648681641, 0.03828531265258789, 0.038950782775878906, 0.03822463989257813, 0.03785299301147461, 0.038438175201416014, 0.037862239837646486, 0.038801280975341794, 0.03747635269165039, 0.03747430419921875, 0.03733110427856445, 0.037257057189941406, 0.037416961669921874, 0.037492256164550784, 0.037302753448486325, 0.03721830368041992, 0.037230209350585936, 0.03723836898803711, 0.037280544281005856, 0.03751289749145508, 0.037079296112060546, 0.03715488052368164, 0.03721567916870117, 0.03708121490478516, 0.03734764862060547, 0.03731232070922851, 0.0372165756225586, 0.03719347381591797, 0.03834614562988281, 0.03819785690307617, 0.03789801788330078, 0.037402847290039065, 0.03768729782104492, 0.0376866569519043, 0.038163070678710935, 0.03853251266479492, 0.03847433471679688, 0.03804374313354492, 0.03762371063232422, 0.03751116943359375, 0.03751702499389648, 0.037424671173095704, 0.03738291168212891, 0.03730031967163086, 0.03776835250854492, 0.03786419296264648, 0.037555999755859375, 0.03742041778564453, 0.037415775299072265, 0.0377262077331543, 0.03828326416015625, 0.038254398345947266, 0.03836348724365234, 0.03851772689819336, 0.0379153938293457, 0.03778575897216797, 0.037791713714599606, 0.03754179382324219, 0.03738428878784179, 
0.03735551834106445, 0.03736371231079102, 0.037359169006347656, 0.03734163284301758, 0.037644256591796876, 0.03825398254394531, 0.03755286407470703, 0.03757660675048828, 0.03750003051757812, 0.03819510269165039, 0.03748124694824219, 0.03732092666625977, 0.03744764709472656, 0.037722110748291016, 0.037560417175292966, 0.03754998397827149, 0.03751107025146484, 0.03773564910888672, 0.03766563034057617, 0.03754576110839844, 0.037814529418945315, 0.03751523208618164, 0.04087811279296875, 0.037625694274902345, 0.037900062561035154, 0.0374329605102539, 0.03771775817871094, 0.03727052688598633, 0.037643871307373046, 0.037454238891601564, 0.03749068832397461, 0.03740671920776367, 0.03812457656860352, 0.03775382232666016, 0.037463871002197266, 0.03727788925170898, 0.0373737907409668, 0.03739254379272461, 0.03751321411132812, 0.037528705596923825, 0.03743420791625977, 0.03770479965209961, 0.03774496078491211, 0.03773308944702149, 0.03781798553466797, 0.038580703735351565, 0.039567169189453126, 0.037996543884277346, 0.037689342498779296, 0.037763072967529294, 0.03783814239501953, 0.03793142318725586, 0.038131839752197264, 0.03773468780517578, 0.03785820770263672, 0.03840918350219726, 0.03825254440307617, 0.038335742950439455, 0.03826748657226563, 0.03824873733520508, 0.03807424163818359, 0.03792099380493164, 0.03808540725708008, 0.03809516906738281, 0.03808121490478516, 0.038265918731689455, 0.038238849639892575, 0.03812313461303711, 0.03857888031005859, 0.03794124984741211, 0.03773583984375, 0.03831254577636719, 0.03827667236328125, 0.03876499176025391, 0.03787558364868164, 0.03792268753051758, 0.037818622589111325, 0.037773311614990236, 0.03792844772338867, 0.03793561553955078, 0.037863040924072264, 0.037882080078125, 0.03793116760253906, 0.037599071502685544, 0.037654689788818356, 0.037738494873046875, 0.0377704963684082, 0.03755219268798828, 0.037562175750732424, 0.03769232177734375, 0.03766032028198242, 0.03780230331420899, 0.037803680419921874, 0.037513633728027344, 0.03751222229003906, 0.03813792037963867, 0.028981311798095703, 0.02886854362487793, 0.028608352661132812, 0.029390335083007812, 0.02878704071044922, 0.028376991271972657, 0.028670656204223634, 0.028315359115600586, 0.028471200942993165, 0.028643423080444336, 0.028556480407714843, 0.02881577682495117, 0.02863350486755371, 0.02854911994934082, 0.028434431076049805, 0.029191871643066407, 0.028456703186035156, 0.028338720321655273, 0.02838479995727539, 0.02828748893737793, 0.02839756774902344, 0.0282541446685791, 0.028473407745361327, 0.028688383102416993, 0.028305408477783203, 0.02841788864135742, 0.029312288284301758, 0.02853923225402832, 0.028479839324951173, 0.028175615310668947, 0.028766624450683592, 0.028636863708496094, 0.028402528762817382, 0.02885203170776367, 0.0285861759185791, 0.028696575164794923, 0.028804447174072264, 0.02852931213378906, 0.02852249526977539, 0.02833203125, 0.028130495071411132, 0.028136255264282227, 0.028182079315185547, 0.0282293758392334, 0.02869264030456543, 0.028605056762695313, 0.028682336807250977, 0.028611711502075195, 0.028436672210693358, 0.029339935302734373, 0.028641183853149413, 0.028082496643066408, 0.02820089530944824, 0.028155967712402342, 0.028296192169189452, 0.02810982322692871, 0.028357919692993165, 0.028402399063110352, 0.028276575088500976, 0.02830761528015137, 0.0288536319732666, 0.029827327728271485, 0.028917856216430664, 0.02845827293395996, 0.028403520584106445, 0.028291999816894533, 0.02863225555419922, 0.028111679077148437, 0.02815385627746582, 0.02804694366455078, 
0.027911712646484375, 0.027971519470214843, 0.027943872451782228, 0.028023839950561524, 0.02800326347351074, 0.02806524848937988, 0.02802364730834961, 0.028178144454956054, 0.02899692726135254, 0.02858857536315918, 0.028417312622070312, 0.028232576370239258, 0.02830486488342285, 0.028355104446411133, 0.02826268768310547, 0.028237632751464844, 0.02819424057006836, 0.02813385581970215, 0.028712160110473634, 0.029105247497558592, 0.03048819160461426, 0.028235776901245117, 0.02820102310180664, 0.028184576034545897, 0.028321247100830078, 0.028160543441772462, 0.029286399841308593, 0.028583904266357422, 0.028223520278930665, 0.028224863052368165, 0.028114816665649415, 0.028211839675903322, 0.028680351257324217, 0.02834748840332031, 0.02810563278198242, 0.028073984146118162, 0.028137279510498048, 0.028082368850708007, 0.028213151931762694, 0.028282880783081055, 0.028204736709594728, 0.02807644844055176, 0.028198623657226564, 0.027959583282470703, 0.029050655364990234, 0.028053728103637696, 0.028069952011108398, 0.028001440048217772, 0.028230016708374023, 0.028166336059570314, 0.02814588737487793, 0.028279935836791992, 0.028033824920654298, 0.02799420738220215, 0.02790729522705078, 0.028239679336547852, 0.028242143630981445, 0.029321151733398436, 0.03182723236083984, 0.02815683174133301, 0.028481536865234375, 0.02840575981140137, 0.02811248016357422, 0.02816655921936035, 0.02808367919921875, 0.027922975540161134, 0.02800230407714844, 0.02795699119567871, 0.02809878349304199, 0.02798918342590332, 0.02797654342651367, 0.028056991577148437, 0.02817084884643555, 0.028124704360961914, 0.02812928009033203, 0.027894111633300783, 0.028104448318481447, 0.028040864944458007, 0.02808835220336914, 0.028680896759033202, 0.02937779235839844, 0.030400447845458985, 0.02867897605895996, 0.030002496719360353, 0.02848124885559082, 0.028387615203857422, 0.028428991317749022, 0.028173887252807617, 0.028092863082885743, 0.028004352569580077, 0.028735488891601563, 0.028133119583129883, 0.02803536033630371, 0.027973600387573242, 0.027981407165527345, 0.027939039230346678, 0.02840595245361328, 0.02827231979370117, 0.028141536712646485, 0.028023136138916015, 0.02812723159790039, 0.028003328323364256, 0.02788844871520996, 0.02800624084472656, 0.02793641662597656, 0.0279266242980957, 0.027965408325195312, 0.027972223281860352, 0.02790399932861328, 0.027963392257690428, 0.02788764762878418, 0.027897823333740236, 0.028028928756713867, 0.02786463928222656, 0.028138111114501953, 0.028294143676757814, 0.029105279922485353, 0.028184255599975585, 0.02822428894042969, 0.02850364875793457, 0.02816860771179199, 0.028108800888061523, 0.027951135635375976, 0.0280185604095459, 0.02798396873474121, 0.028461055755615236, 0.028227584838867188, 0.028159744262695314, 0.02806403160095215, 0.028056831359863282, 0.02805740737915039, 0.02814454460144043, 0.028280832290649413, 0.027967487335205078, 0.02799331283569336, 0.027939456939697266, 0.028022079467773436, 0.027911008834838866, 0.027926240921020508, 0.02803331184387207, 0.02802899169921875, 0.02797177505493164, 0.028251903533935547, 0.028304927825927733, 0.028100128173828124, 0.027960384368896484, 0.028060735702514647, 0.02823456001281738, 0.028012544631958007, 0.028045536041259766, 0.02870249557495117, 0.02996633529663086, 0.028366464614868164, 0.02827017593383789, 0.028384159088134766, 0.028190591812133788, 0.028045183181762696, 0.028022911071777342, 0.028053279876708984, 0.027922655105590822, 0.028016096115112306, 0.028518335342407226, 0.028123680114746093, 0.028299583435058593, 
0.028239679336547852, 0.02828486442565918, 0.028219392776489258, 0.02812668800354004, 0.0281646728515625, 0.02806492805480957, 0.028119871139526367, 0.028176128387451174, 0.027983264923095705, 0.027982816696166993, 0.0281409912109375, 0.028002048492431642, 0.028127552032470703, 0.028006784439086913, 0.028008447647094727, 0.028016319274902345, 0.029122880935668945, 0.028511871337890626, 0.02865760040283203, 0.028033439636230468, 0.028039327621459963, 0.028041088104248046, 0.027917631149291994, 0.028025535583496092, 0.028862079620361327, 0.0281298885345459, 0.028882080078125, 0.028179071426391603, 0.02813337516784668, 0.027914047241210938, 0.028233983993530273, 0.02805894470214844, 0.02806028747558594, 0.02814499282836914, 0.02820924758911133, 0.028109119415283202, 0.028084672927856446, 0.02851820755004883, 0.02822332763671875, 0.02814771270751953, 0.027959455490112306, 0.028092479705810545, 0.02817024040222168, 0.02817817687988281, 0.028242111206054688, 0.02817228889465332, 0.027925695419311523, 0.028266336441040037, 0.02812131118774414, 0.027976448059082032, 0.028032320022583008, 0.028529344558715822, 0.028733407974243164, 0.028162303924560546, 0.02892166328430176, 0.028196575164794922, 0.028007999420166015, 0.02799411201477051, 0.028050111770629882, 0.02816636848449707, 0.028213024139404297, 0.02872150421142578, 0.028657312393188476, 0.02830486488342285, 0.028105247497558595, 0.028090208053588868, 0.02826051139831543, 0.028151487350463866, 0.028041759490966798, 0.02804128074645996, 0.028003168106079102, 0.02805235290527344, 0.028135391235351564, 0.02811292839050293, 0.028004352569580077, 0.02804047966003418, 0.028090560913085937, 0.028129823684692384, 0.028917760848999025, 0.02812723159790039]",tokens/s,30.253119457104326,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most 
recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4399.464448,4707.975168,0.0,4305.453056,4304.491008,s,1,10.4375556640625,10.4375556640625,0.0,10.4375556640625,10.4375556640625,10.4375556640625,10.4375556640625,[10.4375556640625],,kWh,0.00010035908481667473,1.1062763752588356e-05,3.16041919499882e-05,0.0001430260405192513,,MB,4333.52704,4829.609984,0.0,4412.407808,4373.842432,s,10,1.971469039916992,0.1971469039916992,0.0012051284099117738,0.1968300094604492,0.19905128784179688,0.19922076568603517,0.1993563479614258,"[0.19614028930664062, 0.19901362609863282, 0.19662838745117187, 0.19564703369140626, 0.195801025390625, 0.19939024353027343, 0.19699551391601564, 0.19765420532226563, 0.1966645050048828, 0.19753421020507814]",tokens/s,1298.5240691924778,kWh,5.804839304901595e-06,6.400014898219934e-07,3.853085871137283e-06,1.0297926665860872e-05,tokens/kWh,24859372.988999557,MB,4342.403072,4846.3872,0.0,4429.185024,4373.844992,s,10,18.24494299316406,1.824494299316406,0.02397269952250489,1.810436096191406,1.8609276000976562,1.8610742126464843,1.8611915026855468,"[1.8047381591796876, 1.8015810546875, 1.807452392578125, 1.810733154296875, 1.848447509765625, 1.799188232421875, 1.86089501953125, 1.8612208251953124, 1.840547607421875, 1.8101390380859375]",tokens/s,34.530116111409384,kWh,5.2653107582178747e-05,5.806195810555981e-06,3.468183384566244e-05,9.31411372383972e-05,tokens/kWh,676392.8578491568,,s,630,18.241828424453722,0.02895528321341863,0.0006153523640659173,0.028787663459777832,0.02968755531311035,0.029953547477722168,0.031324677066802996,"[0.028867103576660155, 0.028528640747070313, 0.028407808303833007, 0.02839347267150879, 0.028429759979248046, 0.028910144805908203, 0.028788543701171874, 0.02867219161987305, 0.028676095962524413, 0.028669952392578125, 0.02856345558166504, 0.028636320114135742, 0.02854956817626953, 0.028741056442260743, 0.028617599487304687, 0.029266016006469726, 0.028897151947021485, 0.028678272247314455, 0.028712959289550782, 0.028485631942749022, 0.02855276870727539, 0.02860691261291504, 0.02909334373474121, 0.02856582450866699, 0.028478944778442383, 0.028762880325317382, 0.028475391387939454, 
0.028620128631591798, 0.02847577667236328, 0.0286395206451416, 0.028519487380981444, 0.028593088150024416, 0.028604415893554686, 0.028706111907958985, 0.028803775787353516, 0.028721151351928712, 0.028675968170166016, 0.028708992004394532, 0.028430335998535155, 0.028403711318969727, 0.028384319305419924, 0.028488000869750976, 0.0284268798828125, 0.028725248336791992, 0.028602367401123048, 0.02859401512145996, 0.02847145652770996, 0.028495136260986327, 0.028520448684692383, 0.028345056533813476, 0.02848089599609375, 0.028967552185058594, 0.028716543197631835, 0.028725120544433595, 0.02927804756164551, 0.029005983352661132, 0.028965503692626952, 0.028725248336791992, 0.028686336517333984, 0.028564767837524416, 0.02845529556274414, 0.028455263137817384, 0.028440576553344726, 0.029288127899169923, 0.02874809646606445, 0.028708864212036132, 0.028606464385986328, 0.028416000366210937, 0.02841379165649414, 0.02828303909301758, 0.02841747283935547, 0.028420223236083984, 0.028398015975952148, 0.02830681610107422, 0.02839206314086914, 0.02833203125, 0.02845827293395996, 0.028333824157714845, 0.028696863174438477, 0.02849247932434082, 0.028493824005126952, 0.02839094352722168, 0.028250656127929687, 0.028288192749023437, 0.028209856033325195, 0.028266559600830077, 0.02828451156616211, 0.028314016342163087, 0.029001728057861328, 0.028804384231567382, 0.02937446403503418, 0.02861129570007324, 0.02881331253051758, 0.028571359634399413, 0.028530975341796876, 0.028423967361450194, 0.028428064346313477, 0.028501632690429688, 0.02848627281188965, 0.02835465621948242, 0.029001792907714843, 0.028972223281860353, 0.028760927200317383, 0.028712959289550782, 0.028641279220581056, 0.03062579154968262, 0.028839935302734376, 0.028802175521850586, 0.028668800354003907, 0.028487136840820312, 0.02859257507324219, 0.028510303497314454, 0.028553216934204102, 0.028565216064453124, 0.028557600021362303, 0.02867196846008301, 0.02852854347229004, 0.028646560668945314, 0.028570592880249022, 0.028833696365356445, 0.028462207794189454, 0.02853167915344238, 0.028448768615722656, 0.028379135131835938, 0.0283951358795166, 0.028404096603393554, 0.029393632888793944, 0.028979103088378907, 0.028816543579101562, 0.028590496063232423, 0.02849990463256836, 0.028578208923339843, 0.02878678321838379, 0.030971904754638672, 0.03189555168151856, 0.028735488891601563, 0.02852454376220703, 0.028473344802856446, 0.028687583923339845, 0.02870147132873535, 0.028708864212036132, 0.02936953544616699, 0.02892678451538086, 0.028663808822631837, 0.02845891189575195, 0.028471391677856447, 0.028435455322265626, 0.028326400756835936, 0.028352224349975585, 0.028259103775024413, 0.028225215911865234, 0.02837500762939453, 0.028590431213378908, 0.028631040573120117, 0.028598079681396483, 0.028509952545166015, 0.028436735153198243, 0.02840390396118164, 0.028426015853881836, 0.028553247451782227, 0.02850009536743164, 0.029129888534545897, 0.028666784286499023, 0.028659711837768553, 0.028739551544189453, 0.028606464385986328, 0.028614688873291015, 0.028616416931152345, 0.02865974426269531, 0.02871446418762207, 0.028646175384521484, 0.028512256622314453, 0.028455936431884765, 0.028559711456298827, 0.028560031890869142, 0.030713855743408205, 0.028560735702514647, 0.028310176849365234, 0.02830335998535156, 0.02834828758239746, 0.028250240325927736, 0.028221439361572266, 0.028387327194213868, 0.028448768615722656, 0.02834668731689453, 0.028325567245483397, 0.028254079818725585, 0.028496000289916994, 0.028207103729248048, 0.02856959915161133, 0.029476127624511718, 
0.02849865531921387, 0.028464319229125977, 0.028369728088378905, 0.03296460723876953, 0.02868796730041504, 0.02857187271118164, 0.028368576049804688, 0.02827724838256836, 0.02829516792297363, 0.02829516792297363, 0.028474624633789063, 0.02848771286010742, 0.028278783798217775, 0.028773088455200196, 0.02838118362426758, 0.028317695617675782, 0.02842736053466797, 0.02873436737060547, 0.0286167049407959, 0.029022207260131837, 0.02897920036315918, 0.02920147132873535, 0.029088352203369142, 0.02934351921081543, 0.02920467185974121, 0.02914137649536133, 0.02889081573486328, 0.02876448059082031, 0.028833599090576173, 0.02863942337036133, 0.028722848892211914, 0.029094240188598634, 0.029022207260131837, 0.029134143829345704, 0.029608640670776367, 0.029034496307373047, 0.029246944427490235, 0.028813631057739257, 0.028860639572143555, 0.028778495788574218, 0.02859212875366211, 0.028415519714355467, 0.028385759353637696, 0.029047935485839844, 0.028754463195800783, 0.028599679946899412, 0.028382080078125, 0.028347551345825197, 0.028247072219848634, 0.028257600784301756, 0.02830396842956543, 0.028309471130371095, 0.028391456604003905, 0.028431936264038084, 0.028356832504272463, 0.02836092758178711, 0.0286529598236084, 0.028420703887939453, 0.02852659225463867, 0.02831974411010742, 0.028597888946533204, 0.029834112167358397, 0.029027904510498047, 0.029020095825195314, 0.02919375991821289, 0.02965193557739258, 0.031468767166137696, 0.030028575897216796, 0.029812736511230467, 0.029687456130981445, 0.030054271697998045, 0.02975382423400879, 0.029776927947998046, 0.030006240844726563, 0.029783071517944334, 0.02995043182373047, 0.029811199188232423, 0.02963039970397949, 0.02967900848388672, 0.029541023254394533, 0.02963046455383301, 0.029550592422485353, 0.02976358413696289, 0.029453920364379882, 0.029446559906005858, 0.029226144790649413, 0.02980950355529785, 0.029070911407470704, 0.029139392852783202, 0.028786687850952147, 0.02893417549133301, 0.02894339179992676, 0.02896335983276367, 0.028906944274902344, 0.02900681686401367, 0.02888256072998047, 0.028983680725097657, 0.02882476806640625, 0.028949312210083008, 0.02877568054199219, 0.02889731216430664, 0.028836576461791993, 0.0289300479888916, 0.029319168090820313, 0.029027488708496092, 0.02886716842651367, 0.029699583053588868, 0.030522111892700196, 0.02918383979797363, 0.029114528656005858, 0.029321216583251954, 0.02943180847167969, 0.029255680084228516, 0.029177631378173828, 0.030011615753173827, 0.02919625663757324, 0.029159456253051757, 0.02906096076965332, 0.028896575927734376, 0.028744543075561523, 0.028775903701782228, 0.028799423217773436, 0.028612192153930665, 0.02853673553466797, 0.02929280090332031, 0.029048831939697265, 0.02879542350769043, 0.029132799148559572, 0.028878847122192384, 0.028487680435180664, 0.02949692726135254, 0.028442047119140626, 0.028410848617553712, 0.028395423889160155, 0.028321535110473632, 0.028578079223632813, 0.028607648849487306, 0.029234079360961913, 0.028692480087280273, 0.028475040435791014, 0.02831385612487793, 0.02845257568359375, 0.028481311798095703, 0.028470943450927735, 0.028447168350219727, 0.028549631118774413, 0.028269760131835936, 0.02830828857421875, 0.028577728271484373, 0.028489791870117187, 0.028313600540161132, 0.028438528060913085, 0.02840166473388672, 0.028370943069458008, 0.028192768096923827, 0.028280960083007813, 0.028709760665893556, 0.028453535079956054, 0.028434335708618166, 0.02835091209411621, 0.028637184143066406, 0.028810688018798828, 0.028561119079589845, 0.028500415802001952, 
0.02844905662536621, 0.028668031692504883, 0.028342271804809572, 0.02827225685119629, 0.028712959289550782, 0.028762496948242188, 0.028540639877319335, 0.028377376556396484, 0.02836467170715332, 0.028272544860839844, 0.02844076728820801, 0.028405792236328126, 0.028359840393066407, 0.02845756721496582, 0.02857804870605469, 0.02835456085205078, 0.028319456100463866, 0.028344415664672853, 0.02846067237854004, 0.02865190315246582, 0.028844064712524414, 0.028870111465454103, 0.02889593505859375, 0.03002572822570801, 0.029845504760742186, 0.029560831069946288, 0.030674943923950194, 0.029752416610717772, 0.03034614372253418, 0.030155935287475587, 0.029735071182250977, 0.02954310417175293, 0.029616128921508788, 0.02975948715209961, 0.029509632110595704, 0.029714431762695313, 0.02995609664916992, 0.02918400001525879, 0.029091840744018556, 0.02899942398071289, 0.02890777587890625, 0.029052671432495118, 0.029228992462158203, 0.02899558448791504, 0.028993471145629883, 0.02875430488586426, 0.029044416427612303, 0.029024576187133787, 0.029029632568359377, 0.029026912689208983, 0.02911609649658203, 0.029105632781982423, 0.02897817611694336, 0.029083648681640626, 0.029076831817626953, 0.028875423431396485, 0.028833791732788085, 0.029009599685668946, 0.029105855941772462, 0.029160064697265627, 0.02920857620239258, 0.02902182388305664, 0.032626880645751956, 0.029483200073242188, 0.029570528030395508, 0.03084137535095215, 0.029885536193847657, 0.03079814338684082, 0.03156595230102539, 0.030149375915527344, 0.030166751861572267, 0.029891616821289064, 0.029710880279541017, 0.02962060737609863, 0.02966329574584961, 0.029710336685180663, 0.029349727630615233, 0.029336992263793944, 0.02938047981262207, 0.029502336502075194, 0.029241344451904298, 0.029390111923217773, 0.029129440307617188, 0.02913030433654785, 0.029110111236572266, 0.029166175842285157, 0.029566976547241212, 0.02931110382080078, 0.029126527786254883, 0.03221196746826172, 0.030569375991821288, 0.029421663284301756, 0.029598848342895508, 0.029195135116577148, 0.029607295989990234, 0.029358720779418944, 0.02933350372314453, 0.03061759948730469, 0.02963046455383301, 0.02974224090576172, 0.02947977638244629, 0.029831167221069335, 0.029808767318725587, 0.029765504837036133, 0.029855424880981446, 0.029720096588134765, 0.029661983489990235, 0.02963046455383301, 0.0295229434967041, 0.02949836730957031, 0.029239295959472656, 0.029249536514282228, 0.029231103897094726, 0.029375743865966798, 0.029111040115356444, 0.029707967758178713, 0.029358400344848632, 0.02928371238708496, 0.029257440567016603, 0.02933852767944336, 0.029171712875366212, 0.02951932716369629, 0.02935215950012207, 0.029140480041503908, 0.029088287353515624, 0.029221151351928713, 0.02902355194091797, 0.02893484878540039, 0.03022233581542969, 0.029242944717407227, 0.029087167739868164, 0.02917852783203125, 0.029169376373291016, 0.030253440856933593, 0.02912076759338379, 0.02954444885253906, 0.029336864471435548, 0.02968844795227051, 0.030347007751464844, 0.029686111450195313, 0.029619199752807617, 0.03000422477722168, 0.029527488708496093, 0.029329984664916993, 0.02935379219055176, 0.02931043243408203, 0.029309663772583008, 0.029241344451904298, 0.02959974479675293, 0.030079231262207032, 0.02937651252746582, 0.02980454444885254, 0.02930998420715332, 0.02921366310119629, 0.0291627197265625, 0.02928825569152832, 0.029318431854248046, 0.02906822395324707, 0.02919705581665039, 0.029097087860107423, 0.02922742462158203, 0.02929302406311035, 0.0293536319732666, 0.029485408782958984, 
0.029388799667358398, 0.029506591796875, 0.02938751983642578, 0.029274335861206056, 0.029001247406005858, 0.029182079315185547, 0.029307167053222657, 0.029292415618896485, 0.029345983505249022, 0.029814207077026367, 0.029536832809448244, 0.02967046356201172, 0.0294716796875, 0.029671424865722655, 0.029654304504394532, 0.02948579216003418, 0.029419519424438476, 0.029445280075073244, 0.029293407440185548, 0.029106176376342774, 0.029077152252197265, 0.02912086486816406, 0.029212032318115234, 0.029126399993896483, 0.029049087524414062, 0.029340288162231446, 0.028895231246948243, 0.028903423309326173, 0.02898851203918457, 0.028828128814697266, 0.02881171226501465, 0.02896691131591797, 0.028860416412353516, 0.028985343933105468, 0.02871244812011719, 0.028741952896118163, 0.02904447937011719, 0.029692352294921873, 0.028573055267333985, 0.028422176361083986, 0.028353120803833006, 0.02852889633178711, 0.02842336082458496, 0.029079776763916015, 0.0320904655456543, 0.028661760330200195, 0.0284998722076416, 0.028741024017333985, 0.02921939277648926, 0.028690080642700195, 0.02837539291381836, 0.028370336532592775, 0.028367456436157228, 0.028763839721679688, 0.02862486457824707, 0.028731103897094726, 0.029035135269165038, 0.02880512046813965, 0.02869411277770996, 0.028842048645019533, 0.028743583679199217, 0.029053375244140624, 0.029054975509643553, 0.02898294448852539, 0.029057376861572264, 0.029911039352416992, 0.02894643211364746, 0.02893414306640625, 0.028753919601440428, 0.028846080780029298, 0.028618751525878908, 0.028673919677734375, 0.02867795181274414, 0.028665695190429687, 0.02854550361633301, 0.028811264038085937, 0.02852854347229004, 0.028414047241210938, 0.02883350372314453, 0.02845017623901367, 0.028427167892456053, 0.028488895416259766, 0.028588863372802736, 0.02857164764404297, 0.02861568069458008, 0.028406368255615235, 0.028602239608764648, 0.028512800216674804, 0.03017728042602539, 0.028383232116699218, 0.028391616821289063, 0.028335264205932617, 0.02840028762817383, 0.028499488830566407, 0.028516159057617188, 0.028576416015625, 0.028604415893554686, 0.028669567108154298, 0.028641279220581056, 0.028799360275268554, 0.028767776489257813, 0.02865740776062012, 0.028692672729492188, 0.028656160354614258, 0.02871049690246582, 0.028723615646362305, 0.028628992080688476, 0.028724895477294923, 0.02898099136352539, 0.029399648666381836, 0.028675680160522462]",tokens/s,34.53601170568327,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4415.11936,4707.975168,0.0,4305.453056,4304.491008,s,1,11.59986328125,11.59986328125,0.0,11.59986328125,11.59986328125,11.59986328125,11.59986328125,[11.59986328125],,kWh,0.0001269411279208043,1.399500317815278e-05,3.98894763559976e-05,0.00018082560745495468,,MB,4448.31744,4829.609984,0.0,4412.407808,4373.842432,s,10,1.9623368682861329,0.1962336868286133,0.0006252823265912817,0.19620695495605467,0.19687013854980467,0.19715223999023437,0.19737792114257813,"[0.19581907653808595, 0.19525216674804688, 0.19603631591796875, 0.1956294403076172, 0.19637759399414062, 0.195721923828125, 0.1968074493408203, 0.19667430114746093, 0.19743434143066407, 0.19658425903320312]",tokens/s,1304.567040131012,kWh,5.789630836110506e-06,6.382631315602204e-07,3.85367410690212e-06,1.0281568074572846e-05,tokens/kWh,24898925.74198957,MB,4458.610688,4844.290048,0.0,4429.185024,4373.844992,s,10,23.742114501953125,2.3742114501953124,0.015632395599483418,2.3700614013671872,2.3915708251953127,2.4010015991210936,2.4085462182617188,"[2.3574619140625, 2.366328369140625, 2.3652587890625, 2.3673447265625, 2.372778076171875, 2.376846923828125, 2.38947509765625, 2.410432373046875, 2.38122705078125, 2.354961181640625]",tokens/s,26.53512600776033,kWh,5.234218386930875e-05,5.771405713331326e-06,3.444805097889653e-05,9.256164056153662e-05,tokens/kWh,680627.5214851716,,s,630,23.738529830932595,0.03768020608084543,0.0005415232815325333,0.03756222343444825,0.03827065773010254,0.03846059131622315,0.03946921695709228,"[0.037668895721435544, 0.03738809585571289, 0.03710083389282227, 0.037264289855957033, 0.037571678161621096, 0.037583774566650394, 0.037575839996337894, 0.037561119079589846, 0.03716921615600586, 0.037408096313476566, 0.03709939193725586, 0.03718838500976562, 0.037166175842285154, 0.0370552978515625, 0.03710563278198242, 0.03704012680053711, 0.037391681671142575, 0.03764252853393555, 0.037517822265625, 0.03767001724243164, 0.03769232177734375, 0.03731990432739258, 0.03724889755249024, 0.03722243118286133, 0.03709632110595703, 0.03733036804199219, 0.03735017776489258, 0.03715868759155273, 0.03741491317749023, 0.037227710723876956, 0.03736406326293945, 0.03737238311767578, 0.03735327911376953, 0.03791622543334961, 0.03772227096557617, 0.03736937713623047, 0.037198143005371095, 0.03763407897949219, 0.03728854370117188, 0.03727155303955078, 0.03712988662719727, 0.03734767913818359, 0.037238784790039066, 0.03740467071533203, 0.03814118576049805, 0.03843292617797851, 0.03798483276367187, 0.037679134368896486, 0.03771187210083008, 0.03721420669555664, 0.03734732818603516, 0.037188640594482424, 0.03788899230957031, 0.03737395095825195, 0.03732400131225586, 0.03733379364013672, 0.037280895233154296, 0.03726835250854492, 0.0373205451965332, 0.037496990203857425, 0.037588672637939455, 0.03757497787475586, 0.03719168090820312, 0.037102783203125, 0.03719977569580078, 0.037122848510742185, 0.037152896881103514, 0.03725894546508789, 0.03720608139038086, 0.03722576141357422, 0.03753263854980469, 0.037335041046142575, 0.0372911376953125, 0.03717801666259766, 0.03827324676513672, 0.03902873611450195, 0.03895843124389648, 0.03764723205566406, 0.03756009674072266, 0.037443584442138675, 0.037562080383300785, 0.03782831954956055, 0.03777798461914063, 0.0373309440612793, 0.03740671920776367, 0.03734636688232422, 0.03727203369140625, 0.037412670135498045, 0.0373765754699707, 0.03728937530517578, 0.03732345581054688, 0.037566112518310546, 0.037574432373046876, 0.037552703857421876, 
0.037905567169189455, 0.03818521499633789, 0.037832416534423825, 0.037825408935546874, 0.03780352020263672, 0.03756697463989258, 0.03794739151000977, 0.03748147201538086, 0.037580833435058594, 0.03749753570556641, 0.037619998931884766, 0.03769343948364258, 0.0378238410949707, 0.03764028930664062, 0.0379991683959961, 0.03800083160400391, 0.03779155349731445, 0.037510753631591794, 0.037464481353759765, 0.03735318374633789, 0.037060897827148435, 0.0371486701965332, 0.03707699203491211, 0.0373043212890625, 0.03714771270751953, 0.037284351348876955, 0.037286334991455075, 0.037162494659423825, 0.037429759979248044, 0.03715212631225586, 0.0385968017578125, 0.03771612930297852, 0.0373752326965332, 0.03730508804321289, 0.03719887924194336, 0.037140766143798826, 0.03795024108886719, 0.037471935272216796, 0.03726972961425781, 0.03754377746582031, 0.03752758407592773, 0.037496192932128906, 0.03725183868408203, 0.037287105560302736, 0.037403457641601565, 0.03740262222290039, 0.03753945541381836, 0.03737235260009766, 0.037653728485107424, 0.037343807220458984, 0.037251232147216796, 0.03735686492919922, 0.03942854309082031, 0.03857843017578125, 0.037976062774658204, 0.04103299331665039, 0.03759990310668945, 0.037529632568359374, 0.037528705596923825, 0.03725353622436523, 0.03723929595947266, 0.037187583923339845, 0.03700297546386719, 0.03803305435180664, 0.03721484756469726, 0.03749824142456055, 0.037274238586425784, 0.03789376068115234, 0.03725683212280274, 0.0371781120300293, 0.037169151306152344, 0.037470176696777345, 0.03709955215454101, 0.03717299270629883, 0.037154560089111326, 0.037480960845947264, 0.03764223861694336, 0.037959007263183596, 0.03805446243286133, 0.03810915374755859, 0.038104705810546875, 0.03790240097045899, 0.03730681610107422, 0.03719145584106445, 0.037576927185058596, 0.03744153594970703, 0.037285888671875, 0.03729199981689453, 0.03709251022338867, 0.03722943878173828, 0.03777503967285156, 0.03719404983520508, 0.037310462951660156, 0.037086528778076173, 0.03700921630859375, 0.0372305908203125, 0.03715686416625977, 0.03706880187988281, 0.03727360153198242, 0.03706675338745117, 0.0373260498046875, 0.037370655059814455, 0.037566463470458986, 0.03762176132202148, 0.03751459121704102, 0.03710617446899414, 0.03709148788452148, 0.037238784790039066, 0.037383617401123045, 0.03701375961303711, 0.03746771240234375, 0.03849292755126953, 0.038309600830078124, 0.037756320953369144, 0.037206912994384764, 0.037337055206298826, 0.037283870697021486, 0.037185344696044925, 0.03714998245239258, 0.037364574432373045, 0.037248737335205076, 0.03715107345581055, 0.03769456100463867, 0.03942697525024414, 0.037631999969482424, 0.03739174270629883, 0.0372988166809082, 0.03768320083618164, 0.03737190246582031, 0.03756032180786133, 0.038059070587158204, 0.03855251312255859, 0.037375999450683595, 0.03730636978149414, 0.037119998931884765, 0.03712819290161133, 0.0372408332824707, 0.03718105697631836, 0.03783513641357422, 0.03765798568725586, 0.03755235290527344, 0.03784873580932617, 0.03778022384643555, 0.03765862274169922, 0.03750083160400391, 0.037717376708984375, 0.037472991943359374, 0.038174720764160154, 0.03733708953857422, 0.03800883102416992, 0.04069171142578125, 0.03749478530883789, 0.03736751937866211, 0.03741929626464844, 0.03726540756225586, 0.0374799690246582, 0.03812195205688477, 0.03763814544677734, 0.03730451202392578, 0.03769513702392578, 0.03785145568847656, 0.03734688186645508, 0.03712995147705078, 0.0373092155456543, 0.037007328033447265, 0.037179393768310545, 0.03713827133178711, 
0.03722387313842773, 0.037461761474609376, 0.037755104064941404, 0.037876480102539065, 0.03786137771606445, 0.0377262077331543, 0.038524063110351565, 0.037663585662841795, 0.037993568420410156, 0.03781315231323242, 0.03771596908569336, 0.03759308624267578, 0.037599231719970705, 0.03751891326904297, 0.03753414535522461, 0.03821491241455078, 0.0371712646484375, 0.03719852828979492, 0.037287296295166014, 0.038023807525634765, 0.038012928009033206, 0.03769110488891601, 0.037394718170166014, 0.0373021125793457, 0.037289344787597656, 0.03729843139648437, 0.03727993774414062, 0.037269855499267576, 0.037422431945800784, 0.04151772689819336, 0.038262462615966795, 0.038139678955078124, 0.038693408966064456, 0.037738494873046875, 0.03788083267211914, 0.03750300979614258, 0.037606369018554686, 0.03745526504516602, 0.03737456130981445, 0.037351425170898435, 0.03748803329467774, 0.03812393569946289, 0.03780969619750976, 0.037550750732421874, 0.03739955139160156, 0.037485118865966796, 0.03744982528686523, 0.037990558624267576, 0.037658817291259764, 0.03727974319458008, 0.03770127868652344, 0.03765871810913086, 0.03739263916015625, 0.03728793716430664, 0.038080318450927735, 0.03769683074951172, 0.03761651229858398, 0.03734511947631836, 0.03732905578613281, 0.037264511108398436, 0.038340991973876956, 0.03880553436279297, 0.03784460830688476, 0.03831280136108398, 0.03790028762817383, 0.03806745529174805, 0.03790310287475586, 0.03765212631225586, 0.03779183959960938, 0.03780019378662109, 0.03764223861694336, 0.037550048828125, 0.03735145568847656, 0.037449726104736326, 0.03737939071655273, 0.03787027359008789, 0.037754878997802735, 0.03759475326538086, 0.0375579833984375, 0.03741513442993164, 0.037781856536865235, 0.03802735900878906, 0.038088062286376956, 0.03784723281860351, 0.03877523040771484, 0.037459968566894535, 0.03740444946289063, 0.03725033569335937, 0.03733395385742187, 0.03748044967651367, 0.038055519104003906, 0.03794371032714844, 0.037475616455078124, 0.037933216094970704, 0.03746227264404297, 0.037845119476318356, 0.037535934448242186, 0.03748454284667969, 0.03727558517456055, 0.037332862854003904, 0.03717510223388672, 0.0373414077758789, 0.037230430603027345, 0.03725344085693359, 0.037171070098876956, 0.037120128631591795, 0.03705219268798828, 0.03707107162475586, 0.038449153900146485, 0.0382393913269043, 0.0383455696105957, 0.03825033569335937, 0.03818102264404297, 0.03817814254760742, 0.03801971054077148, 0.0382259521484375, 0.03786924743652344, 0.03875241470336914, 0.03793612670898437, 0.03762073516845703, 0.03777503967285156, 0.037833023071289065, 0.03757056045532227, 0.03762176132202148, 0.03778355026245117, 0.0375623664855957, 0.0375623664855957, 0.03716486358642578, 0.03789433670043945, 0.03723263931274414, 0.03741491317749023, 0.03743292617797851, 0.03791484832763672, 0.03730758285522461, 0.037311038970947265, 0.03727001571655274, 0.03717523193359375, 0.037518497467041015, 0.037673824310302736, 0.03776921463012695, 0.03798015975952149, 0.038438079833984375, 0.037833057403564456, 0.03772454452514649, 0.03830147171020508, 0.03811564636230469, 0.037894142150878905, 0.03782783889770508, 0.03787238311767578, 0.03783462524414063, 0.03782179260253906, 0.03792099380493164, 0.03786924743652344, 0.03836812973022461, 0.03811891174316406, 0.03903539276123047, 0.03821769714355469, 0.038147457122802736, 0.03828803253173828, 0.0381214714050293, 0.03810508728027344, 0.03797398376464844, 0.038115264892578125, 0.03804694366455078, 0.038030113220214844, 0.03797615814208984, 0.03793484878540039, 
0.03822780990600586, 0.038405601501464846, 0.038042110443115236, 0.038392257690429685, 0.038166526794433595, 0.03789209747314453, 0.038069793701171875, 0.03800931167602539, 0.037928958892822266, 0.038029312133789066, 0.03783065414428711, 0.03790195083618164, 0.03905779266357422, 0.038370689392089846, 0.03795846557617188, 0.03781017684936523, 0.03784089660644531, 0.038035457611083984, 0.037795841217041014, 0.037804031372070314, 0.038067649841308594, 0.03818143844604492, 0.03910854339599609, 0.03828521728515625, 0.03845955276489258, 0.03829759979248047, 0.03829721450805664, 0.037987968444824216, 0.037945953369140625, 0.0381684799194336, 0.03808691024780273, 0.037950782775878905, 0.03856428909301758, 0.040186111450195315, 0.038254177093505856, 0.03793552017211914, 0.03843475341796875, 0.037933120727539064, 0.03830374526977539, 0.03832627105712891, 0.03818086242675781, 0.03828736114501953, 0.03794524765014649, 0.03812361526489258, 0.03846960067749024, 0.037885982513427736, 0.03794716644287109, 0.03785363388061523, 0.038313758850097655, 0.03791782379150391, 0.03785408020019531, 0.0394769287109375, 0.03824399948120117, 0.03797663879394531, 0.03824371337890625, 0.038083038330078124, 0.03849763107299805, 0.03828822326660156, 0.03886905670166016, 0.03994214248657227, 0.03827097702026367, 0.038211166381835936, 0.038007198333740236, 0.038076416015625, 0.03945033645629883, 0.038959327697753905, 0.03790848159790039, 0.03796793746948242, 0.03802521514892578, 0.038043647766113284, 0.0381399040222168, 0.03794944000244141, 0.03808051300048828, 0.03804569625854492, 0.038115169525146486, 0.03793241500854492, 0.03827062225341797, 0.037660385131835936, 0.03794496154785156, 0.03802214431762695, 0.038338817596435544, 0.03784627151489258, 0.037902847290039066, 0.03787366485595703, 0.038215679168701173, 0.0380148811340332, 0.03792822265625, 0.037788478851318356, 0.04134707260131836, 0.038204639434814454, 0.038066654205322265, 0.03829996871948242, 0.03799039840698242, 0.03786675262451172, 0.03818368148803711, 0.03801839828491211, 0.037925537109375, 0.0379554557800293, 0.03809830474853516, 0.038443775177001954, 0.03802048110961914, 0.037972511291503905, 0.03780172729492187, 0.0375893440246582, 0.038670337677001954, 0.03830579376220703, 0.038029312133789066, 0.03748585510253906, 0.037316673278808596, 0.03726150512695312, 0.037308895111083984, 0.03846144104003906, 0.037807487487792966, 0.037383872985839846, 0.03833478546142578, 0.0373438720703125, 0.03738009643554688, 0.03728179168701172, 0.03716825485229492, 0.03724582290649414, 0.03728793716430664, 0.03728793716430664, 0.03766995239257812, 0.03729913711547852, 0.03725107192993164, 0.03782595062255859, 0.03814640045166016, 0.037609729766845706, 0.03734092712402344, 0.03728819274902344, 0.03726921463012695, 0.03718313598632812, 0.03727833557128906, 0.03719168090820312, 0.037289440155029295, 0.0372270393371582, 0.037154815673828126, 0.03742886352539063, 0.03753398513793945, 0.03782489776611328, 0.03734662246704101, 0.0373480339050293, 0.037168704986572265, 0.03773276901245117, 0.037101505279541015, 0.037255264282226565, 0.0370700798034668, 0.03774540710449219, 0.037548030853271484, 0.037418495178222655, 0.037160606384277345, 0.037286590576171875, 0.037488414764404294, 0.03724940872192383, 0.03807401657104492, 0.037235038757324215, 0.03719372940063476, 0.037125473022460935, 0.037339809417724606, 0.037246849060058596, 0.038430110931396484, 0.037354209899902346, 0.03722403335571289, 0.03708335876464844, 0.03716524887084961, 0.03701756668090821, 0.037084991455078126, 
0.037079166412353516, 0.03690630340576172, 0.03705311965942383, 0.03751689529418945, 0.03715532684326172, 0.03726540756225586, 0.03726505661010742, 0.037779808044433594, 0.03762128067016601, 0.03736950302124024, 0.03808729553222656, 0.03720534515380859, 0.037235553741455076, 0.03728998565673828, 0.03776102447509765, 0.03752345657348633, 0.0373306884765625, 0.03778995132446289, 0.03876233673095703, 0.03725881576538086, 0.03719638442993164, 0.03716700744628906, 0.03715081787109375, 0.03714591979980469, 0.038109886169433595, 0.03714252853393555, 0.03712790298461914, 0.03710800170898437, 0.03711795043945312, 0.037150527954101564, 0.0374189453125, 0.03727385711669922, 0.03728521728515625, 0.0370978889465332, 0.03761321640014648]",tokens/s,26.539132982830093,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ 
self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in 
load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 
66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, 
**kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpes7n4r9m/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp9uxqxet5/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4384.014336,4566.482944,0.0,4188.012544,4187.049984,s,1,10.3226669921875,10.3226669921875,0.0,10.3226669921875,10.3226669921875,10.3226669921875,10.3226669921875,[10.3226669921875],,kWh,9.785378997917177e-05,1.0785661533696069e-05,3.2396137028004035e-05,0.00014103558854087187,,MB,4337.463296,4962.844672,0.0,4555.014144,4514.269184,s,10,7.85943878173828,0.7859438781738282,0.003248193699247347,0.7875285034179688,0.7886953369140625,0.7889753234863282,0.7891993127441406,"[0.78, 0.7875231323242188, 0.7826694946289062, 0.7807692260742187, 0.7872844848632813, 0.7876341552734375, 0.788135986328125, 0.7892553100585937, 0.7875338745117187, 0.7886331176757813]",tokens/s,325.72300276048486,kWh,2.281208450769251e-05,2.5157658233373223e-06,1.5158153152154578e-05,4.048600348318441e-05,tokens/kWh,6323172.898661828,MB,4346.912768,4979.621888,0.0,4571.79136,4514.271744,s,10,466.6024882812499,46.660248828125,0.011068309855742515,46.662541015625,46.666687499999995,46.672259765625,46.676717578125,"[46.632265625, 46.65224609375, 46.66014453125, 46.6612578125, 46.6629609375, 46.663859375, 46.66212109375, 46.6643515625, 46.66544921875, 46.67783203125]",tokens/s,1.3501856844369426,kWh,0.0013613940065339734,0.000150172057835159,0.0009055239445040442,0.0024170900088731767,tokens/kWh,26064.39965773967,,s,630,466.5971835937501,0.7406304501488096,0.0003842440844463258,0.7406415100097656,0.741086279296875,0.7412395568847657,0.7414922277832031,"[0.7400195922851562, 0.7400505981445312, 0.7398154907226563, 0.739328857421875, 0.7396557006835938, 0.7402882080078125, 0.739842529296875, 0.7398804931640625, 0.7397766723632813, 0.7399896240234375, 0.739989501953125, 0.7401123046875, 0.7397409057617188, 0.7400776977539063, 0.7401336059570313, 0.7396638793945313, 0.7399824829101562, 0.7398463745117188, 0.740166259765625, 0.7398889770507813, 0.7399959716796874, 0.740632080078125, 0.7399937133789063, 0.740153564453125, 0.740021728515625, 0.7406412963867187, 0.7402557373046875, 0.7396383056640625, 0.7405346069335937, 0.74044873046875, 0.7398190307617187, 0.7404366455078125, 0.7408128051757813, 0.7404246826171875, 0.740150146484375, 0.7401944580078125, 0.7401615600585938, 0.7403519897460937, 0.739842041015625, 0.740030029296875, 0.7403851928710937, 0.7400409545898438, 0.7400709228515625, 0.740415771484375, 0.74038623046875, 0.7406367797851563, 0.7403157958984375, 0.7405259399414063, 0.7403190307617188, 0.7402066650390625, 0.7406757202148437, 0.74048681640625, 0.7396448974609375, 0.740488037109375, 0.7407861938476562, 0.7405541381835937, 0.739809814453125, 0.740874267578125, 0.740378662109375, 0.7405140380859375, 0.7401420288085937, 0.74058544921875, 0.740694580078125, 0.7398834228515625, 0.740550048828125, 0.7404017944335938, 0.740384765625, 
0.7400115966796875, 0.7401517333984375, 0.7411056518554687, 0.7402147827148438, 0.7400131225585938, 0.7413422241210937, 0.7399955444335937, 0.7396536254882813, 0.7408514404296875, 0.7402305908203125, 0.740455078125, 0.7398401489257812, 0.7406387329101562, 0.7405711059570312, 0.7406399536132813, 0.7395582275390625, 0.74058544921875, 0.7408004760742187, 0.7407980346679688, 0.739641845703125, 0.7406991577148437, 0.7408834228515625, 0.7402168579101562, 0.739751220703125, 0.7406128540039062, 0.740921142578125, 0.7402518310546875, 0.74022705078125, 0.7406018676757813, 0.7409004516601563, 0.7403721313476562, 0.740874755859375, 0.7404523315429687, 0.7409985961914063, 0.7404183349609375, 0.7406793823242187, 0.7401761474609375, 0.7410494384765625, 0.7407820434570312, 0.7404300537109375, 0.740877197265625, 0.7407839965820312, 0.7402352905273437, 0.7404441528320312, 0.7406465454101563, 0.7406863403320313, 0.740124755859375, 0.7403067016601562, 0.7408099365234375, 0.7403179931640625, 0.7408599243164062, 0.7406141357421875, 0.7409703369140626, 0.740460693359375, 0.7409868774414062, 0.7406669311523437, 0.7407661743164062, 0.7410028076171875, 0.74051953125, 0.740729248046875, 0.7405772705078125, 0.7405260620117188, 0.740441162109375, 0.7405618286132812, 0.7406755981445312, 0.739999755859375, 0.7403212890625, 0.7403572998046875, 0.7405899658203124, 0.739885498046875, 0.7405383911132812, 0.74066943359375, 0.7408038940429688, 0.740121337890625, 0.7403797607421875, 0.7404747314453125, 0.7406744995117187, 0.7401647338867188, 0.7403672485351562, 0.7405545654296875, 0.7406163940429688, 0.740849853515625, 0.74037841796875, 0.7402281494140625, 0.7402581176757812, 0.7403209838867187, 0.7402026977539062, 0.74078076171875, 0.7406192626953125, 0.7403645629882812, 0.74097021484375, 0.740475341796875, 0.7406515502929687, 0.7405623779296875, 0.7410548095703124, 0.740423828125, 0.7406408081054687, 0.7409862060546875, 0.7403770141601562, 0.7402435302734375, 0.7409392700195313, 0.7409180297851562, 0.7407205810546875, 0.7408023071289063, 0.7407756958007813, 0.7408173217773437, 0.7409131469726562, 0.7404827880859375, 0.7408663330078125, 0.7405254516601563, 0.7406817016601562, 0.7409219970703125, 0.7408558349609375, 0.7406814575195313, 0.7406544799804687, 0.7410221557617187, 0.7406570434570312, 0.7411595458984375, 0.7401569213867187, 0.74084814453125, 0.74292431640625, 0.7409152221679688, 0.741015625, 0.7407022094726563, 0.7403618774414062, 0.73979931640625, 0.7403438110351562, 0.7413043212890625, 0.740691650390625, 0.7397128295898437, 0.7405382690429687, 0.7412589721679688, 0.7402750244140625, 0.7407606201171875, 0.7405452270507813, 0.7407579956054687, 0.7404172973632812, 0.7403253784179687, 0.7409540405273437, 0.7409331665039063, 0.7402005004882812, 0.7401491088867187, 0.7404910888671875, 0.7404653930664062, 0.7410072631835938, 0.7403982543945312, 0.7402630615234375, 0.7406876831054687, 0.7406591186523438, 0.7409129028320313, 0.740784423828125, 0.7405977783203125, 0.7408927001953125, 0.740636474609375, 0.7404173583984375, 0.7406964721679687, 0.7409541015625, 0.7407647705078125, 0.7403014526367188, 0.7405775146484375, 0.7406959838867188, 0.7411856079101562, 0.7405547485351562, 0.7409561767578124, 0.74051953125, 0.7405894775390625, 0.7410322265625, 0.7404525146484375, 0.740384033203125, 0.74058154296875, 0.7407244262695313, 0.7407064208984375, 0.7408749389648438, 0.7411825561523437, 0.7406376342773437, 0.7403233032226563, 0.7412362670898438, 0.7411880493164062, 0.7403493041992187, 0.7403728637695313, 0.7409031982421875, 
0.7412422485351563, 0.7403444213867187, 0.74039501953125, 0.7407388916015625, 0.7407108764648438, 0.7405159301757812, 0.7409315795898438, 0.7405479125976563, 0.7402473754882812, 0.7405596923828125, 0.7411990966796875, 0.7401375122070313, 0.74032568359375, 0.7403480834960937, 0.7403026733398438, 0.7407575073242187, 0.7404391479492187, 0.7401747436523437, 0.7403162841796875, 0.7408889770507813, 0.7405757446289063, 0.7406051635742188, 0.7404649658203125, 0.7399645385742187, 0.7404735107421875, 0.7410853271484374, 0.740697509765625, 0.740595458984375, 0.740115234375, 0.7407564697265625, 0.7409378051757812, 0.7409120483398437, 0.7404903564453125, 0.740961181640625, 0.7407882080078125, 0.740537353515625, 0.7407932739257812, 0.7409603271484375, 0.7407615966796876, 0.7405029296875, 0.7403587646484375, 0.7408844604492187, 0.7408968505859375, 0.7407646484375, 0.7403816528320313, 0.7410360107421875, 0.740190185546875, 0.741265380859375, 0.7407677001953125, 0.7410004272460937, 0.74047119140625, 0.7410343017578125, 0.7408720703125, 0.7408909301757812, 0.7405621948242187, 0.7408171997070313, 0.740706787109375, 0.7409868774414062, 0.7408309936523437, 0.7405748291015625, 0.7406619262695312, 0.7407206420898438, 0.7407114868164062, 0.7407252197265625, 0.7413704833984375, 0.7406524047851563, 0.7408787841796876, 0.7406328125, 0.7405247802734375, 0.7409304809570313, 0.7407001342773437, 0.7402608032226563, 0.7400745239257812, 0.7405209350585937, 0.7406243896484375, 0.74045849609375, 0.7405711059570312, 0.7408148193359375, 0.7404664916992187, 0.7404846801757813, 0.7408441772460937, 0.7406868896484375, 0.7407542724609375, 0.7403439331054688, 0.7405230712890625, 0.740754150390625, 0.74096630859375, 0.740278564453125, 0.7405875244140625, 0.740833251953125, 0.7408721923828125, 0.7404459228515625, 0.7403925170898438, 0.74037841796875, 0.7408239135742187, 0.7405977783203125, 0.7402426147460938, 0.7409152221679688, 0.7409365234375, 0.7402453002929688, 0.7410056762695313, 0.7410706787109375, 0.74068994140625, 0.740447509765625, 0.7406742553710938, 0.7407472534179688, 0.74175927734375, 0.7406363525390625, 0.7406817626953125, 0.74133642578125, 0.7406576538085937, 0.7406693725585938, 0.7409738159179687, 0.7410184936523437, 0.7404336547851562, 0.7407271728515625, 0.7404195556640625, 0.7414859008789062, 0.7402501831054688, 0.7405077514648437, 0.7404359741210937, 0.741105224609375, 0.7406533203125, 0.7404771118164063, 0.74070654296875, 0.7406445922851562, 0.7408680419921875, 0.7405767822265625, 0.7410755004882813, 0.74109130859375, 0.7405181274414062, 0.7403685913085938, 0.7405951538085938, 0.7404239501953125, 0.7404251098632812, 0.7405381469726563, 0.740688232421875, 0.7403665161132813, 0.7406198120117188, 0.740737548828125, 0.7408297119140625, 0.7404478759765625, 0.7403399047851562, 0.7411488037109375, 0.7406000366210937, 0.740068115234375, 0.7401787719726562, 0.7406900024414063, 0.7399046020507812, 0.7412335815429687, 0.740421630859375, 0.7405916137695312, 0.7405240478515625, 0.7406632690429688, 0.7404830932617188, 0.7408414916992188, 0.7406366577148438, 0.7406956176757813, 0.7402152099609375, 0.7408025512695312, 0.741148681640625, 0.7409144897460938, 0.7405612182617187, 0.74076318359375, 0.7404671630859375, 0.7405343017578125, 0.7407120361328124, 0.7408722534179687, 0.7407449951171875, 0.7402197875976563, 0.7407444458007812, 0.7409302978515625, 0.7409007568359375, 0.74053857421875, 0.741087158203125, 0.7408059692382812, 0.7411715087890625, 0.74035205078125, 0.7410333251953125, 0.7403152465820313, 0.740950927734375, 
0.7404436645507813, 0.7409464111328125, 0.7409313354492187, 0.7408642578125, 0.7404564208984376, 0.7408292236328125, 0.7408162841796875, 0.7409790649414062, 0.7411129760742188, 0.7406704711914063, 0.7402434692382812, 0.7409337768554688, 0.7404783935546875, 0.7400045166015625, 0.740599609375, 0.7409668579101563, 0.7405609130859375, 0.74033740234375, 0.7406492309570313, 0.7410005493164062, 0.7402352294921875, 0.7402005004882812, 0.7404649047851563, 0.7413662109375, 0.74050537109375, 0.7403521728515625, 0.7408446655273437, 0.7406520385742188, 0.7407183227539063, 0.7406736450195313, 0.740123779296875, 0.74053515625, 0.7404544067382812, 0.7410208129882813, 0.7403468017578125, 0.7404906005859375, 0.7409271850585938, 0.7403507080078126, 0.7404238891601562, 0.7406466064453125, 0.7406123657226562, 0.7408271484375, 0.7403028564453125, 0.7402880249023438, 0.7413002319335937, 0.740756103515625, 0.7403969116210938, 0.7409766235351563, 0.741285888671875, 0.7410333862304688, 0.740450927734375, 0.74140283203125, 0.7407388305664062, 0.7408353271484375, 0.7404111328125, 0.7407590942382812, 0.7405862426757812, 0.7411541137695312, 0.7404346313476563, 0.7409149780273437, 0.7412155151367188, 0.7401030883789063, 0.7410667724609376, 0.7412072143554688, 0.7405166625976562, 0.74062353515625, 0.741194580078125, 0.7408453979492188, 0.7408536987304688, 0.7405017700195312, 0.74096630859375, 0.7404544067382812, 0.741702880859375, 0.7403590087890625, 0.7408206787109375, 0.7402210083007813, 0.7412155151367188, 0.74056689453125, 0.7406417236328126, 0.7403038940429687, 0.7407339477539062, 0.7400856323242188, 0.7402881469726562, 0.7404854736328125, 0.7403048706054688, 0.7405787353515625, 0.740346435546875, 0.7411138305664062, 0.7401697387695313, 0.7402978515625, 0.7403488159179688, 0.740921142578125, 0.7409185180664063, 0.7402034912109375, 0.7403060302734376, 0.7403733520507813, 0.7407513427734375, 0.7413466796875, 0.7407335815429688, 0.740063232421875, 0.7410543212890625, 0.7406931762695312, 0.741095458984375, 0.7402077026367188, 0.7405111694335937, 0.740576904296875, 0.7409139404296875, 0.74109130859375, 0.7403963012695313, 0.7408911743164063, 0.7407228393554688, 0.740656494140625, 0.7409578857421875, 0.7406044311523438, 0.7414297485351562, 0.741580810546875, 0.740294677734375, 0.7404707641601562, 0.7409067993164062, 0.7414436645507813, 0.740173828125, 0.740737060546875, 0.7412010498046875, 0.7408501586914062, 0.7404363403320312, 0.7409541015625, 0.7410216674804687, 0.7404930419921875, 0.74058984375, 0.7407490844726563, 0.7411278686523437, 0.740725341796875, 0.7408616333007813, 0.7412691650390625, 0.7407438354492187, 0.7408267211914062, 0.741392822265625, 0.740957275390625, 0.7410277709960937, 0.7412838745117187, 0.7407637329101563, 0.7409290771484375, 0.7401904907226563, 0.7408988037109375, 0.740469970703125, 0.7405146484375, 0.7407565307617188, 0.74098583984375, 0.7407430419921875, 0.7400154418945313, 0.7405875854492188, 0.7409999389648437, 0.7410193481445313, 0.7408694458007813, 0.7402786865234375, 0.740968505859375, 0.7406876220703125, 0.7410165405273438, 0.7406650390625, 0.7407493896484375, 0.7404891967773437, 0.7407401733398438, 0.7408728637695312, 0.740468994140625, 0.7417009887695313, 0.7409425048828125, 0.7408291625976563, 0.740797607421875, 0.7413209228515625, 0.740708984375, 0.7406807861328125, 0.7412581787109375, 0.7409722900390625, 0.7409625244140625, 0.7412992553710938, 0.7411414184570313, 0.7412796630859375, 0.7408763427734375, 0.7417642211914063, 0.74086083984375, 0.7408189697265625, 0.7406930541992187, 
0.7413052978515625, 0.7410216674804687, 0.7411827392578125, 0.741086181640625, 0.7409776611328125, 0.7408381958007813, 0.7414948120117187, 0.7412696533203125, 0.740348876953125, 0.740801025390625, 0.7411981811523437, 0.7411402587890625, 0.7410465087890625, 0.740831298828125, 0.7412284545898438, 0.7412449951171876, 0.740482177734375, 0.7411937255859375, 0.7407173461914063]",tokens/s,1.350201034536289,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1235, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1037, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 763, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 557, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpez1zazry/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1174, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 894, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 507, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 436, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 59167 has 14.69 GiB memory in use. Of the allocated memory 14.29 GiB is allocated by PyTorch, and 313.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = 
block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpzayuox3i/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking 
context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs 
= self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 4 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 28.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 192203 has 14.74 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 80.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 107730 has 14.64 GiB memory in use. Of the allocated memory 14.24 GiB is allocated by PyTorch, and 312.03 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1349, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1142, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 852, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 604, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 22577 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp4_8t_k5a/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpypqlu7n4/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise 
RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpsk0jz8r1/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 58.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 130263 has 14.69 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 193.68 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in 
run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpq_8o8mfj/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in 
run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpuyfaw2ko/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3900, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in 
run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in 
run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpgpbskzdx/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4353.662976,6253.576192,0.0,5851.05408,5850.293248,s,1,13.43523046875,13.43523046875,0.0,13.43523046875,13.43523046875,13.43523046875,13.43523046875,[13.43523046875],,kWh,0.00011552100522083225,1.273548332158396e-05,3.81872527719998e-05,0.000166443741314416,,MB,4455.845888,6368.919552,0.0,5951.717376,5922.919424,s,10,2.042951889038086,0.20429518890380863,0.0005764272242085995,0.2043500518798828,0.20499408264160157,0.20500892791748046,0.2050208041381836,"[0.2032725830078125, 0.20455424499511718, 0.20404818725585938, 0.20427906799316406, 0.20333526611328126, 0.2047478790283203, 0.20432272338867188, 0.20502377319335938, 0.20437738037109374, 0.20499078369140625]",tokens/s,1253.0887358318375,kWh,5.992506990476375e-06,6.608654146028158e-07,3.977871663020425e-06,1.0631244068099615e-05,tokens/kWh,24079966.404699538,MB,4459.905024,6383.599616,0.0,5966.39744,5922.921984,s,10,20.19313903808594,2.0193139038085937,0.0051289884957970554,2.0185454711914064,2.0265273071289065,2.0276191833496093,2.028492684326172,"[2.02628466796875, 2.016418212890625, 2.0287110595703126, 2.017899658203125, 2.017953125, 2.0191378173828123, 2.010199462890625, 2.014671630859375, 2.01951025390625, 2.0223531494140623]",tokens/s,31.198715504893404,kWh,5.9091085152439815e-05,6.517611972527023e-06,3.9292885175779655e-05,0.0001049015823007465,tokens/kWh,600562.9144790476,,s,630,20.190220825195315,0.032047969563802084,0.00043743496736233607,0.03196361637115479,0.032334534454345705,0.032560089302062986,0.03367718029022217,"[0.03343110275268555, 0.032532737731933596, 0.03226467132568359, 0.03193862342834473, 0.031841600418090824, 0.03191695976257324, 0.031884544372558596, 0.032076545715332035, 0.031941856384277344, 0.03192911911010742, 0.032, 0.03207321548461914, 0.031828479766845705, 0.031926271438598636, 0.03182572746276856, 0.031936704635620115, 0.03200400161743164, 0.03185625648498535, 0.03226467132568359, 0.032075103759765626, 0.03202073669433594, 0.03197583961486816, 0.03207961654663086, 0.03196339225769043, 0.032151073455810544, 0.032285152435302736, 0.03220479965209961, 0.03215564727783203, 0.03214131164550781, 0.03227782440185547, 0.032040767669677735, 0.03241664123535156, 0.032505855560302735, 0.03223756790161133, 0.0320552978515625, 0.0320184326171875, 0.03197747230529785, 0.03560812759399414, 0.03221052932739258, 0.032510814666748045, 0.032083393096923825, 0.03253430557250977, 0.03222608184814453, 0.03219251251220703, 0.03202252960205078, 0.03201388931274414, 0.03205958557128906, 0.03197958374023437, 0.031901887893676754, 0.03211468887329102, 0.03198566436767578, 0.03214662551879883, 0.031986207962036134, 0.03196137619018555, 0.03207372665405273, 0.03210147094726563, 0.032154529571533204, 0.0321036491394043, 0.03187788772583008, 
0.03195049667358398, 0.032073089599609375, 0.03206860733032227, 0.03200156784057617, 0.03277427291870117, 0.03195289611816406, 0.031938560485839845, 0.031987232208251955, 0.032020961761474606, 0.031971328735351565, 0.0321814079284668, 0.03224828720092773, 0.032010623931884766, 0.032032577514648435, 0.03185062408447266, 0.032271713256835935, 0.03193315124511719, 0.031854591369628905, 0.03186483192443847, 0.03183206367492676, 0.031898784637451175, 0.031960927963256835, 0.032135295867919925, 0.032007038116455076, 0.03191142463684082, 0.03188787269592285, 0.03183987236022949, 0.03187251281738281, 0.03191423988342285, 0.03287104034423828, 0.03206175994873047, 0.03228742218017578, 0.03236307144165039, 0.032334400177001954, 0.031713184356689454, 0.03225107192993164, 0.031854976654052736, 0.03184848022460938, 0.03346384048461914, 0.031963840484619144, 0.031876415252685544, 0.032011104583740235, 0.03178291130065918, 0.0317061767578125, 0.03180159950256348, 0.03171945571899414, 0.03198633575439453, 0.03188476753234863, 0.031835968017578126, 0.03183020782470703, 0.03184284782409668, 0.031837472915649416, 0.031848447799682614, 0.03181971168518066, 0.03175094413757324, 0.03178854370117187, 0.03179980850219727, 0.031838207244873046, 0.03176652717590332, 0.03173785591125488, 0.03216918563842774, 0.03254687881469726, 0.032063232421875, 0.032102401733398435, 0.03186121559143067, 0.031848064422607424, 0.03189955139160156, 0.03255507278442383, 0.032078655242919925, 0.03201433563232422, 0.03201638412475586, 0.03234201431274414, 0.032040382385253904, 0.033102401733398436, 0.03402361679077148, 0.03217353439331055, 0.03194879913330078, 0.03224611282348633, 0.032086017608642575, 0.03204710388183594, 0.03204832077026367, 0.03211673736572265, 0.03216672134399414, 0.03210137557983399, 0.032115070343017575, 0.03245676803588867, 0.03253510284423828, 0.03218227386474609, 0.03244803237915039, 0.032061695098876956, 0.03205699157714844, 0.0319290885925293, 0.032107711791992184, 0.03204150390625, 0.032505504608154295, 0.03211923217773437, 0.03208515167236328, 0.032076641082763674, 0.032116287231445315, 0.032086463928222654, 0.033685504913330076, 0.03287449645996094, 0.03240083312988281, 0.032124481201171874, 0.0321385612487793, 0.03199558448791504, 0.032276481628417966, 0.03207273483276367, 0.03207408142089844, 0.031988351821899415, 0.03210153579711914, 0.03199020767211914, 0.031941024780273435, 0.032029918670654293, 0.03180009651184082, 0.031850496292114255, 0.03227612686157227, 0.03249955368041992, 0.03223948669433594, 0.032256641387939454, 0.03217407989501953, 0.031854591369628905, 0.03180944061279297, 0.03212227249145508, 0.03187065505981445, 0.03181590461730957, 0.03190454483032226, 0.031848447799682614, 0.03236182403564453, 0.03201241683959961, 0.03257561492919922, 0.03187504005432129, 0.0318047046661377, 0.03211135864257812, 0.03179520034790039, 0.031968767166137696, 0.03188582420349121, 0.03179929542541504, 0.032468128204345706, 0.031769439697265624, 0.031752191543579104, 0.032661502838134765, 0.03216790390014648, 0.031966848373413084, 0.03235676956176758, 0.03203276824951172, 0.032030559539794924, 0.03191619110107422, 0.031991519927978516, 0.03180361557006836, 0.03223148727416992, 0.031788223266601565, 0.03155641555786133, 0.031831743240356446, 0.031899967193603516, 0.031752191543579104, 0.03183193588256836, 0.031822015762329105, 0.03183523178100586, 0.032002750396728515, 0.031946239471435545, 0.0319597110748291, 0.03191193580627441, 0.03179625511169434, 0.03205795288085937, 0.03190320014953613, 
0.031935359954833986, 0.03180143928527832, 0.03181356811523438, 0.03200307083129883, 0.031939584732055666, 0.03223961639404297, 0.03195289611816406, 0.03203087997436523, 0.032511646270751954, 0.03237612915039063, 0.03229692840576172, 0.032019264221191404, 0.032094303131103515, 0.032214496612548826, 0.03219919967651367, 0.0319815673828125, 0.032118785858154295, 0.032006206512451174, 0.03203241729736328, 0.03222147369384765, 0.03218227386474609, 0.03209353637695313, 0.032069950103759765, 0.03231369781494141, 0.03218431854248047, 0.03206758499145508, 0.032045055389404296, 0.03266182327270508, 0.031954944610595705, 0.03171737670898438, 0.03193619155883789, 0.032205120086669925, 0.03203014373779297, 0.03234668731689453, 0.03223302459716797, 0.03191238403320312, 0.03186892890930176, 0.03310505676269531, 0.03201315307617188, 0.03185868835449219, 0.03216291046142578, 0.03196623992919922, 0.03189529609680176, 0.031946880340576175, 0.03175395202636719, 0.03184259223937988, 0.0317395191192627, 0.031826240539550785, 0.031985727310180664, 0.03194195175170898, 0.031982271194458005, 0.03197542381286621, 0.03181318473815918, 0.03170694351196289, 0.03202511978149414, 0.03183625602722168, 0.032020481109619144, 0.03186812782287598, 0.03176326370239258, 0.03365679931640625, 0.03248463821411133, 0.03182393646240234, 0.032061088562011716, 0.03208419036865234, 0.03180624008178711, 0.03213516616821289, 0.03203420639038086, 0.03193712043762207, 0.03190543937683105, 0.032161182403564456, 0.031906656265258786, 0.031876512527465824, 0.0319489917755127, 0.03184867286682129, 0.031866943359375, 0.03217552185058594, 0.03205734252929687, 0.031886144638061525, 0.03184464073181152, 0.032368350982666015, 0.032247806549072264, 0.0319815673828125, 0.03192831993103027, 0.0318832950592041, 0.03194451141357422, 0.03191004753112793, 0.03202374267578125, 0.0320909423828125, 0.03195248031616211, 0.031938560485839845, 0.03336969757080078, 0.03220896148681641, 0.03202249526977539, 0.03533452987670899, 0.037211456298828126, 0.0320206413269043, 0.03233436965942383, 0.03206553649902344, 0.03193446350097656, 0.03184406471252441, 0.03194908714294434, 0.03192569541931152, 0.03187324714660644, 0.03195939254760742, 0.03227033615112305, 0.03199795150756836, 0.03186006355285644, 0.03203452682495117, 0.03178560066223145, 0.03164601516723633, 0.03193600082397461, 0.03178342437744141, 0.031737344741821286, 0.03189401626586914, 0.03162521553039551, 0.03185168075561524, 0.03185955238342285, 0.031666175842285156, 0.0317740478515625, 0.0317587833404541, 0.031792928695678714, 0.032276927947998045, 0.03195689582824707, 0.031960639953613285, 0.03185500717163086, 0.031827104568481444, 0.031970272064208986, 0.03191718482971191, 0.03187766456604004, 0.03179555130004883, 0.03173990440368652, 0.03170931243896485, 0.032054878234863284, 0.031750431060791014, 0.03172966384887695, 0.03182307243347168, 0.03206835174560547, 0.03165113639831543, 0.031811519622802736, 0.03173612785339355, 0.03169123268127441, 0.03175424003601074, 0.03191398429870605, 0.03191801643371582, 0.03167782402038574, 0.03181161689758301, 0.03239324951171875, 0.031916671752929685, 0.03172352027893066, 0.03190480041503906, 0.03191423988342285, 0.031784767150878905, 0.03161942481994629, 0.03265577697753906, 0.032005184173583986, 0.031839168548583985, 0.0318791675567627, 0.032001472473144534, 0.03198624038696289, 0.031733760833740236, 0.031803136825561525, 0.032239070892333986, 0.031782815933227536, 0.0317520637512207, 0.031705408096313475, 0.03172140884399414, 0.03177753639221191, 
0.031992864608764646, 0.03177481651306152, 0.03178995132446289, 0.03172047996520996, 0.03165692710876465, 0.03164159965515137, 0.031767744064331055, 0.03173664093017578, 0.03171545600891113, 0.03177424049377441, 0.03166652870178223, 0.031968704223632814, 0.032066303253173827, 0.03190764808654785, 0.03276800155639648, 0.03187731170654297, 0.03162470436096192, 0.03178512001037598, 0.031843807220458986, 0.03197407913208008, 0.031834112167358396, 0.031870975494384765, 0.03187727928161621, 0.03179830360412598, 0.03179110336303711, 0.032153823852539065, 0.03188796806335449, 0.03181158447265625, 0.03188857650756836, 0.031917951583862306, 0.03185894393920898, 0.03178358459472656, 0.03181769561767578, 0.03182819175720215, 0.0317762565612793, 0.03185654449462891, 0.03187756729125977, 0.03188227272033691, 0.03256419372558594, 0.03184230422973633, 0.031905792236328126, 0.03191398429870605, 0.03189142417907715, 0.03237068939208984, 0.03196879959106445, 0.03191859245300293, 0.031893503189086916, 0.0319180793762207, 0.03226406478881836, 0.032852577209472655, 0.03233574295043945, 0.03190592002868652, 0.03180544090270996, 0.03186223983764649, 0.03208451080322266, 0.03182387161254883, 0.031870080947875974, 0.03176915168762207, 0.031689023971557616, 0.031649791717529296, 0.03175833511352539, 0.03160233688354492, 0.03160707283020019, 0.031727680206298826, 0.03187711906433106, 0.03211264038085938, 0.03191923141479492, 0.03174019241333008, 0.0322808952331543, 0.032245216369628904, 0.031812416076660154, 0.03173785591125488, 0.03190352058410645, 0.03175158309936523, 0.0316485767364502, 0.03167411231994629, 0.03232329559326172, 0.03182815933227539, 0.032076126098632814, 0.031757631301879884, 0.03168649673461914, 0.03169331169128418, 0.031865184783935546, 0.03183206367492676, 0.03174102401733398, 0.031832128524780276, 0.03171004867553711, 0.03159449577331543, 0.03171705627441406, 0.03165750312805176, 0.031679040908813474, 0.03184048080444336, 0.03171123123168945, 0.03188457679748535, 0.0318450870513916, 0.03212284851074219, 0.03334147262573242, 0.03556556701660156, 0.031959039688110355, 0.03180121612548828, 0.03242816162109375, 0.032107742309570315, 0.031922176361083986, 0.031865631103515625, 0.03237068939208984, 0.031899168014526365, 0.03178656005859375, 0.031967647552490236, 0.03201279830932617, 0.03198054313659668, 0.03204198455810547, 0.03187711906433106, 0.032699520111083985, 0.03195689582824707, 0.031880159378051755, 0.031834112167358396, 0.032087070465087894, 0.032019390106201175, 0.03193046379089355, 0.03210847854614258, 0.03209174346923828, 0.03207526397705078, 0.03199068832397461, 0.03204438400268555, 0.031908512115478516, 0.03257254409790039, 0.032133502960205076, 0.03212364959716797, 0.0320816650390625, 0.0319180793762207, 0.03197542381286621, 0.03196108818054199, 0.03272499084472656, 0.0319749755859375, 0.032020927429199215, 0.03219625473022461, 0.03199935913085938, 0.031845344543457034, 0.03215564727783203, 0.031893503189086916, 0.031944351196289064, 0.0319081916809082, 0.032777984619140624, 0.03297919845581055, 0.032056926727294925, 0.031866399765014645, 0.03181158447265625, 0.03201011276245117, 0.031902719497680664, 0.03189142417907715, 0.03177884864807129, 0.03202252960205078, 0.03196435165405274, 0.03194675254821777, 0.031834943771362305, 0.03184992027282715, 0.03171180725097656, 0.03224371337890625, 0.032161792755126956, 0.03175814437866211, 0.032170177459716794, 0.031999616622924804, 0.03210652923583984, 0.032224704742431644, 0.031982080459594726, 0.03205161666870117, 0.03207167816162109, 
0.03207110214233398, 0.03186540794372559, 0.032077728271484376, 0.031811552047729494, 0.03201241683959961, 0.0321638412475586, 0.031993791580200194, 0.03198899269104004, 0.03292153549194336, 0.03213532638549805, 0.03230710220336914, 0.0319768009185791, 0.032197280883789064, 0.031923583984375, 0.031880895614624025, 0.03289798355102539, 0.032276481628417966, 0.03202252960205078, 0.032169921875, 0.03219804763793945, 0.03199216079711914, 0.03201055908203125, 0.03203481674194336, 0.032138496398925784, 0.031953632354736326, 0.03195846366882324, 0.032059040069580075, 0.032484161376953126, 0.03176665687561035, 0.0318525447845459, 0.031791072845458984, 0.031789087295532224, 0.03166313552856445, 0.03188425636291504, 0.03175823974609375, 0.03191203117370606, 0.03193036842346191, 0.03200614547729492, 0.031909887313842776, 0.031843551635742186, 0.031914783477783204, 0.03197337532043457, 0.03190784072875977, 0.03189292716979981, 0.03184025573730469, 0.03194467163085937, 0.03221712112426758, 0.03445971298217773, 0.03268387222290039, 0.032521888732910155, 0.032031742095947266, 0.03210854339599609, 0.03207372665405273, 0.03201795196533203, 0.03203334426879883, 0.03195075225830078, 0.03181516838073731, 0.03209471893310547, 0.03218841552734375, 0.03213312149047851, 0.031859935760498045, 0.031979551315307615, 0.0319465274810791, 0.032390113830566405, 0.032024574279785153, 0.0322410888671875, 0.03192451286315918, 0.032140735626220704, 0.032043872833251955, 0.03208143997192383, 0.03197321510314941]",tokens/s,31.203224841098567,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4348.66176,6253.576192,0.0,5851.05408,5850.293248,s,1,13.153853515625,13.153853515625,0.0,13.153853515625,13.153853515625,13.153853515625,13.153853515625,[13.153853515625],,kWh,0.00011701633530417439,1.2900362979212507e-05,3.8144197182003736e-05,0.00016806089546539063,,MB,4317.696,6368.919552,0.0,5951.717376,5923.050496,s,10,2.123413284301758,0.21234132843017578,0.0006266375365747859,0.2125782699584961,0.2129613952636719,0.21299703216552734,0.2130255416870117,"[0.21255673217773438, 0.2130326690673828, 0.21272767639160156, 0.21219612121582032, 0.21259980773925782, 0.21189900207519533, 0.2120560302734375, 0.21295347595214845, 0.210779296875, 0.2126124725341797]",tokens/s,1205.6060960557686,kWh,6.22663144051418e-06,6.866479197337513e-07,4.12433545076594e-06,1.1037614811013871e-05,tokens/kWh,23193416.72845393,MB,4324.225024,6383.599616,0.0,5966.39744,5923.053056,s,10,23.10874267578125,2.310874267578125,0.0079398044342355,2.30845849609375,2.3193526123046877,2.323701330566406,2.327180305175781,"[2.328050048828125, 2.306549560546875, 2.31838623046875, 2.306042724609375, 2.30158447265625, 2.30126171875, 2.317349365234375, 2.3126015625, 2.3075537109375, 
2.30936328125]",tokens/s,27.262409246534276,kWh,6.775018199448545e-05,7.472793897015518e-06,4.4939064911034255e-05,0.00012016204080253523,tokens/kWh,524292.0274925191,,s,630,23.10581096267699,0.03667589041694762,0.0005571584557686987,0.03657051086425781,0.03700735855102539,0.03723766250610352,0.03838498100280762,"[0.03758524703979492, 0.036942176818847657, 0.036984321594238284, 0.03731903839111328, 0.03691968154907226, 0.03682515335083008, 0.03675334548950195, 0.03690496063232422, 0.036754432678222655, 0.03679743957519531, 0.0368353271484375, 0.036773887634277344, 0.036951393127441404, 0.036652767181396484, 0.03664787292480469, 0.03681894302368164, 0.036724510192871096, 0.036784351348876955, 0.03714857482910156, 0.0367529296875, 0.03688288116455078, 0.03669171142578125, 0.03676198577880859, 0.03692675018310547, 0.03670908737182617, 0.03800841522216797, 0.03719136047363281, 0.03680681610107422, 0.03664902496337891, 0.036757408142089845, 0.03685174560546875, 0.037155391693115235, 0.036759552001953126, 0.03690496063232422, 0.03683238220214844, 0.03672710418701172, 0.03693414306640625, 0.036909152984619144, 0.03677174377441406, 0.036853824615478516, 0.03679596710205078, 0.03666105651855469, 0.036800350189208984, 0.03672256088256836, 0.037008289337158204, 0.037326847076416016, 0.03698483276367188, 0.03678406524658203, 0.03686316680908203, 0.038386558532714846, 0.03701939010620117, 0.03695027160644531, 0.03703705596923828, 0.036860927581787106, 0.0370335693359375, 0.0382017593383789, 0.03687366485595703, 0.036964927673339844, 0.036947967529296875, 0.036923393249511716, 0.03684745788574219, 0.036756671905517575, 0.03703087997436524, 0.037558815002441404, 0.03678822326660156, 0.03688604736328125, 0.03683129501342773, 0.03705487823486328, 0.037163009643554686, 0.03682918548583984, 0.03683295822143555, 0.036778305053710936, 0.03670809555053711, 0.03665536117553711, 0.037031936645507815, 0.0373205451965332, 0.03688214492797852, 0.037015777587890625, 0.036554977416992186, 0.03643814468383789, 0.03647808074951172, 0.03647564697265625, 0.03643590545654297, 0.03636435317993164, 0.03649945449829101, 0.036468734741210936, 0.036239360809326174, 0.03630284881591797, 0.036222240447998044, 0.03631692886352539, 0.03648611068725586, 0.036413345336914066, 0.03647206497192383, 0.036428478240966795, 0.03627788925170899, 0.03640921783447266, 0.036570049285888674, 0.036390048980712894, 0.03641759872436524, 0.03645264053344727, 0.03648944091796875, 0.03655680084228516, 0.03667283248901367, 0.036520641326904295, 0.03653200149536133, 0.03652118301391601, 0.03646156692504883, 0.03636540985107422, 0.03641977691650391, 0.03645718383789062, 0.03655868911743164, 0.03661948776245117, 0.03632342529296875, 0.03656380844116211, 0.03642777633666992, 0.03670220947265625, 0.036480350494384764, 0.036434593200683596, 0.0365404167175293, 0.036706302642822264, 0.036503551483154296, 0.03695001602172852, 0.036678913116455075, 0.03675417709350586, 0.03699302291870117, 0.03656473541259766, 0.03883766555786133, 0.03681955337524414, 0.03672038269042969, 0.036701473236083984, 0.03663494491577148, 0.036590240478515626, 0.036636672973632815, 0.037127742767333986, 0.036674209594726566, 0.03673680114746094, 0.036533985137939456, 0.03655299377441406, 0.03651107025146484, 0.03648400115966797, 0.03652345657348633, 0.03681232070922852, 0.03764508819580078, 0.03683712005615234, 0.03668134307861328, 0.036538558959960936, 0.036612545013427734, 0.036730209350585935, 0.036522655487060546, 0.03654611206054687, 0.036329471588134765, 0.03706924819946289, 
0.03648716735839844, 0.03645964813232422, 0.03626668930053711, 0.036660415649414066, 0.036307262420654296, 0.03681964874267578, 0.03827916717529297, 0.03662992095947266, 0.03648060989379883, 0.036391937255859375, 0.03668582534790039, 0.03641465759277344, 0.03626886367797852, 0.03640899276733398, 0.03626019287109375, 0.03633504104614258, 0.0363928337097168, 0.03630112075805664, 0.03649369430541992, 0.03635776138305664, 0.0362213134765625, 0.03640447998046875, 0.03636300659179687, 0.03642777633666992, 0.03643392181396484, 0.03639091110229492, 0.036691967010498046, 0.037445087432861325, 0.03659148788452148, 0.03647964859008789, 0.04683900833129883, 0.036530975341796876, 0.03647836685180664, 0.03629248046875, 0.036311775207519534, 0.03668377685546875, 0.03640480041503906, 0.03751366424560547, 0.037318656921386716, 0.036706302642822264, 0.03661129760742188, 0.03650870513916016, 0.036431617736816406, 0.036593505859375, 0.03682015991210937, 0.03650864028930664, 0.036495361328125, 0.03651583862304687, 0.03700735855102539, 0.03674879837036133, 0.03675945663452149, 0.036751968383789066, 0.03651324844360351, 0.03658544158935547, 0.03653907012939453, 0.03646656036376953, 0.03638800048828125, 0.03639177703857422, 0.03630899047851562, 0.036280319213867186, 0.03636633682250977, 0.03644416046142578, 0.03639910507202149, 0.03644224166870117, 0.03647020721435547, 0.036456897735595704, 0.03653017425537109, 0.03660723114013672, 0.037483264923095706, 0.03652937698364258, 0.03663113784790039, 0.0365428466796875, 0.036480831146240233, 0.03672671890258789, 0.036435264587402344, 0.036550655364990234, 0.03694259262084961, 0.0367630386352539, 0.03691990280151367, 0.03666476821899414, 0.03654304122924805, 0.036999488830566404, 0.036512767791748044, 0.03650006484985351, 0.036468830108642575, 0.036341342926025394, 0.03680460739135742, 0.03632988739013672, 0.036402721405029294, 0.03628694534301758, 0.03643008041381836, 0.037005054473876954, 0.03640095901489258, 0.03643376159667969, 0.037300575256347654, 0.036450366973876956, 0.03648303985595703, 0.03627619171142578, 0.03624256134033203, 0.03640147018432617, 0.03725708770751953, 0.03653235244750976, 0.03647488021850586, 0.03647488021850586, 0.03640019226074219, 0.03630176162719727, 0.03639295959472656, 0.03627967834472656, 0.03638345718383789, 0.03644144058227539, 0.036366943359375, 0.03637039947509765, 0.036495361328125, 0.036386016845703126, 0.036305694580078124, 0.0363397102355957, 0.036361793518066406, 0.036380958557128903, 0.0363603515625, 0.036320575714111326, 0.03634630584716797, 0.0362064323425293, 0.03674972915649414, 0.03629875183105469, 0.03646985626220703, 0.036761886596679685, 0.03655129623413086, 0.036311294555664064, 0.036413185119628905, 0.03646057510375977, 0.03632944107055664, 0.036241153717041015, 0.03620684814453125, 0.036849056243896484, 0.036676193237304686, 0.03629785537719726, 0.03640585708618164, 0.036346145629882816, 0.03644416046142578, 0.0364031982421875, 0.03637619018554687, 0.03633606338500977, 0.03643590545654297, 0.03647689437866211, 0.036622142791748045, 0.0364233283996582, 0.037139007568359375, 0.03936438369750977, 0.03658755111694336, 0.03657648086547852, 0.036375808715820315, 0.03642118453979492, 0.0364791374206543, 0.037154815673828126, 0.03671654510498047, 0.03663193511962891, 0.036510337829589845, 0.036429824829101565, 0.03653017425537109, 0.03645964813232422, 0.03668672180175781, 0.036650081634521485, 0.03652288055419922, 0.03715116882324219, 0.03684524917602539, 0.03672208023071289, 0.036693023681640624, 0.03652364730834961, 
0.036591392517089844, 0.03654908752441406, 0.03636627197265625, 0.036315231323242186, 0.03869692611694336, 0.036603904724121096, 0.03645347213745117, 0.03715779113769531, 0.03663622283935547, 0.03650809478759766, 0.03636147308349609, 0.03650624084472656, 0.03659174346923828, 0.036480640411376955, 0.03673740768432617, 0.03639459228515625, 0.036337310791015626, 0.03641980743408203, 0.03637680053710937, 0.036397632598876954, 0.036396800994873045, 0.03625574493408203, 0.036587520599365236, 0.03645180892944336, 0.036695903778076175, 0.036581569671630856, 0.036184574127197264, 0.036294654846191404, 0.036452350616455076, 0.03639936065673828, 0.03637631988525391, 0.03643392181396484, 0.03625094223022461, 0.03635065460205078, 0.03673702239990234, 0.036638046264648436, 0.03653494262695312, 0.036468734741210936, 0.03646223831176758, 0.03645833587646485, 0.03655523300170899, 0.03640838241577148, 0.03635481643676758, 0.03641980743408203, 0.03641872024536133, 0.036390750885009766, 0.03635507202148437, 0.03626803207397461, 0.036394432067871095, 0.03646262359619141, 0.03642761611938477, 0.036635326385498046, 0.03643392181396484, 0.03635919952392578, 0.03654083251953125, 0.03645907211303711, 0.036225025177001956, 0.036429183959960934, 0.037486270904541014, 0.03655916976928711, 0.03647075271606445, 0.03644419097900391, 0.03644211196899414, 0.036598079681396486, 0.03633760070800781, 0.036296001434326174, 0.036292800903320314, 0.03629081726074219, 0.036451904296875, 0.03647123336791992, 0.03645977783203125, 0.03676646423339844, 0.03647283172607422, 0.036708351135253905, 0.03639910507202149, 0.03663407897949219, 0.03628905487060547, 0.03656310272216797, 0.03639206314086914, 0.03638140869140625, 0.03658137512207031, 0.0364031982421875, 0.03643801498413086, 0.036509567260742185, 0.03682867050170899, 0.03713907241821289, 0.03682918548583984, 0.03710543823242188, 0.03838111877441406, 0.036628704071044925, 0.03715935897827148, 0.03674863815307617, 0.036608768463134767, 0.037275550842285156, 0.03681705474853516, 0.03661398315429688, 0.03674425506591797, 0.03687059020996094, 0.03675596618652344, 0.03652521514892578, 0.03693807983398437, 0.03692291259765625, 0.03664108657836914, 0.0369071044921875, 0.036778209686279296, 0.036904510498046876, 0.036893470764160156, 0.036781150817871096, 0.0365761604309082, 0.03721392059326172, 0.036571422576904294, 0.03704975891113281, 0.03695267105102539, 0.03987577438354492, 0.03680255889892578, 0.03696099090576172, 0.03661616134643555, 0.03713036727905274, 0.03707289505004883, 0.03662380981445312, 0.03666182327270508, 0.03752422332763672, 0.03681075286865235, 0.036790271759033204, 0.036511745452880856, 0.036685440063476564, 0.03746035385131836, 0.03695759963989258, 0.03664239883422851, 0.0368076171875, 0.03656710433959961, 0.036496673583984375, 0.03675619125366211, 0.037100929260253906, 0.03699564743041992, 0.03698080062866211, 0.03675279998779297, 0.03690131378173828, 0.03656828689575195, 0.036543422698974606, 0.03675078582763672, 0.03674153518676758, 0.03671206283569336, 0.03645289611816406, 0.03631232070922852, 0.03681488037109375, 0.03672342300415039, 0.03645254516601563, 0.03664672088623047, 0.036765697479248044, 0.03653174209594726, 0.03672111892700195, 0.03654227066040039, 0.0365467529296875, 0.0363966064453125, 0.03639545440673828, 0.03644163131713867, 0.03648969650268555, 0.03652403259277344, 0.036708351135253905, 0.03657318496704102, 0.03689814376831055, 0.03740943908691406, 0.036603904724121096, 0.03661529541015625, 0.036675712585449216, 0.03659225463867188, 
0.036517696380615236, 0.03649728012084961, 0.036656959533691406, 0.036587806701660154, 0.036421985626220704, 0.03674262237548828, 0.0375027847290039, 0.03703881454467774, 0.03663267135620117, 0.03684956741333008, 0.03671615982055664, 0.036593631744384764, 0.03652790451049805, 0.03659360122680664, 0.036487873077392576, 0.036512832641601566, 0.0365366096496582, 0.03754873657226562, 0.03699737548828125, 0.03649331283569336, 0.03670771026611328, 0.036856449127197266, 0.036601150512695316, 0.03641376113891601, 0.03658172988891602, 0.03663056182861328, 0.036464000701904295, 0.03648780822753906, 0.03659775924682617, 0.03630284881591797, 0.03629676818847656, 0.036628414154052734, 0.03653558349609375, 0.03659772872924805, 0.03634444808959961, 0.036388992309570316, 0.03650886535644531, 0.0364920654296875, 0.03636137771606445, 0.03629759979248047, 0.036393985748291016, 0.03663471984863281, 0.03641027069091797, 0.03719193649291992, 0.03657638549804688, 0.036641151428222656, 0.03648489761352539, 0.036434398651123044, 0.036683200836181644, 0.03670483016967773, 0.03632332611083984, 0.036378623962402344, 0.036327423095703124, 0.036351295471191404, 0.03651449584960938, 0.03644211196899414, 0.03670016098022461, 0.037211456298828126, 0.03685548782348633, 0.036580352783203124, 0.036521217346191404, 0.03690367889404297, 0.03952767944335937, 0.03651264190673828, 0.036593536376953124, 0.03637369537353516, 0.03657401657104492, 0.03643801498413086, 0.03664896011352539, 0.03638886260986328, 0.036618240356445314, 0.03687974548339844, 0.03661273574829101, 0.036447742462158206, 0.03641759872436524, 0.03656060791015625, 0.03664316940307617, 0.036561023712158205, 0.036534526824951175, 0.03651583862304687, 0.038037086486816404, 0.03664668655395508, 0.03667622375488281, 0.0363287353515625, 0.03665584182739258, 0.03680198287963867, 0.036704193115234374, 0.03746060943603516, 0.03657033538818359, 0.036467327117919925, 0.036731040954589844, 0.036528129577636716, 0.03688211059570313, 0.036821407318115236, 0.036924446105957034, 0.03662876892089844, 0.03683135986328125, 0.03661257553100586, 0.036660736083984374, 0.036565502166748046, 0.037092609405517576, 0.03654646301269531, 0.036423839569091794, 0.036424705505371094, 0.03656060791015625, 0.03667555236816406, 0.03634380722045898, 0.0363737907409668, 0.036461280822753905, 0.03633475112915039, 0.036442047119140626, 0.03652044677734375, 0.03655311965942383, 0.03645644760131836, 0.03661756896972656, 0.03655337524414062, 0.036556255340576174, 0.036602241516113285, 0.03660524749755859, 0.03664368057250977, 0.03700735855102539, 0.03682304000854492, 0.036603904724121096, 0.03688143920898437, 0.036582366943359375, 0.03669401550292969, 0.03637247848510742, 0.03653200149536133, 0.036835521697998044, 0.03657068634033203, 0.036700607299804684, 0.03652547073364258, 0.03679702377319336, 0.03655478286743164, 0.03659980773925781, 0.036681278228759766, 0.03656345748901367, 0.036499393463134765, 0.036659328460693356, 0.03647475051879883, 0.03668787384033203, 0.03662335968017578, 0.036485153198242186]",tokens/s,27.265868357429394,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4370.98496,6253.576192,0.0,5851.05408,5850.293248,s,1,13.2494208984375,13.2494208984375,0.0,13.2494208984375,13.2494208984375,13.2494208984375,13.2494208984375,[13.2494208984375],,kWh,0.00012064934233333227,1.3301098215800752e-05,3.76044745279986e-05,0.00017155491507713161,,MB,4335.816704,6368.919552,0.0,5951.717376,5923.050496,s,10,2.0851052398681644,0.2085105239868164,0.0011794560568761084,0.20832423400878908,0.20970063323974608,0.2104054542541504,0.21096931106567382,"[0.20656643676757813, 0.20831829833984375, 0.2075572509765625, 0.20835081481933593, 0.20933074951171876, 0.20777023315429688, 0.2111102752685547, 0.2082270050048828, 0.20833016967773438, 0.20954400634765624]",tokens/s,1227.7557751290587,kWh,6.11213611701397e-06,6.739845911788873e-07,4.071502099791671e-06,1.0857622807984528e-05,tokens/kWh,23577905.083582528,MB,4340.355072,6383.599616,0.0,5966.39744,5923.053056,s,10,22.879815673828123,2.287981567382812,0.005160809802555283,2.288186279296875,2.2946955566406246,2.2952701416015624,2.2957298095703127,"[2.287844970703125, 2.286124755859375, 2.275664794921875, 2.29456787109375, 2.286869140625, 2.289166748046875, 2.2958447265625, 2.286433349609375, 2.288771728515625, 2.288527587890625]",tokens/s,27.5351868643176,kWh,6.714134671756912e-05,7.40549894833729e-06,4.435506441920847e-05,0.00011890191008511487,tokens/kWh,529848.5108851659,,s,630,22.87670812988281,0.03631223512679812,0.0005025744039973181,0.03622491264343262,0.036575476455688476,0.03677225685119629,0.037788888282775886,"[0.03734723281860351, 0.036168800354003904, 0.03623833465576172, 0.03631718444824219, 0.03633356857299805, 0.03619430541992188, 0.036364288330078126, 0.03622604751586914, 0.036137248992919924, 0.036281055450439456, 0.036257312774658206, 0.03637404632568359, 0.03624844741821289, 0.03612847900390625, 0.036272480010986326, 0.036275680541992185, 0.03629897689819336, 0.03622739028930664, 0.03609600067138672, 0.036289695739746095, 0.03637699127197266, 0.036176319122314456, 0.03663004684448242, 0.036405120849609375, 0.03617443084716797, 0.03636019134521484, 0.036316287994384765, 0.036721534729003906, 0.036490718841552736, 0.036241950988769533, 0.036163040161132816, 0.03623990249633789, 0.036257793426513675, 0.03644416046142578, 0.036302143096923825, 0.03625823974609375, 0.03664499282836914, 0.036081790924072266, 0.03617587280273438, 0.03627977752685547, 0.036388671875, 0.036297439575195316, 0.03629235076904297, 0.036216449737548825, 0.03668851089477539, 0.03625555038452148, 0.03635539245605469, 0.03625033569335938, 0.036296222686767576, 0.03622348785400391, 0.03641718292236328, 0.03626031875610351, 0.03615881729125976, 0.03630505752563477, 0.03669385528564453, 0.03628214263916016, 0.03632940673828125, 0.03626694488525391, 0.03615059280395508, 0.03617862319946289, 0.03602431869506836, 
0.03629056167602539, 0.03610771179199219, 0.036738624572753904, 0.03609449768066406, 0.03611983871459961, 0.036415233612060546, 0.036381118774414065, 0.03629103851318359, 0.036374591827392576, 0.036697696685791016, 0.03668137741088867, 0.03624832153320313, 0.03630192184448242, 0.036233440399169925, 0.03636080169677734, 0.03653833770751953, 0.0362760009765625, 0.036222366333007815, 0.036528446197509765, 0.03626374435424805, 0.03620943832397461, 0.03620662307739258, 0.03658137512207031, 0.0362410888671875, 0.03619974517822266, 0.036127742767333985, 0.03627942276000977, 0.036160545349121095, 0.03617145538330078, 0.03614451217651367, 0.036090335845947265, 0.03644448089599609, 0.03661369705200195, 0.03625360107421875, 0.03619887924194336, 0.036076671600341795, 0.03620463943481445, 0.03627884674072265, 0.03633180618286133, 0.03814739227294922, 0.036517921447753905, 0.03625436782836914, 0.036034561157226565, 0.0362333755493164, 0.03598649597167969, 0.036088096618652345, 0.03611699295043945, 0.03627196884155273, 0.03604086303710938, 0.03624905776977539, 0.036294784545898434, 0.03610383987426758, 0.03602876663208008, 0.03609151840209961, 0.03601609420776367, 0.03621308898925781, 0.036006366729736325, 0.03601408004760742, 0.03609907150268555, 0.035915809631347655, 0.03626697540283203, 0.03674521636962891, 0.03623526382446289, 0.035915775299072264, 0.036364288330078126, 0.036670497894287106, 0.03611542510986328, 0.03589734268188476, 0.03598745727539063, 0.03617715072631836, 0.03661695861816406, 0.03636019134521484, 0.03621887969970703, 0.0361267204284668, 0.03606284713745117, 0.03614879989624024, 0.03604982376098633, 0.03604409790039063, 0.03604729461669922, 0.03597942352294922, 0.036087806701660154, 0.03641251373291016, 0.03601830291748047, 0.0360643196105957, 0.035913440704345705, 0.03610214233398437, 0.03609145736694336, 0.0360247688293457, 0.03599763107299805, 0.036046913146972656, 0.0360447998046875, 0.036007232666015625, 0.03627487945556641, 0.036138206481933596, 0.036200897216796875, 0.03603286361694336, 0.03608310317993164, 0.03617852783203125, 0.036205726623535155, 0.03628732681274414, 0.036166816711425784, 0.03612144088745117, 0.03617555236816406, 0.03614547348022461, 0.03615948867797852, 0.03609731292724609, 0.036059009552001954, 0.03626224136352539, 0.03611427307128906, 0.03607199859619141, 0.036053089141845705, 0.03609292984008789, 0.03607628631591797, 0.03609763336181641, 0.03608966445922852, 0.03646345520019531, 0.036122623443603515, 0.036038654327392575, 0.03601408004760742, 0.036071422576904294, 0.036065055847167966, 0.036108512878417966, 0.03600137710571289, 0.03595625686645508, 0.03600883102416992, 0.03590553665161133, 0.0360731201171875, 0.03602467346191406, 0.03724031829833984, 0.03623267364501953, 0.03625056076049805, 0.03666124725341797, 0.03616972732543945, 0.036155521392822264, 0.03611225509643555, 0.036116481781005856, 0.03611833572387695, 0.03601017761230469, 0.03607529449462891, 0.03611401748657227, 0.0360577278137207, 0.035966976165771485, 0.03656204986572266, 0.03606208038330078, 0.03607545471191406, 0.03604819107055664, 0.03603327941894531, 0.036106239318847655, 0.0360851821899414, 0.0360250244140625, 0.03592998504638672, 0.03599155044555664, 0.036036449432373045, 0.03744784164428711, 0.03679846572875976, 0.03693494415283203, 0.03598409652709961, 0.03616329574584961, 0.03618812942504883, 0.03600812911987305, 0.03632044982910156, 0.03598611068725586, 0.035999614715576174, 0.03615964889526367, 0.03645430374145508, 0.036270496368408206, 0.03619830322265625, 
0.03619193649291992, 0.036160865783691404, 0.03614761734008789, 0.03754860687255859, 0.03601408004760742, 0.03616563034057617, 0.03599731063842773, 0.03602048110961914, 0.03628153610229492, 0.03928131103515625, 0.037843231201171876, 0.03637619018554687, 0.03637491226196289, 0.03647078323364258, 0.036904159545898436, 0.03668374252319336, 0.036304767608642576, 0.036748222351074215, 0.03672063827514648, 0.03678003311157227, 0.036245502471923825, 0.03646582412719727, 0.036467456817626955, 0.03888921737670899, 0.03751731109619141, 0.03629897689819336, 0.036759071350097657, 0.03628630447387695, 0.036507457733154294, 0.036371040344238284, 0.03629584121704101, 0.03632828903198242, 0.036541664123535156, 0.036418014526367185, 0.03623968124389648, 0.03659088134765625, 0.036192062377929685, 0.03614812850952148, 0.03610764694213867, 0.03605788803100586, 0.036101184844970706, 0.03614323043823242, 0.03602691268920898, 0.03608176040649414, 0.036081695556640626, 0.036190208435058595, 0.036412704467773435, 0.03639164733886719, 0.03636838531494141, 0.03626393508911133, 0.0363765754699707, 0.036222976684570314, 0.036569087982177735, 0.036259742736816404, 0.03633932876586914, 0.03628019332885742, 0.036106849670410154, 0.03618751907348633, 0.036294849395751956, 0.03628895950317383, 0.03622675323486328, 0.03616950225830078, 0.03631568145751953, 0.036206592559814454, 0.036378623962402344, 0.0361448974609375, 0.03630720138549805, 0.03620419311523437, 0.03657766342163086, 0.03617993545532226, 0.03622851181030273, 0.03616435241699219, 0.03624489593505859, 0.03614761734008789, 0.036192222595214846, 0.03606534576416016, 0.03619635009765625, 0.036246849060058595, 0.03638137435913086, 0.03673865509033203, 0.03630531311035156, 0.03621446228027344, 0.03638508987426758, 0.036239360809326174, 0.03616083145141601, 0.036141761779785155, 0.036163135528564455, 0.03694387054443359, 0.03625027084350586, 0.03605110549926758, 0.03644527816772461, 0.03613126373291015, 0.03636678314208985, 0.03630899047851562, 0.03635200119018555, 0.03619635009765625, 0.03645766448974609, 0.03601081466674805, 0.036222625732421875, 0.03605724716186524, 0.036012222290039066, 0.03630435180664063, 0.03592041778564453, 0.03608313751220703, 0.03608425521850586, 0.036023712158203124, 0.03595328140258789, 0.03618751907348633, 0.036065921783447266, 0.0360447998046875, 0.03609743881225586, 0.0359486083984375, 0.03619689559936524, 0.03624297714233399, 0.03611702346801758, 0.03609183883666992, 0.03621433639526367, 0.036046398162841796, 0.03632761764526367, 0.036027072906494144, 0.03619430541992188, 0.03600998306274414, 0.03621683120727539, 0.03641481781005859, 0.03603708648681641, 0.03619174575805664, 0.03911705780029297, 0.036281024932861325, 0.03620751953125, 0.0368361930847168, 0.03756032180786133, 0.03657523345947265, 0.036257537841796875, 0.03619168090820313, 0.03613126373291015, 0.0363873291015625, 0.0362105598449707, 0.0364031982421875, 0.03633356857299805, 0.036329471588134765, 0.03647078323364258, 0.0364496955871582, 0.03648940658569336, 0.03629817581176758, 0.036434913635253904, 0.03648419189453125, 0.03638364791870117, 0.03736297607421875, 0.03637295913696289, 0.036417472839355466, 0.04354147338867188, 0.03696739196777344, 0.03630483245849609, 0.036394878387451174, 0.03619561767578125, 0.036184993743896485, 0.036083198547363284, 0.0363587532043457, 0.03634985733032227, 0.03626710510253906, 0.036225120544433595, 0.03634259033203125, 0.036362239837646484, 0.03646464157104492, 0.036462593078613284, 0.03632332611083984, 0.03628790283203125, 
0.036545120239257815, 0.036222110748291014, 0.03635068893432617, 0.03632550430297852, 0.036364288330078126, 0.036478977203369144, 0.036517887115478515, 0.03636380767822266, 0.037362144470214846, 0.036435966491699216, 0.03626521682739258, 0.036489982604980466, 0.03623446273803711, 0.03637123107910156, 0.03625983810424805, 0.03647238540649414, 0.03619680023193359, 0.03652608108520508, 0.03622707366943359, 0.03631846237182617, 0.036178688049316406, 0.03616947174072266, 0.03643008041381836, 0.03620217514038086, 0.036009376525878906, 0.036606529235839846, 0.03622681427001953, 0.03625225448608398, 0.03614720153808594, 0.036316192626953125, 0.03614316940307617, 0.0361993293762207, 0.036171775817871094, 0.03666124725341797, 0.03614720153808594, 0.03616748809814453, 0.036194110870361326, 0.03621507263183594, 0.03638691329956055, 0.03622470474243164, 0.03603897476196289, 0.036173824310302735, 0.03614720153808594, 0.03616563034057617, 0.03625936126708985, 0.03625814437866211, 0.03733760070800781, 0.03651583862304687, 0.03630624008178711, 0.03703878402709961, 0.03624345779418945, 0.036236640930175784, 0.036201118469238285, 0.03637452697753906, 0.036192256927490236, 0.03629372787475586, 0.03610835266113281, 0.03620131301879883, 0.03645849609375, 0.03629875183105469, 0.0362655029296875, 0.036147678375244144, 0.0361181755065918, 0.0362151985168457, 0.036218814849853516, 0.03624755096435547, 0.036328639984130856, 0.03622966384887695, 0.036161823272705076, 0.03618201446533203, 0.03618611145019531, 0.03630489730834961, 0.03653836822509766, 0.03617315292358399, 0.03615100860595703, 0.035951168060302734, 0.036166240692138675, 0.03623503875732422, 0.036098175048828125, 0.03611145782470703, 0.03607632064819336, 0.03614031982421875, 0.036321537017822265, 0.036349407196044924, 0.03632793426513672, 0.03608793640136719, 0.03611481475830078, 0.036083713531494144, 0.03628396987915039, 0.03601177597045899, 0.03612035369873047, 0.036063743591308595, 0.03605136108398437, 0.03607756805419922, 0.03765584182739258, 0.03711187362670899, 0.03623593521118164, 0.036257278442382815, 0.03595315170288086, 0.03605299377441406, 0.03617145538330078, 0.036118110656738284, 0.03608444976806641, 0.03604019165039062, 0.03632086563110352, 0.03609673690795898, 0.03635833740234375, 0.03627212905883789, 0.03744768142700195, 0.037115615844726564, 0.03654288101196289, 0.03627008056640625, 0.03647475051879883, 0.03624496078491211, 0.03622140884399414, 0.03616553497314453, 0.036609790802001954, 0.03612294387817383, 0.03630908966064453, 0.03634995269775391, 0.03623526382446289, 0.036183807373046876, 0.03624921417236328, 0.036159454345703126, 0.03615100860595703, 0.036063167572021486, 0.03621923065185547, 0.036102207183837894, 0.03611856079101562, 0.036134529113769534, 0.03651228713989258, 0.03624591827392578, 0.0360816650390625, 0.036122623443603515, 0.03616726303100586, 0.03629507064819336, 0.03607251358032226, 0.03635865783691406, 0.03637702560424805, 0.03612057495117187, 0.03641548919677735, 0.036171775817871094, 0.036124671936035156, 0.0360816650390625, 0.03634175872802734, 0.03612422561645508, 0.03622751998901367, 0.0361082878112793, 0.03598652648925781, 0.036060062408447266, 0.03607756805419922, 0.03647078323364258, 0.03624476623535156, 0.03666198348999023, 0.03621683120727539, 0.03670425415039062, 0.03658956909179688, 0.036633758544921874, 0.03717942428588867, 0.03619718551635742, 0.03621478271484375, 0.03633356857299805, 0.03681827163696289, 0.03644672012329102, 0.036536415100097655, 0.036714401245117184, 0.03632553482055664, 
0.0364400634765625, 0.03653836822509766, 0.03630284881591797, 0.03630080032348633, 0.03646243286132812, 0.03682304000854492, 0.0363267822265625, 0.036246143341064456, 0.036762752532958985, 0.036364158630371096, 0.036294689178466795, 0.036309886932373046, 0.03632137680053711, 0.036294654846191404, 0.03649657440185547, 0.03639766311645508, 0.036189697265625, 0.03606140899658203, 0.03616204833984375, 0.03619446563720703, 0.03680633544921875, 0.036063392639160155, 0.03640447998046875, 0.03604336166381836, 0.0360893440246582, 0.03633785629272461, 0.03625827026367188, 0.036048801422119144, 0.035950687408447264, 0.03607551956176758, 0.036122177124023436, 0.03640278244018555, 0.036213600158691406, 0.0361794548034668, 0.03630451202392578, 0.03612940979003906, 0.03654265594482422, 0.036200511932373045, 0.03612876892089844, 0.036057086944580076, 0.036269855499267575, 0.036164894104003906, 0.03618297576904297, 0.036218463897705076, 0.03607593536376953, 0.042375102996826175, 0.03625151824951172, 0.03664096069335938, 0.036210494995117186, 0.036466590881347655, 0.03617411041259765, 0.03603635025024414, 0.03612067031860351, 0.03622927856445313, 0.036146495819091795, 0.036112449645996095, 0.03598348617553711, 0.036065792083740236, 0.036046783447265626, 0.03595884704589844, 0.03607961654663086, 0.03605913543701172, 0.03609804916381836, 0.03591372680664062, 0.03595654296875, 0.0360142707824707, 0.03633356857299805, 0.03642572784423828]",tokens/s,27.53892721029471,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4357.18144,6253.576192,0.0,5851.05408,5850.293248,s,1,13.1795390625,13.1795390625,0.0,13.1795390625,13.1795390625,13.1795390625,13.1795390625,[13.1795390625],,kWh,0.00011562083927917874,1.274651058404985e-05,3.827364172999739e-05,0.00016664099159322598,,MB,4171.259904,6368.919552,0.0,5951.717376,5922.919424,s,10,2.047044403076172,0.2047044403076172,0.0007226319883372104,0.20479068756103513,0.20548961944580077,0.20558373794555665,0.20565903274536132,"[0.20296409606933594, 0.20465525817871094, 0.20406918334960938, 0.204718017578125, 0.20500662231445313, 0.2056778564453125, 0.20505174255371095, 0.2048633575439453, 0.20456956481933594, 0.2054687042236328]",tokens/s,1250.583522347141,kWh,5.998542863095292e-06,6.615252707105331e-07,3.9763920700001135e-06,1.0636460203805937e-05,tokens/kWh,24068157.553807057,MB,4189.55264,6383.599616,0.0,5966.39744,5922.921984,s,10,20.286585693359374,2.0286585693359376,0.0059083928641125195,2.0276633911132813,2.0356366943359374,2.0373301269531248,2.038684873046875,"[2.0390235595703126, 2.0277518310546876, 2.0306876220703125, 2.027574951171875, 2.031797119140625, 2.0255284423828126, 2.0273616943359376, 2.0352603759765624, 2.0159462890625, 2.02565380859375]",tokens/s,31.055004007215693,kWh,5.785755772607069e-05,6.381545991050667e-06,3.839964183079963e-05,0.00010263874554792098,tokens/kWh,613803.2929346934,,s,630,20.283636955261237,0.03219624913533529,0.0005807505764321952,0.03208614349365234,0.03251917991638184,0.03281832885742188,0.03482389244079591,"[0.0329543685913086, 0.033123775482177736, 0.03222496032714844, 0.03202956771850586, 0.03216316986083984, 0.03204924774169922, 0.03530937576293945, 0.032948993682861326, 0.03237478256225586, 0.032871551513671875, 0.032100257873535154, 0.032043521881103515, 0.03200867080688476, 0.03206278228759766, 0.03196784019470215, 0.03201228713989258, 0.03229500961303711, 0.0321824951171875, 0.03265481567382812, 0.03218463897705078, 0.032077823638916016, 0.03201638412475586, 0.032157024383544924, 0.032836254119873044, 0.031940607070922854, 0.0320530891418457, 0.031869087219238285, 0.03194675254821777, 0.03195648002624512, 0.032285377502441405, 0.032163646697998045, 0.03220684814453125, 0.03218227386474609, 0.03204095840454101, 0.032061439514160156, 0.032118785858154295, 0.03208099365234375, 0.032287647247314456, 0.032204254150390625, 0.03207600021362305, 0.03238889694213867, 0.03227856063842773, 0.032207359313964845, 0.03232972717285156, 0.032389118194580076, 0.03224307250976562, 0.03235091018676758, 0.03228460693359375, 0.032308769226074216, 0.03264969635009766, 0.03301686477661133, 0.032422271728515625, 0.032188961029052734, 0.0323001594543457, 0.0322938232421875, 0.03208752059936523, 0.03217852783203125, 0.03488985443115234, 0.03248953628540039, 0.03231872177124023, 0.03217484664916992, 0.03249760055541992, 0.0323172492980957, 0.03297014236450195, 0.03231375885009766, 0.032129024505615236, 0.03216185760498047, 0.03216172790527344, 0.032145408630371096, 0.03224371337890625, 0.032355777740478514, 0.03222585678100586, 0.0321638412475586, 0.03255875015258789, 0.032075328826904295, 0.032143489837646484, 0.03198633575439453, 0.03204278564453125, 0.03208000183105469, 0.032637088775634766, 0.0321223030090332, 0.03205785751342773, 0.032048831939697264, 0.032014270782470704, 0.03211711883544922, 0.03209849548339844, 0.03210630416870117, 0.03235212707519531, 
0.03254687881469726, 0.03211270523071289, 0.03234611129760742, 0.03230515289306641, 0.03202867126464844, 0.03195516777038574, 0.03194243240356445, 0.03195699119567871, 0.03228057479858398, 0.03212607955932617, 0.032148513793945316, 0.032073406219482424, 0.03199811172485351, 0.03201030349731445, 0.031950527191162106, 0.03217593765258789, 0.03251408004760742, 0.033401248931884765, 0.032323265075683595, 0.031959264755249024, 0.031966623306274415, 0.03199046325683594, 0.032121055603027346, 0.0320278091430664, 0.03186342430114746, 0.03197078323364258, 0.031946880340576175, 0.031883039474487306, 0.031926015853881835, 0.032519039154052735, 0.03216793441772461, 0.03267136001586914, 0.032419551849365236, 0.03225164794921875, 0.03194563293457031, 0.03201228713989258, 0.03223756790161133, 0.03207987213134766, 0.032900672912597656, 0.03219948959350586, 0.03219443130493164, 0.032532001495361326, 0.03231600189208984, 0.0323768310546875, 0.03209203338623047, 0.03207385635375976, 0.03200204849243164, 0.03224166488647461, 0.032202110290527344, 0.033088127136230466, 0.032210945129394535, 0.032124481201171874, 0.0319881591796875, 0.0321209602355957, 0.03221807861328125, 0.03203548812866211, 0.03252044677734375, 0.032500862121582035, 0.03258809661865234, 0.03275628662109375, 0.032524288177490236, 0.032038047790527345, 0.03214332962036133, 0.032471294403076174, 0.03239385604858398, 0.03215359878540039, 0.032042945861816406, 0.03243977737426758, 0.03206409454345703, 0.03197747230529785, 0.03210764694213867, 0.03206028747558594, 0.03239276885986328, 0.03259878540039062, 0.03203859329223633, 0.031922176361083986, 0.03202646255493164, 0.03193215942382813, 0.03278070449829101, 0.034648063659667966, 0.03224931335449219, 0.03231798553466797, 0.031989376068115236, 0.032231807708740234, 0.03188070487976074, 0.03196585655212402, 0.03193020820617676, 0.03228483200073242, 0.031958879470825194, 0.032368640899658206, 0.03199731254577637, 0.03186246490478516, 0.03192108726501465, 0.03187648010253906, 0.03186137580871582, 0.03204710388183594, 0.03176243209838867, 0.032115936279296875, 0.03183491134643555, 0.03189340782165527, 0.03201984024047851, 0.03310233688354492, 0.03221228790283203, 0.03190355110168457, 0.0320863037109375, 0.03207011032104492, 0.0320206413269043, 0.03198566436767578, 0.03198975944519043, 0.03188531112670898, 0.03183983993530273, 0.032622207641601564, 0.03251484680175781, 0.03192831993103027, 0.032249855041503905, 0.03226182556152344, 0.032724449157714844, 0.03277088165283203, 0.03211004638671875, 0.03229542541503906, 0.03211996841430664, 0.03201555252075195, 0.03206870269775391, 0.03197811126708985, 0.03226419067382812, 0.03228457641601563, 0.032186145782470706, 0.03217667388916016, 0.03206857681274414, 0.03236742401123047, 0.03230656051635742, 0.03206953430175781, 0.0323070068359375, 0.03223235321044922, 0.03213283157348633, 0.032125217437744144, 0.03213107299804688, 0.03209830474853516, 0.03223551940917969, 0.032231422424316404, 0.032159744262695314, 0.031940607070922854, 0.0320940170288086, 0.032358081817626956, 0.032045726776123044, 0.03221641540527344, 0.03217379379272461, 0.032287521362304686, 0.0322088623046875, 0.032257919311523435, 0.03258998489379883, 0.03262054443359375, 0.032144798278808596, 0.03207001495361328, 0.03187939262390137, 0.03201798248291016, 0.03200377655029297, 0.0320294075012207, 0.03194236755371094, 0.0321354866027832, 0.03208099365234375, 0.03210947036743164, 0.0319997444152832, 0.03193804740905762, 0.03277414321899414, 0.03233894348144531, 0.032249057769775394, 
0.034616382598876956, 0.03228710556030273, 0.03222153472900391, 0.03209971237182617, 0.03215769577026367, 0.03213375854492188, 0.03232931137084961, 0.03203932952880859, 0.032247806549072264, 0.03194879913330078, 0.031887359619140625, 0.031953088760375975, 0.031991615295410156, 0.03199180793762207, 0.031869951248168944, 0.031828479766845705, 0.0318155517578125, 0.032016063690185545, 0.03241904067993164, 0.03231107330322266, 0.03211872100830078, 0.032036865234375, 0.03213846588134766, 0.03200694274902344, 0.03214863967895508, 0.03222998428344727, 0.03212255859375, 0.03202649688720703, 0.03206835174560547, 0.03227846527099609, 0.031979263305664064, 0.03221529769897461, 0.032392257690429686, 0.03206854248046875, 0.03217353439331055, 0.03222582244873047, 0.03536703872680664, 0.03237481689453125, 0.03230044937133789, 0.0322520637512207, 0.03210639953613281, 0.032156192779541015, 0.032194400787353514, 0.03209830474853516, 0.032144798278808596, 0.032242271423339845, 0.03213836669921875, 0.032105342864990234, 0.03219020843505859, 0.032159393310546874, 0.032242271423339845, 0.03223961639404297, 0.032210945129394535, 0.032365665435791016, 0.03214633560180664, 0.032110591888427735, 0.03211382293701172, 0.032109279632568356, 0.03220697784423828, 0.03215331268310547, 0.03295209503173828, 0.03219532775878906, 0.03209817504882812, 0.03212102508544922, 0.032179233551025394, 0.03211500930786133, 0.03208272171020508, 0.03203859329223633, 0.03206054306030273, 0.03223148727416992, 0.03211756896972656, 0.031952320098876955, 0.032080448150634766, 0.03236454391479492, 0.032073150634765624, 0.03217007827758789, 0.032145889282226565, 0.03210444641113281, 0.031989696502685544, 0.03197958374023437, 0.03200614547729492, 0.0324587516784668, 0.03466239929199219, 0.03208358383178711, 0.03189507293701172, 0.03195785522460937, 0.031968511581420896, 0.03283635330200195, 0.03202252960205078, 0.03194870376586914, 0.03177891159057617, 0.03232767868041992, 0.03406422424316406, 0.03196873664855957, 0.032012992858886716, 0.0319871997833252, 0.03218454360961914, 0.03196707153320313, 0.03197337532043457, 0.031930496215820316, 0.032076065063476565, 0.03188124847412109, 0.031847776412963866, 0.03202320098876953, 0.032059391021728514, 0.031968608856201175, 0.03219523239135742, 0.032176128387451174, 0.03205231857299805, 0.03279145431518555, 0.03201190567016601, 0.03192460823059082, 0.03197337532043457, 0.03188105583190918, 0.03186294364929199, 0.031692800521850584, 0.031899648666381834, 0.0317255687713623, 0.03177267265319824, 0.03244777679443359, 0.03191584014892578, 0.03191862487792969, 0.03205363082885742, 0.032814720153808596, 0.03202233505249023, 0.032280384063720705, 0.03209280014038086, 0.03203878402709961, 0.03217843246459961, 0.0340536003112793, 0.03201897430419922, 0.03222313690185547, 0.0321168327331543, 0.03206467056274414, 0.03202134323120117, 0.03200966262817383, 0.03192403221130371, 0.032080638885498045, 0.03206880187988281, 0.03188313674926758, 0.03186988830566406, 0.03218764877319336, 0.03252249526977539, 0.03237519836425781, 0.03228432083129883, 0.03202080154418945, 0.032084095001220704, 0.03213846588134766, 0.032166400909423826, 0.03210684967041016, 0.0322083854675293, 0.03255859375, 0.032078784942626955, 0.03232972717285156, 0.03215871810913086, 0.03204399871826172, 0.032165088653564454, 0.03213555145263672, 0.03207942581176758, 0.0322894401550293, 0.03197769546508789, 0.03201152038574219, 0.03214412689208984, 0.03224092864990234, 0.03215766525268555, 0.032113407135009764, 0.032061439514160156, 0.031907743453979495, 
0.03179638481140137, 0.03232659149169922, 0.032020481109619144, 0.03181132888793945, 0.03251225662231445, 0.032008190155029294, 0.03217206573486328, 0.03221705627441406, 0.03202217483520508, 0.03199414443969727, 0.03203481674194336, 0.032188095092773435, 0.03192255973815918, 0.03202873611450195, 0.03293772888183594, 0.032061630249023435, 0.032161792755126956, 0.032521663665771486, 0.03282128143310547, 0.03222166442871094, 0.033956096649169924, 0.03199577522277832, 0.036619808197021486, 0.03528518295288086, 0.03221526336669922, 0.03212908935546875, 0.03197952079772949, 0.031897472381591796, 0.03193459129333496, 0.03204499053955078, 0.032132705688476565, 0.032367073059082034, 0.03175628852844238, 0.03184230422973633, 0.03200115203857422, 0.031961984634399414, 0.03252540969848633, 0.03189648056030273, 0.032292865753173826, 0.03183001518249512, 0.031916032791137694, 0.031880895614624025, 0.0318590087890625, 0.03398585510253906, 0.03297350311279297, 0.03213312149047851, 0.03204006576538086, 0.0320863037109375, 0.031912544250488284, 0.031872575759887695, 0.03193487930297852, 0.032067615509033205, 0.03222732925415039, 0.031883264541625975, 0.03220844650268555, 0.03185696029663086, 0.031952127456665036, 0.031914880752563476, 0.03186483192443847, 0.03180544090270996, 0.03209830474853516, 0.03207331085205078, 0.032335838317871096, 0.032008705139160154, 0.032007167816162106, 0.03256409454345703, 0.03215468978881836, 0.03200307083129883, 0.03222963333129883, 0.03259507369995117, 0.032150142669677736, 0.032039936065673826, 0.03207846450805664, 0.0324381103515625, 0.032257728576660157, 0.032085983276367185, 0.03251222229003906, 0.03220956802368164, 0.03237478256225586, 0.032534111022949216, 0.032135040283203124, 0.03287833786010742, 0.03214080047607422, 0.03209676742553711, 0.03202867126464844, 0.032094207763671875, 0.03219436645507812, 0.032408863067626956, 0.032143711090087894, 0.032168510437011716, 0.032335617065429687, 0.032299263000488285, 0.03219660949707031, 0.03216086578369141, 0.03228969573974609, 0.031952863693237306, 0.03200982284545899, 0.03218476867675781, 0.032077953338623046, 0.03204902267456055, 0.03201228713989258, 0.03204710388183594, 0.032075775146484374, 0.03195289611816406, 0.03184435272216797, 0.03196489524841309, 0.031834400177001954, 0.03205120086669922, 0.03195004844665527, 0.03179913520812988, 0.03190620803833008, 0.031830560684204104, 0.03172147178649903, 0.03180700874328613, 0.0317545280456543, 0.03185603141784668, 0.03181193542480469, 0.03304288101196289, 0.03194035148620605, 0.031844608306884764, 0.03177471923828125, 0.0316866569519043, 0.03178223991394043, 0.03176479911804199, 0.03178531265258789, 0.03192774391174316, 0.0317728328704834, 0.0318304328918457, 0.031692800521850584, 0.03165593528747559, 0.031835712432861325, 0.031793567657470705, 0.03193654441833496, 0.03184230422973633, 0.03197737693786621, 0.032368736267089845, 0.031883264541625975, 0.03184745597839356, 0.03274646377563477, 0.03186278343200684, 0.03180748748779297, 0.031768575668334964, 0.031835775375366214, 0.031701375961303714, 0.032723392486572266, 0.03207171249389648, 0.031908063888549804, 0.03519075012207031, 0.0322782096862793, 0.03201580810546875, 0.03196950340270996, 0.031948575973510744, 0.03178096008300781, 0.03188528060913086, 0.031727872848510745, 0.03176505661010742, 0.03172960090637207, 0.03173382377624512, 0.03168870353698731, 0.031848447799682614, 0.032024574279785153, 0.031831584930419925, 0.03186115264892578, 0.031832128524780276, 0.03199283218383789, 0.031886335372924804, 
0.03182387161254883, 0.031971328735351565, 0.0318482551574707, 0.03196332740783692, 0.031922176361083986, 0.03237411117553711, 0.031913856506347656, 0.0320211181640625, 0.03195238494873047, 0.03197200012207031, 0.03184435272216797, 0.032015838623046876, 0.03181936073303222, 0.03198662376403809, 0.03195638465881347, 0.03217878341674805, 0.03214031982421875, 0.03207872009277344, 0.031815776824951174, 0.031935680389404295, 0.03185647964477539, 0.03196601676940918, 0.03194655990600586, 0.031951200485229495, 0.03184227180480957, 0.03243123245239258, 0.0321014404296875, 0.0319150390625, 0.03188204765319824, 0.031929471969604495, 0.03190668869018555, 0.03183206367492676, 0.03186278343200684, 0.031893503189086916, 0.03254035186767578, 0.040744640350341796, 0.03197811126708985, 0.03196502494812012, 0.03194281578063965, 0.03187424087524414, 0.031777727127075196]",tokens/s,31.059518635122714,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4358.832128,6253.576192,0.0,5851.05408,5850.293248,s,1,13.1768115234375,13.1768115234375,0.0,13.1768115234375,13.1768115234375,13.1768115234375,13.1768115234375,[13.1768115234375],,kWh,0.00011562817714583577,1.2747318508031471e-05,3.616725115599545e-05,0.0001645427468098627,,MB,4190.273536,6368.919552,0.0,5951.717376,5923.050496,s,10,2.1189997863769534,0.2118999786376953,0.0007643109222225875,0.21232765197753906,0.2125884521484375,0.21269720306396483,0.2127842037963867,"[0.21048255920410155, 0.21228947448730467, 0.21119007873535156, 0.21154307556152344, 0.21280595397949217, 0.21237107849121092, 0.21248854064941405, 0.21089891052246093, 0.21236582946777344, 0.2125642852783203]",tokens/s,1208.1171581319813,kWh,6.217143819237695e-06,6.856386188236466e-07,4.121238521574419e-06,1.102402095963576e-05,tokens/kWh,23222016.806511804,MB,4207.734784,6383.599616,0.0,5966.39744,5923.053056,s,10,22.6867314453125,2.26867314453125,0.005701179034237439,2.2692408447265624,2.2752718505859373,2.2762424926757814,2.2770190063476563,"[2.257482421875, 2.26962451171875, 2.272218505859375, 2.268497314453125, 2.263626220703125, 2.27505615234375, 2.271662109375, 2.262493896484375, 2.268857177734375, 2.277213134765625]",tokens/s,27.76953575347099,kWh,6.619905834034527e-05,7.301654518083359e-06,4.396388327982525e-05,0.00011746459613825386,tokens/kWh,536331.8146162956,,s,630,22.683655792236326,0.03600580284481957,0.00038825776554579037,0.035913904190063475,0.03630734176635742,0.036610500526428225,0.03790090881347656,"[0.0365030403137207, 0.03561036682128906, 0.03573632049560547, 0.03558297729492187, 0.03562579345703125, 0.03563977432250977, 0.03551203155517578, 0.035555328369140625, 0.03551567840576172, 0.035501087188720706, 0.03556729507446289, 0.03555862426757812, 0.03555817413330078, 0.03573964691162109, 0.035588096618652344, 0.03573548889160156, 0.035796478271484376, 0.035776641845703124, 0.03575033569335938, 0.036257793426513675, 
0.035839584350585936, 0.035815841674804685, 0.03573555374145508, 0.03583929443359375, 0.03580179214477539, 0.0356673583984375, 0.035635326385498045, 0.035662303924560546, 0.03561260986328125, 0.03558720016479492, 0.036100929260253906, 0.03612435150146484, 0.035797409057617184, 0.03578812789916992, 0.03571577453613281, 0.035827713012695314, 0.03722649765014648, 0.0358337287902832, 0.036066814422607424, 0.036081310272216796, 0.03575228881835937, 0.03579520034790039, 0.03575836944580078, 0.03573564910888672, 0.03580723190307617, 0.035796993255615236, 0.03635200119018555, 0.036132865905761716, 0.03590329742431641, 0.035696830749511715, 0.0358441276550293, 0.03581875228881836, 0.03630153656005859, 0.03581257629394531, 0.0365676155090332, 0.035959007263183594, 0.03585638427734375, 0.035811294555664064, 0.03580521774291992, 0.03573142242431641, 0.035716480255126956, 0.035590816497802734, 0.03577459335327148, 0.038067134857177734, 0.03589100646972656, 0.03569664001464844, 0.03560243225097656, 0.03562451171875, 0.03571756744384766, 0.03594668960571289, 0.03581481552124023, 0.035936447143554685, 0.03558627319335937, 0.03572531127929687, 0.03590118408203125, 0.03649721527099609, 0.03700592041015625, 0.03933129501342773, 0.036099967956542967, 0.035840511322021484, 0.035952640533447267, 0.03593011093139648, 0.03576627349853516, 0.03591372680664062, 0.03582892990112305, 0.03569952011108399, 0.03577446365356445, 0.03583795166015625, 0.03578051376342774, 0.035853694915771485, 0.03598758316040039, 0.035885665893554686, 0.035860481262207033, 0.036001792907714845, 0.0359444465637207, 0.03576784133911133, 0.03612099075317383, 0.03593743896484375, 0.03654953765869141, 0.03575791931152344, 0.03572342300415039, 0.03602547073364258, 0.03587571334838867, 0.03571507263183594, 0.03678339385986328, 0.035775360107421876, 0.036199935913085936, 0.035858814239501956, 0.035917598724365236, 0.03595219039916992, 0.03572800064086914, 0.035759552001953125, 0.03580096054077148, 0.036254398345947264, 0.035833854675292966, 0.03625574493408203, 0.03586563110351562, 0.035986209869384764, 0.03585043334960938, 0.036450302124023434, 0.03584934234619141, 0.035775360107421876, 0.0357314567565918, 0.036042720794677734, 0.036030494689941406, 0.035843360900878904, 0.03657580947875977, 0.03587891387939453, 0.03580108642578125, 0.03597014236450195, 0.03585673522949219, 0.0359224967956543, 0.03584819030761719, 0.036057086944580076, 0.03591987228393555, 0.03592572784423828, 0.035942432403564456, 0.03600339126586914, 0.03593081665039063, 0.03589529418945313, 0.03651379013061523, 0.03596879959106445, 0.03593814468383789, 0.036114688873291015, 0.03613436889648437, 0.035955360412597656, 0.03642073440551758, 0.036108417510986326, 0.03607014465332031, 0.03620249557495117, 0.03706060791015625, 0.036708351135253905, 0.03608108901977539, 0.03610428619384766, 0.036114910125732425, 0.03624086380004883, 0.03608220672607422, 0.03616153717041016, 0.036073471069335936, 0.036122657775878905, 0.036077537536621095, 0.035982398986816405, 0.03604576110839844, 0.03597052764892578, 0.03606377410888672, 0.03640342330932617, 0.03602431869506836, 0.03585411071777344, 0.035952640533447267, 0.0359007682800293, 0.036032958984375, 0.036063488006591794, 0.035839969635009766, 0.03601606369018555, 0.0364601936340332, 0.03586294555664062, 0.03593795013427734, 0.03599599838256836, 0.03596492767333984, 0.03589865493774414, 0.035821758270263675, 0.035957248687744144, 0.03597030258178711, 0.03582022476196289, 0.03599491119384766, 0.035951423645019534, 0.03633561706542969, 
0.036364288330078126, 0.03566592025756836, 0.0365362548828125, 0.0360489616394043, 0.03572531127929687, 0.03648863983154297, 0.03579116821289063, 0.035759552001953125, 0.0358223991394043, 0.03593753433227539, 0.03571174240112305, 0.03576422500610352, 0.03589468765258789, 0.03592252731323242, 0.03600998306274414, 0.035633087158203125, 0.03573331069946289, 0.03576652908325195, 0.03601961517333984, 0.03772003173828125, 0.03625151824951172, 0.03591030502319336, 0.03571516799926758, 0.03582352066040039, 0.03567161560058594, 0.03584012985229492, 0.03599196624755859, 0.035931838989257815, 0.03582748794555664, 0.035746337890625, 0.035870689392089844, 0.03590083312988281, 0.035977310180664065, 0.03639350509643555, 0.03581951904296875, 0.03590963363647461, 0.03597673416137695, 0.03586095809936524, 0.03591775894165039, 0.035814750671386716, 0.03589324951171875, 0.03587510299682617, 0.03592441558837891, 0.035874496459960936, 0.03600729751586914, 0.035963550567626956, 0.035985694885253904, 0.035818592071533206, 0.036057472229003906, 0.03587267303466797, 0.035960929870605465, 0.03589599990844727, 0.03601705551147461, 0.0358798713684082, 0.03628851318359375, 0.035917823791503906, 0.03581302261352539, 0.03577420806884766, 0.03578940963745117, 0.035858558654785155, 0.0384815673828125, 0.036157470703125, 0.035910911560058593, 0.03590377426147461, 0.03683190536499024, 0.03682160186767578, 0.03618016052246094, 0.03600761413574219, 0.03590224075317383, 0.03612409591674805, 0.035883296966552736, 0.03576019287109375, 0.03570284652709961, 0.035716190338134765, 0.03581228637695313, 0.03582550430297852, 0.036132865905761716, 0.035776512145996094, 0.03576627349853516, 0.03594854354858398, 0.03577446365356445, 0.03574595260620117, 0.035905376434326175, 0.03562236785888672, 0.035783073425292966, 0.03570905685424805, 0.036011520385742186, 0.03583599853515625, 0.03571343994140625, 0.03575193786621094, 0.03568435287475586, 0.03582566452026367, 0.03574358367919922, 0.03583715057373047, 0.036102241516113284, 0.03619228744506836, 0.03579987335205078, 0.03579084777832031, 0.03607961654663086, 0.03575936126708985, 0.03582419204711914, 0.03595692825317383, 0.0358520622253418, 0.03715708923339844, 0.0360098876953125, 0.0360428466796875, 0.035848033905029296, 0.03576438522338867, 0.03580316925048828, 0.035764190673828126, 0.03596192169189453, 0.035864959716796874, 0.035778270721435544, 0.03586947250366211, 0.03600185775756836, 0.035866622924804685, 0.035739265441894534, 0.03578508758544922, 0.03587481689453125, 0.03618563079833984, 0.035951072692871094, 0.03621683120727539, 0.03631923294067383, 0.036071422576904294, 0.03591372680664062, 0.03612083053588867, 0.03586841583251953, 0.03586777496337891, 0.03648710250854492, 0.036724735260009765, 0.03912470245361328, 0.0361126708984375, 0.03622092819213867, 0.0363100471496582, 0.03589532852172852, 0.036512577056884765, 0.03609203338623047, 0.03583590316772461, 0.03653340911865234, 0.036125537872314456, 0.03591891098022461, 0.03608867263793945, 0.03603007888793945, 0.03610025787353516, 0.037617984771728515, 0.03792812728881836, 0.03622377777099609, 0.03584617614746094, 0.03595241546630859, 0.03585590362548828, 0.035909503936767576, 0.03598009490966797, 0.03591372680664062, 0.036041759490966795, 0.03588310241699219, 0.035947391510009766, 0.03590326309204102, 0.03588528060913086, 0.03593830490112305, 0.03583180618286133, 0.03609126281738281, 0.03599983978271484, 0.03587535858154297, 0.03600694274902344, 0.03667452621459961, 0.03580531311035156, 0.035848129272460935, 0.03590342330932617, 
0.03591987228393555, 0.03585139083862305, 0.036052894592285153, 0.035874942779541015, 0.035958911895751955, 0.03605481719970703, 0.03633660888671875, 0.035770336151123044, 0.03595430374145508, 0.03584969711303711, 0.035873184204101564, 0.03582598495483398, 0.0357910385131836, 0.03589708709716797, 0.03609414291381836, 0.03583366394042969, 0.035813472747802735, 0.035881118774414064, 0.03590150451660156, 0.0358276481628418, 0.03579510498046875, 0.03578617477416992, 0.03582527923583984, 0.036625823974609374, 0.035900001525878904, 0.03586412811279297, 0.03615379333496094, 0.035915775299072264, 0.036617504119873044, 0.036214912414550784, 0.03577622222900391, 0.03577126312255859, 0.03589324951171875, 0.03588502502441406, 0.035932193756103514, 0.03615900802612305, 0.03595280075073242, 0.03601628875732422, 0.035842208862304686, 0.03583932876586914, 0.035923999786376955, 0.035923839569091794, 0.03611929702758789, 0.03685171127319336, 0.03587686538696289, 0.03648102569580078, 0.036106239318847655, 0.03582361602783203, 0.03586867141723633, 0.0371354866027832, 0.03630374526977539, 0.035914974212646486, 0.035805984497070314, 0.03589270401000977, 0.03598102569580078, 0.035894176483154294, 0.0360975341796875, 0.036166240692138675, 0.03594220733642578, 0.035781761169433594, 0.03574367904663086, 0.03576483154296875, 0.03591408157348633, 0.03614310455322266, 0.035833854675292966, 0.03586838531494141, 0.03616592025756836, 0.03608575820922852, 0.03596003341674805, 0.03591203308105469, 0.03662220764160156, 0.03601260757446289, 0.0361912956237793, 0.036109249114990236, 0.035985057830810546, 0.035878753662109374, 0.03593072128295898, 0.036230369567871096, 0.03597286224365234, 0.036090816497802734, 0.03602022552490235, 0.035846145629882815, 0.03620249557495117, 0.03612470245361328, 0.03618608093261719, 0.03630284881591797, 0.03671654510498047, 0.03595468902587891, 0.0359189453125, 0.035920448303222656, 0.03591203308105469, 0.03604684829711914, 0.03597312164306641, 0.03606473541259766, 0.03585078430175781, 0.036091041564941403, 0.03594940948486328, 0.035902881622314455, 0.035870433807373044, 0.03595462417602539, 0.03630704116821289, 0.03583676910400391, 0.03568025588989258, 0.03574380874633789, 0.03668371200561524, 0.03783427047729492, 0.03589353561401367, 0.03601631927490234, 0.03581497573852539, 0.035911647796630856, 0.03587107086181641, 0.035888992309570315, 0.0360491828918457, 0.03575193786621094, 0.03574784088134766, 0.035673343658447265, 0.03579545593261719, 0.035784961700439454, 0.03587686538696289, 0.03610419082641601, 0.0358171501159668, 0.03568867111206055, 0.0356025276184082, 0.03583180618286133, 0.035597694396972655, 0.03572390365600586, 0.03564134216308594, 0.03565273666381836, 0.03580409622192383, 0.035755966186523436, 0.03620614242553711, 0.03575852966308594, 0.03591987228393555, 0.03574169540405273, 0.03595199966430664, 0.0357341423034668, 0.035729408264160156, 0.03583366394042969, 0.03578851318359375, 0.03565401458740235, 0.03618761444091797, 0.03577715301513672, 0.03569830322265625, 0.035704353332519534, 0.035768318176269534, 0.0356987190246582, 0.035763008117675785, 0.0360090560913086, 0.0357672004699707, 0.036517887115478515, 0.0357212142944336, 0.03571615982055664, 0.03580409622192383, 0.03583119964599609, 0.03583036804199219, 0.035805023193359375, 0.035997856140136716, 0.035913345336914065, 0.03584771347045899, 0.035866561889648436, 0.035920799255371096, 0.03603766250610352, 0.036018623352050784, 0.03591984176635742, 0.03581545639038086, 0.03591619110107422, 0.035893375396728516, 
0.035882560729980466, 0.03595923233032226, 0.03589324951171875, 0.03592095947265625, 0.03577907180786133, 0.036612545013427734, 0.03625983810424805, 0.03589091110229492, 0.03589263916015625, 0.035899585723876956, 0.03599740982055664, 0.03592086410522461, 0.03581705474853516, 0.03581945419311523, 0.0359628791809082, 0.03587120056152344, 0.0360874252319336, 0.035980735778808594, 0.03596588897705078, 0.035827713012695314, 0.035794910430908204, 0.03581955337524414, 0.03704777526855469, 0.036588062286376954, 0.03609756851196289, 0.036139488220214844, 0.03619839859008789, 0.03588655853271484, 0.0360079345703125, 0.036358688354492186, 0.036083713531494144, 0.03609600067138672, 0.035982494354248044, 0.03585315322875977, 0.03641753768920898, 0.03615439987182617, 0.036053569793701175, 0.03601654434204102, 0.036584640502929686, 0.03587360000610352, 0.03605699157714844, 0.035884670257568356, 0.03594697570800781, 0.03594956970214844, 0.03604137420654297, 0.03695206451416016, 0.03604800033569336, 0.036170623779296876, 0.036351680755615234, 0.038136127471923825, 0.036046783447265626, 0.03603993606567383, 0.03598368072509766, 0.03589596939086914, 0.03678915023803711, 0.03599660873413086, 0.036157440185546875, 0.03592396926879883, 0.035839103698730466, 0.03592691040039062, 0.03586790466308594, 0.035833759307861326, 0.03592892837524414, 0.03596492767333984, 0.03832777786254883, 0.03616150283813477, 0.03607814407348633, 0.0359628791809082, 0.03612057495117187, 0.03589865493774414, 0.035869407653808597, 0.03585433578491211, 0.03600569534301758, 0.0363045768737793, 0.03603488159179687, 0.035788383483886715, 0.03588937759399414, 0.0358260498046875, 0.03590092849731445, 0.03605145645141602, 0.035953662872314454, 0.03588726425170898, 0.036090015411376956, 0.03585708618164062, 0.0358809585571289, 0.03582726287841797, 0.03589945602416992, 0.03594688034057617, 0.03662643051147461, 0.036098175048828125, 0.03612828826904297, 0.03591635131835937, 0.036163360595703124, 0.03660800170898437, 0.03618201446533203, 0.03602022552490235, 0.03604038238525391, 0.036249919891357424, 0.03650559997558594, 0.03628441619873047, 0.03605913543701172, 0.036205982208251955, 0.035985950469970704, 0.03629471969604492, 0.036134975433349606, 0.03605088043212891, 0.03601212692260742, 0.0360283203125]",tokens/s,27.77330099567208,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""])
ChildProcessError: Traceback (most recent call last):
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target
    report = worker(*worker_args)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run
    report = scenario.run(backend)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run
    self.run_text_generation_memory_tracking(backend)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking
    _ = backend.prefill(self.inputs, prefill_kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context
    return func(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill
    return self.pretrained_model.generate(**inputs, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context
    return func(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate
    result = self._sample(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample
    outputs = self(**model_inputs, return_dict=True)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward
    outputs = self.model(
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward
    layer_outputs = decoder_layer(
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward
    hidden_states, self_attn_weights, present_key_value = self.self_attn(
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward
    attn_output = _flash_attention_forward(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward
    attn_output = flash_attn_func(
  File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func
    return 
FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, 
in target
    report = worker(*worker_args)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run
    report = scenario.run(backend)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run
    self.run_text_generation_memory_tracking(backend)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking
    _ = backend.prefill(self.inputs, prefill_kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context
    return func(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill
    return self.pretrained_model.generate(**inputs, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context
    return func(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate
    result = self._sample(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample
    outputs = self(**model_inputs, return_dict=True)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward
    outputs = self.model(
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward
    layer_outputs = decoder_layer(
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward
    hidden_states, self_attn_weights, present_key_value = self.self_attn(
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward
    attn_output = _flash_attention_forward(
  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward
    attn_output = flash_attn_func(
  File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func
    return FlashAttnFunc.apply(
  File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply
    return super().apply(*args, **kwargs) # type: ignore[misc]
  File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4347.600896,6253.576192,0.0,5851.05408,5850.293248,s,1,12.9122236328125,12.9122236328125,0.0,12.9122236328125,12.9122236328125,12.9122236328125,12.9122236328125,[12.9122236328125],,kWh,0.0001114394833416649,1.2285066039737044e-05,3.6329473508000046e-05,0.00016005402288940199,,MB,4017.430528,6368.919552,0.0,5951.717376,5922.919424,s,10,2.0477219085693363,0.2047721908569336,0.0007117141350038161,0.20462012481689454,0.20578769073486328,0.2060054328918457,0.20617962661743164,"[0.20464425659179689, 0.2037519073486328, 0.20422239685058594, 0.2042890625, 0.20423744201660157, 0.2045959930419922, 0.20504339599609375, 0.2057393035888672, 0.2049749755859375, 0.20622317504882812]",tokens/s,1250.169756589933,kWh,5.9911622315476214e-06,6.607205225405091e-07,3.988211807346903e-06,1.0640094561435032e-05,tokens/kWh,24059936.546792608,MB,4021.395456,6383.599616,0.0,5966.39744,5922.921984,s,10,20.286995971679687,2.028699597167969,0.007438085453538411,2.0250490112304687,2.037993420410156,2.0397414001464846,2.041139783935547,"[2.0414893798828126, 2.0355823974609377, 2.03760498046875, 2.022117919921875, 2.0250679931640625, 2.034987548828125, 2.025030029296875, 2.0237978515625, 2.020384033203125, 2.020933837890625]",tokens/s,31.05437595982518,kWh,5.9041073959286e-05,6.511410182841952e-06,3.911123933885327e-05,0.00010466372348098122,tokens/kWh,601927.7539982412,,s,630,20.283965847015388,0.0321967711857387,0.0006359865262471596,0.03210964965820312,0.03254350662231446,0.03270558910369873,0.034101577758789074,"[0.03298121643066406, 0.03236985778808594, 0.03252921676635742, 0.032239295959472655, 0.03225632095336914, 0.032464897155761716, 0.03248332977294922, 0.03335168075561523, 0.03354153442382812, 0.03236310577392578, 0.03255295944213867, 0.03258163070678711, 0.03247923278808594, 0.0323768310546875, 0.03376095962524414, 0.032685630798339846, 0.03255708694458008, 0.03257347106933594, 0.03247174453735351, 0.03225190353393555, 0.03234611129760742, 0.03240959930419922, 0.03235635375976562, 0.03243001556396485, 0.032270175933837894, 0.03210876846313476, 0.032266239166259765, 0.03234207916259765, 0.032265758514404295, 0.032110145568847656, 0.03203753662109375, 0.032050495147705076, 0.03195788764953613, 0.031838207244873046, 0.03213123321533203, 0.0321036491394043, 
0.03209606552124023, 0.03208806228637695, 0.03210905456542969, 0.032257823944091796, 0.032180767059326175, 0.03215359878540039, 0.03216323089599609, 0.03210915374755859, 0.0321036491394043, 0.03469315338134766, 0.032668193817138674, 0.03228694534301758, 0.032161632537841794, 0.03233603286743164, 0.03222857666015625, 0.031986175537109376, 0.03221891021728516, 0.0325898551940918, 0.03350710296630859, 0.03226284790039063, 0.032145503997802735, 0.03225779342651367, 0.032245025634765626, 0.03210124969482422, 0.03211663818359375, 0.03218236923217774, 0.03205120086669922, 0.0326761589050293, 0.032215038299560544, 0.03209820938110351, 0.03196265602111816, 0.03205791854858398, 0.0321003532409668, 0.03199897575378418, 0.03194777679443359, 0.03206758499145508, 0.03212287902832031, 0.03231462478637695, 0.0321927375793457, 0.03194115257263184, 0.031887359619140625, 0.03201760101318359, 0.032354976654052736, 0.032122112274169924, 0.032100864410400394, 0.03203039932250976, 0.03204716873168945, 0.03207030487060547, 0.0318768310546875, 0.03202896118164063, 0.03225395202636719, 0.03186470413208008, 0.03207126235961914, 0.03537071990966797, 0.03231417465209961, 0.03212905502319336, 0.032355712890625, 0.03227065658569336, 0.032198944091796876, 0.03237887954711914, 0.03202060699462891, 0.03214902496337891, 0.0322599983215332, 0.032221057891845706, 0.032184894561767576, 0.03387542343139648, 0.03258390426635742, 0.032503360748291014, 0.03242467117309571, 0.032651294708251954, 0.03254886245727539, 0.0323155517578125, 0.03198755264282226, 0.032882686614990234, 0.03229705429077148, 0.032505760192871096, 0.03226403045654297, 0.0323785285949707, 0.03222351837158203, 0.032186592102050784, 0.03228803253173828, 0.03214406585693359, 0.032092193603515624, 0.03264102554321289, 0.03247679901123047, 0.032228736877441405, 0.03257241439819336, 0.03239692687988281, 0.0321901741027832, 0.0323263053894043, 0.03315785598754883, 0.032193599700927736, 0.03248633575439453, 0.0323642578125, 0.03264950561523437, 0.0357212142944336, 0.03259360122680664, 0.032505889892578126, 0.032916961669921876, 0.032534912109375, 0.03276844787597656, 0.03281423950195313, 0.03234275054931641, 0.03223078536987305, 0.03234070587158203, 0.032259521484375, 0.032255584716796876, 0.03205222320556641, 0.0321003532409668, 0.03215919876098633, 0.03202102279663086, 0.03223046493530273, 0.032115840911865236, 0.0322083854675293, 0.032419456481933596, 0.03227923202514649, 0.03235561752319336, 0.03248611068725586, 0.03214499282836914, 0.03241616058349609, 0.03205734252929687, 0.032290817260742184, 0.03221699142456055, 0.032353759765625, 0.03203750228881836, 0.03222246551513672, 0.03245747375488281, 0.032048927307128904, 0.03206140899658203, 0.03264332962036133, 0.032099777221679685, 0.03210707092285156, 0.0323421745300293, 0.03225727844238281, 0.03217878341674805, 0.03232271957397461, 0.03217276763916015, 0.032161407470703125, 0.03246131134033203, 0.03241984176635742, 0.03196259117126465, 0.03206198501586914, 0.03207987213134766, 0.03216310501098633, 0.03209404754638672, 0.03185712051391602, 0.031981983184814454, 0.03195699119567871, 0.032126270294189456, 0.0322968635559082, 0.0322215690612793, 0.032172351837158206, 0.032277793884277345, 0.03284086227416992, 0.031978336334228516, 0.03199180793762207, 0.03168412780761719, 0.031968767166137696, 0.03213347244262695, 0.03211328125, 0.032086017608642575, 0.03197743988037109, 0.031895263671875, 0.032356033325195314, 0.03231324768066406, 0.03216032028198242, 0.03225763320922852, 0.03216214370727539, 0.03225158309936523, 
0.03214390563964844, 0.0320975341796875, 0.031920896530151364, 0.031938560485839845, 0.03195465660095215, 0.03196751976013184, 0.031798336029052736, 0.031887744903564455, 0.032113216400146485, 0.031899648666381834, 0.031747808456420896, 0.031843839645385744, 0.03188787269592285, 0.03246928024291992, 0.03224576187133789, 0.032290817260742184, 0.03292303848266601, 0.03243465423583984, 0.03240940856933594, 0.032200481414794924, 0.032172576904296875, 0.034370815277099606, 0.03215436935424805, 0.03203811264038086, 0.03194319915771485, 0.03239961624145508, 0.031988895416259766, 0.031787872314453125, 0.031940607070922854, 0.03194879913330078, 0.032221023559570315, 0.032067745208740235, 0.03209609603881836, 0.03211206436157227, 0.03213488006591797, 0.031939584732055666, 0.0320714225769043, 0.031848703384399414, 0.03192543983459473, 0.03188931274414063, 0.03172358322143555, 0.03167523193359375, 0.03176467132568359, 0.03188102340698242, 0.03181158447265625, 0.03186185646057129, 0.031715423583984374, 0.03290556716918945, 0.03220889663696289, 0.03184988784790039, 0.031814239501953126, 0.031850496292114255, 0.0318460807800293, 0.03176479911804199, 0.03172147178649903, 0.03211260986328125, 0.03191577529907227, 0.0319019832611084, 0.03176038360595703, 0.031817728042602536, 0.03185628890991211, 0.031936704635620115, 0.032081409454345705, 0.03200902557373047, 0.032347007751464846, 0.03183318328857422, 0.031743871688842774, 0.031965152740478515, 0.03197750473022461, 0.03243622589111328, 0.032107967376708985, 0.0321910400390625, 0.032201889038085935, 0.03198566436767578, 0.032137088775634766, 0.03255599975585938, 0.032236927032470705, 0.03202656173706055, 0.0319901123046875, 0.032194080352783205, 0.03207455825805664, 0.032185375213623045, 0.0321033935546875, 0.03193446350097656, 0.031866880416870115, 0.03186483192443847, 0.03162224006652832, 0.032105377197265625, 0.033073150634765625, 0.032231422424316404, 0.032236961364746096, 0.032110240936279295, 0.03223852920532227, 0.03217932891845703, 0.03222617721557617, 0.0327712631225586, 0.03262752151489258, 0.032514049530029294, 0.03230825424194336, 0.03218531036376953, 0.03225100708007812, 0.032576385498046874, 0.03235430526733398, 0.03265740966796875, 0.03238092803955078, 0.03213257598876953, 0.032162334442138674, 0.03228847885131836, 0.03217388916015625, 0.0320495376586914, 0.03310966491699219, 0.03206966400146485, 0.03196963119506836, 0.03206550216674805, 0.03197952079772949, 0.03216787338256836, 0.03206051254272461, 0.03205014419555664, 0.032004096984863284, 0.03201375961303711, 0.03224224090576172, 0.03224899291992187, 0.03207049560546875, 0.03211990356445313, 0.032100257873535154, 0.032011550903320314, 0.03217158508300781, 0.03231964874267578, 0.03224121475219727, 0.03218211364746094, 0.03204534530639649, 0.0320546875, 0.032061534881591795, 0.03192911911010742, 0.03214748764038086, 0.03204915237426758, 0.03191776084899902, 0.03205971145629883, 0.03247004699707031, 0.03188425636291504, 0.031696895599365234, 0.0317541446685791, 0.03168681526184082, 0.03191187286376953, 0.03183001518249512, 0.0317255687713623, 0.03180454444885254, 0.031887903213500976, 0.03212636947631836, 0.03205971145629883, 0.03217232131958008, 0.0323106575012207, 0.03216006469726562, 0.03206390380859375, 0.03210675048828125, 0.03201228713989258, 0.032119873046875, 0.0325662727355957, 0.03250710296630859, 0.032436416625976565, 0.032343681335449216, 0.03199068832397461, 0.03209577560424805, 0.03260201644897461, 0.03224198532104492, 0.03197260856628418, 0.03206655883789063, 
0.03189673614501953, 0.03193737602233887, 0.03204710388183594, 0.0440437126159668, 0.03234668731689453, 0.0323135986328125, 0.03270649719238281, 0.03222060775756836, 0.03321916961669922, 0.03200368118286133, 0.03185456085205078, 0.0318691520690918, 0.03188528060913086, 0.03169718360900879, 0.03195609664916992, 0.03199270439147949, 0.03258726501464844, 0.03221324920654297, 0.03210671997070313, 0.032083999633789065, 0.032266014099121096, 0.03207331085205078, 0.031879776000976565, 0.031993888854980466, 0.032294273376464844, 0.03221158218383789, 0.03234406280517578, 0.03230624008178711, 0.032301406860351566, 0.03217388916015625, 0.032045406341552736, 0.03181561660766601, 0.031961599349975583, 0.031905792236328126, 0.03187830352783203, 0.03192646408081055, 0.031699712753295896, 0.031522592544555667, 0.03161100769042969, 0.03221404647827148, 0.03217097473144531, 0.031936511993408204, 0.0320365104675293, 0.031918432235717775, 0.033476608276367184, 0.03235635375976562, 0.032192417144775394, 0.032348545074462894, 0.032269344329833985, 0.03230985641479492, 0.03215779113769531, 0.0323768310546875, 0.03241366577148438, 0.03194883155822754, 0.031932416915893554, 0.032013790130615234, 0.03197583961486816, 0.0318599681854248, 0.032266368865966795, 0.03214547348022461, 0.03216659164428711, 0.03242156982421875, 0.03238860702514648, 0.03225683212280273, 0.032198654174804685, 0.03217203140258789, 0.03207920074462891, 0.0319901123046875, 0.032147777557373046, 0.03287859344482422, 0.031864511489868165, 0.032043262481689455, 0.032239070892333986, 0.032701183319091796, 0.03221440124511719, 0.03201276779174805, 0.03211673736572265, 0.03198089599609375, 0.03211945724487305, 0.032107616424560545, 0.03211324691772461, 0.032371009826660156, 0.03231760025024414, 0.03196502494812012, 0.03202803039550781, 0.03216857528686524, 0.03236598587036133, 0.032422496795654294, 0.03201638412475586, 0.03196537590026855, 0.03178886413574219, 0.031692800521850584, 0.031922176361083986, 0.03601606369018555, 0.03196115112304688, 0.03207689666748047, 0.03178774452209473, 0.031733951568603515, 0.03183958435058594, 0.03171395111083984, 0.031669376373291015, 0.031963071823120116, 0.03209024047851562, 0.03251424026489258, 0.03218694305419922, 0.03200953674316406, 0.03191475105285645, 0.03187507247924805, 0.031868640899658206, 0.03248716735839844, 0.031797792434692385, 0.03170416069030762, 0.03201731109619141, 0.032025825500488284, 0.03187318420410156, 0.03169516754150391, 0.0319245433807373, 0.03249139022827149, 0.03210383987426758, 0.03217216110229492, 0.032193119049072266, 0.03209363174438477, 0.032336158752441405, 0.03208220672607422, 0.03189760017395019, 0.03206758499145508, 0.03204643249511719, 0.031896223068237306, 0.03184236717224121, 0.031918176651000975, 0.03221926498413086, 0.03197228813171387, 0.03336982345581055, 0.03347027206420899, 0.032062015533447265, 0.031824928283691406, 0.03202751922607422, 0.032290241241455075, 0.0321910400390625, 0.03185379219055176, 0.03197007942199707, 0.031834112167358396, 0.031920032501220705, 0.03202054214477539, 0.032667678833007814, 0.03251609420776367, 0.03241286468505859, 0.032535327911376956, 0.03267523193359375, 0.03234431838989258, 0.032554527282714844, 0.032199520111083985, 0.03213516616821289, 0.03186892890930176, 0.03194793510437012, 0.031816127777099606, 0.031768224716186524, 0.031809951782226564, 0.031760736465454104, 0.03249158477783203, 0.03205523300170898, 0.03200921630859375, 0.031857503890991214, 0.031658143997192384, 0.031755775451660154, 0.03193068885803223, 
0.031973567962646485, 0.03184780883789062, 0.03187366485595703, 0.031848447799682614, 0.03199590492248535, 0.03194428825378418, 0.03179334449768066, 0.03170432090759277, 0.03166499137878418, 0.031737503051757814, 0.03161692810058594, 0.031852672576904294, 0.032542911529541016, 0.031709440231323244, 0.03161248016357422, 0.031774400711059574, 0.032169822692871095, 0.0325846061706543, 0.03215331268310547, 0.031975519180297854, 0.03193798446655274, 0.03203881454467773, 0.0319640007019043, 0.031931936264038084, 0.03217660903930664, 0.032086017608642575, 0.03196441650390625, 0.03209446334838867, 0.0319237117767334, 0.03282076644897461, 0.03192495918273926, 0.03191372871398926, 0.03216588973999023, 0.03196463966369629, 0.03419395065307617, 0.0318275203704834, 0.03183865547180176, 0.031944351196289064, 0.03191606330871582, 0.03160204887390137, 0.03166316795349121, 0.03201830291748047, 0.03245008087158203, 0.03222905731201172, 0.032178016662597654, 0.03248838424682617, 0.03190784072875977, 0.03237273788452148, 0.032020481109619144, 0.0329554557800293, 0.03258988952636719, 0.03199622344970703, 0.03186131286621094, 0.031559680938720705, 0.031817728042602536, 0.03182796859741211, 0.03176243209838867, 0.03263199996948242, 0.03196806335449219, 0.03166742324829101, 0.031640352249145506, 0.032118785858154295, 0.03182796859741211, 0.031866880416870115, 0.03183206367492676, 0.03165913581848145, 0.03165478324890137, 0.03199180793762207, 0.031997247695922854, 0.032062145233154295, 0.03191375923156738, 0.03184796714782715, 0.032021183013916016, 0.03231740951538086, 0.03218403244018555, 0.03191839981079102, 0.03180940818786621, 0.03169907188415527, 0.03181711959838867, 0.031912544250488284, 0.0319180793762207, 0.03230028915405273, 0.03210316848754883, 0.03186073684692383, 0.03176652717590332, 0.032124160766601566, 0.03241651153564453, 0.03224492645263672, 0.032244544982910156, 0.032287967681884765, 0.032704479217529295, 0.03247596740722656]",tokens/s,31.059015024554448,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4356.66944,6253.576192,0.0,5851.05408,5850.293248,s,1,13.111685546875,13.111685546875,0.0,13.111685546875,13.111685546875,13.111685546875,13.111685546875,[13.111685546875],,kWh,0.00011269785566666427,1.2424118045375226e-05,3.743641883800081e-05,0.0001625583925500403,,MB,4117.561344,6368.919552,0.0,5951.717376,5922.919424,s,10,2.0137477874755856,0.2013747787475586,0.0008798425285559686,0.20126168060302735,0.20233099212646485,0.2026062614440918,0.20282647689819336,"[0.2000020446777344, 0.2008468780517578, 0.200891845703125, 0.20163151550292968, 0.20052703857421875, 0.2020814971923828, 0.20060762023925782, 0.20288153076171875, 0.2022698211669922, 
0.20200799560546875]",tokens/s,1271.2614836484515,kWh,5.8927233703333475e-06,6.498529488023014e-07,3.9083420155600065e-06,1.0450918334695656e-05,tokens/kWh,24495455.0214132,MB,4121.698304,6383.599616,0.0,5966.39744,5922.921984,s,10,20.473501953124998,2.0473501953124997,0.006410835938575396,2.0461842041015625,2.0533028076171873,2.0579009155273438,2.0615794018554685,"[2.0486923828125, 2.049921630859375, 2.0624990234375, 2.043676025390625, 2.0435858154296875, 2.052281005859375, 2.048716552734375, 2.041701416015625, 2.0387620849609376, 2.043666015625]",tokens/s,30.771482154953915,kWh,5.950253166549985e-05,6.562723034723367e-06,3.952885940083986e-05,0.00010559411410106309,tokens/kWh,596624.163537215,,s,630,20.47035171508789,0.03249262176998079,0.0004230996839948017,0.03239868927001953,0.03273987426757813,0.03309699401855468,0.034611082649230976,"[0.03330047988891602, 0.03262847900390625, 0.032382335662841794, 0.03250374221801758, 0.03262112045288086, 0.032325153350830076, 0.03246969604492188, 0.032333984375, 0.03233996963500976, 0.03234377670288086, 0.03239145660400391, 0.03238089752197266, 0.03239324951171875, 0.03252633666992188, 0.03246694564819336, 0.032392894744873044, 0.03245497512817383, 0.032247806549072264, 0.032269344329833985, 0.032521183013916016, 0.032350208282470705, 0.03226931381225586, 0.03232460784912109, 0.03226419067382812, 0.03253247833251953, 0.03241718292236328, 0.03235491180419922, 0.0323109130859375, 0.032346622467041015, 0.03243836975097656, 0.03239712142944336, 0.03241980743408203, 0.035866622924804685, 0.03233996963500976, 0.03237420654296875, 0.032395294189453125, 0.03273907089233399, 0.03249158477783203, 0.03247382354736328, 0.032411361694335936, 0.0325819206237793, 0.03240959930419922, 0.03250380706787109, 0.03236851119995117, 0.03249164962768555, 0.032497055053710935, 0.032473793029785154, 0.03248118209838867, 0.032317440032958986, 0.03239299011230469, 0.03308975982666015, 0.03237887954711914, 0.03239321517944336, 0.032470111846923826, 0.0325313606262207, 0.03268998336791992, 0.032344257354736325, 0.03248521423339844, 0.03242614364624023, 0.032522239685058595, 0.032702369689941405, 0.0325013427734375, 0.03251865768432617, 0.0329601936340332, 0.03257535934448242, 0.03236262512207031, 0.032600223541259764, 0.03247465515136719, 0.03236140823364258, 0.03245260620117187, 0.03243212890625, 0.03256681442260742, 0.0325715217590332, 0.03214780807495117, 0.03234406280517578, 0.032323135375976565, 0.03246716690063477, 0.03244076919555664, 0.032588897705078126, 0.0324918098449707, 0.03260662460327148, 0.03245260620117187, 0.032643070220947264, 0.032440319061279296, 0.03250790405273438, 0.03232563018798828, 0.03235430526733398, 0.032505630493164066, 0.032212799072265624, 0.03235881423950195, 0.03246284866333008, 0.033193790435791015, 0.03251628875732422, 0.03232767868041992, 0.032384544372558596, 0.032428096771240235, 0.03229123306274414, 0.03272294235229492, 0.03239718246459961, 0.03239948654174805, 0.03243193435668945, 0.03238931274414063, 0.03253452682495117, 0.03266668701171875, 0.03253753662109375, 0.032599296569824215, 0.03243395233154297, 0.0325579833984375, 0.032284767150878906, 0.032591678619384765, 0.03344604873657227, 0.035081920623779295, 0.032423328399658204, 0.03237760162353515, 0.03238694381713867, 0.032645694732666014, 0.03260531234741211, 0.03229756927490234, 0.03236022567749024, 0.03244464111328125, 0.032479137420654294, 0.032540767669677735, 0.03241984176635742, 0.032337505340576174, 0.032327903747558596, 0.03269241714477539, 0.032927230834960936, 
0.0323507194519043, 0.03257548904418945, 0.03245673751831055, 0.032523681640625, 0.03241427230834961, 0.03239116668701172, 0.03237478256225586, 0.03245011138916016, 0.03272249603271484, 0.03244847869873047, 0.03240761566162109, 0.032678176879882816, 0.0324815673828125, 0.032546817779541014, 0.03257987213134766, 0.03253247833251953, 0.03239487838745117, 0.03233321762084961, 0.032512992858886716, 0.03262054443359375, 0.032917503356933595, 0.032389118194580076, 0.0324136962890625, 0.03246080017089844, 0.032849246978759766, 0.03253519821166992, 0.03354828643798828, 0.032271678924560544, 0.032414398193359374, 0.03260015869140625, 0.032950176239013675, 0.03475491333007812, 0.03401801681518555, 0.03413087844848633, 0.0329169921875, 0.03262857437133789, 0.03238358306884766, 0.032537952423095706, 0.032512641906738284, 0.03269222259521484, 0.03242927932739258, 0.03237267303466797, 0.03244537734985352, 0.03385343933105469, 0.03377056121826172, 0.032432926177978515, 0.03341123199462891, 0.0325591049194336, 0.03254886245727539, 0.03370345687866211, 0.032405887603759766, 0.03210044860839844, 0.03310291290283203, 0.035089630126953125, 0.0324188461303711, 0.03239801788330078, 0.03234553527832031, 0.03238355255126953, 0.03226419067382812, 0.03237478256225586, 0.032761856079101564, 0.032364448547363284, 0.03305535888671875, 0.032758975982666014, 0.03233612823486328, 0.032145984649658205, 0.032161792755126956, 0.03257753753662109, 0.03344319915771484, 0.032469631195068356, 0.03213033676147461, 0.032311614990234376, 0.032241535186767575, 0.03231148910522461, 0.032583393096923825, 0.03241024017333984, 0.032428001403808596, 0.03232566452026367, 0.032380702972412106, 0.0323458251953125, 0.032321025848388675, 0.03234220886230469, 0.0323072624206543, 0.03237350463867188, 0.032290817260742184, 0.03208806228637695, 0.03236044692993164, 0.03226806259155274, 0.032079360961914063, 0.032382720947265624, 0.033571487426757814, 0.03238284683227539, 0.03234041595458984, 0.03239084625244141, 0.03264748764038086, 0.032166015625, 0.0325645751953125, 0.032143230438232424, 0.03227900695800781, 0.032729278564453124, 0.03232972717285156, 0.0326901741027832, 0.03231948852539063, 0.03222739028930664, 0.032366497039794925, 0.03259363174438477, 0.03261062240600586, 0.03238217544555664, 0.032525089263916014, 0.03264495849609375, 0.032545150756835936, 0.03255068969726563, 0.03226963043212891, 0.03236118316650391, 0.03262665557861328, 0.032421886444091795, 0.032444576263427734, 0.03237209701538086, 0.03236812973022461, 0.03240975952148437, 0.032250686645507814, 0.03251763153076172, 0.032376319885253906, 0.03237734222412109, 0.032347713470458984, 0.033201984405517575, 0.03258179092407226, 0.03238694381713867, 0.032894462585449216, 0.03224643325805664, 0.032941566467285154, 0.03248588943481445, 0.03226828765869141, 0.03248659133911133, 0.03228140640258789, 0.03226419067382812, 0.03256729507446289, 0.032373825073242185, 0.03226419067382812, 0.03265798568725586, 0.03232396697998047, 0.0324048957824707, 0.032395870208740234, 0.03237887954711914, 0.032307201385498044, 0.0324956169128418, 0.03256934356689453, 0.03257548904418945, 0.03241331100463867, 0.03254105758666992, 0.03246406555175781, 0.032448734283447266, 0.03239177703857422, 0.03234969711303711, 0.03297894287109375, 0.03238102340698242, 0.03252371215820313, 0.032480224609375, 0.03219839859008789, 0.03223318481445313, 0.0323807373046875, 0.03264710235595703, 0.03259267044067383, 0.03261644744873047, 0.03261779022216797, 0.03224374389648438, 0.03234678268432617, 0.032462718963623044, 
0.03215315246582031, 0.03229539108276367, 0.03224380874633789, 0.03228992080688477, 0.03229171371459961, 0.03229695892333984, 0.03243420791625977, 0.032327262878417966, 0.032564769744873046, 0.03212534332275391, 0.03227423858642578, 0.03251673507690429, 0.0323438720703125, 0.03237292861938477, 0.03223551940917969, 0.03227443313598633, 0.03229520034790039, 0.03232329559326172, 0.03243212890625, 0.032487422943115234, 0.033266815185546875, 0.0324719352722168, 0.032245376586914065, 0.032477119445800784, 0.032395713806152346, 0.03234159851074219, 0.032426433563232424, 0.032409088134765625, 0.03236415863037109, 0.03244323348999024, 0.03250569534301758, 0.03254246520996094, 0.03250831985473633, 0.032464897155761716, 0.03240361785888672, 0.03241353607177734, 0.03250742340087891, 0.03281353759765625, 0.03253071975708008, 0.032558815002441406, 0.03235372924804687, 0.03237126541137696, 0.032785598754882815, 0.032598686218261716, 0.032618175506591796, 0.032647647857666016, 0.03265276718139649, 0.032438270568847655, 0.03269836807250977, 0.03270915222167969, 0.032589824676513675, 0.03269446563720703, 0.03268179321289062, 0.032537792205810545, 0.03270479965209961, 0.032913951873779296, 0.03260755157470703, 0.032639678955078126, 0.03258777618408203, 0.03253247833251953, 0.032548736572265625, 0.03278041458129883, 0.03263907241821289, 0.032628734588623046, 0.03258911895751953, 0.03275641632080078, 0.03242588806152344, 0.03254886245727539, 0.03257676696777344, 0.03253529739379883, 0.03296255874633789, 0.03246899032592773, 0.032380222320556644, 0.03244246292114258, 0.032562942504882814, 0.03275862503051758, 0.03268771362304688, 0.03244620895385742, 0.032422462463378904, 0.032812801361083985, 0.03270281600952148, 0.03243337631225586, 0.03239606475830078, 0.032835678100585936, 0.032247264862060546, 0.0322808952331543, 0.03256185531616211, 0.03238479995727539, 0.03237705612182617, 0.032324928283691406, 0.03226867294311524, 0.03267820739746094, 0.0356577262878418, 0.03266073608398438, 0.03273379135131836, 0.03268534469604492, 0.03249411010742188, 0.032727294921875, 0.03274710464477539, 0.03249203109741211, 0.03248758316040039, 0.032502975463867184, 0.03226012802124024, 0.03254288101196289, 0.032448734283447266, 0.03231155014038086, 0.032353279113769534, 0.03226700973510742, 0.032340225219726564, 0.03251324844360352, 0.03237968063354492, 0.03233350372314453, 0.032327999114990236, 0.032489215850830075, 0.03233612823486328, 0.03219625473022461, 0.03239302444458008, 0.03231798553466797, 0.032466911315917966, 0.03233590316772461, 0.03233087921142578, 0.03252108764648438, 0.032311264038085936, 0.03263059234619141, 0.03220297622680664, 0.03235635375976562, 0.03262259292602539, 0.032399360656738284, 0.032524288177490236, 0.03233587265014649, 0.032529502868652346, 0.0324310073852539, 0.03259392166137695, 0.03241779327392578, 0.0324956169128418, 0.032608318328857425, 0.03250579071044922, 0.03227033615112305, 0.032755615234375, 0.03231343841552734, 0.03246080017089844, 0.032630527496337894, 0.03236598587036133, 0.032586273193359376, 0.032394718170166016, 0.03305353546142578, 0.03321420669555664, 0.032280128479003904, 0.032594593048095706, 0.03227027130126953, 0.032333919525146484, 0.032323585510253904, 0.03339878463745117, 0.032473087310791016, 0.03425894546508789, 0.033230846405029296, 0.03247513580322266, 0.03237273788452148, 0.03398860931396484, 0.03214915084838867, 0.032465248107910155, 0.033955841064453124, 0.0326935043334961, 0.032325759887695316, 0.03225065612792969, 0.03235561752319336, 0.03230064010620117, 
0.0329205436706543, 0.03227603149414063, 0.032350654602050784, 0.03231862258911133, 0.032199520111083985, 0.03219657516479492, 0.03213641738891602, 0.032311744689941406, 0.032473472595214846, 0.03211468887329102, 0.03229695892333984, 0.03221686553955078, 0.032215072631835935, 0.03234614562988281, 0.0322674560546875, 0.03206646347045899, 0.03227654266357422, 0.03221491241455078, 0.03216191864013672, 0.032486495971679685, 0.03227040100097656, 0.03203567886352539, 0.03228851318359375, 0.03209996795654297, 0.03259449768066406, 0.032618431091308596, 0.03200140762329102, 0.03210316848754883, 0.03208348846435547, 0.03232201766967773, 0.03215081787109375, 0.03209299087524414, 0.03210435104370117, 0.032054527282714844, 0.03226675033569336, 0.032119041442871095, 0.03212252807617187, 0.032282974243164064, 0.0320634880065918, 0.03208524703979492, 0.03206371307373047, 0.03201267242431641, 0.0331629753112793, 0.03256095886230469, 0.03227081680297852, 0.03249151992797852, 0.0324771842956543, 0.03218764877319336, 0.03220352172851562, 0.032263744354248045, 0.032159393310546874, 0.032113441467285155, 0.03220822525024414, 0.032303775787353516, 0.032292865753173826, 0.0321839370727539, 0.03229324722290039, 0.032827262878417966, 0.03243824005126953, 0.03214083099365234, 0.03255990219116211, 0.03234595108032227, 0.03218636703491211, 0.03222310256958008, 0.03230303955078125, 0.03221113586425781, 0.03243622589111328, 0.03382476806640625, 0.03227961730957031, 0.032336353302001956, 0.032438751220703124, 0.03227983856201172, 0.03266572952270508, 0.03237039947509766, 0.03232838439941406, 0.03299142456054688, 0.03241519927978516, 0.032565185546875, 0.03269692611694336, 0.03221299362182617, 0.03251609420776367, 0.032266239166259765, 0.03217407989501953, 0.03229462432861328, 0.032237377166748044, 0.03239724731445313, 0.032204864501953125, 0.0322196159362793, 0.03226841735839844, 0.03210636901855469, 0.03221299362182617, 0.03231532669067383, 0.032294975280761716, 0.032392353057861326, 0.032183231353759764, 0.03222224044799805, 0.032102657318115235, 0.032330463409423825, 0.03211868667602539, 0.0321814079284668, 0.03223212814331055, 0.032116897583007814, 0.03227391815185547, 0.03222566223144531, 0.03231142425537109, 0.032917503356933595, 0.03223961639404297, 0.032176128387451174, 0.034088958740234376, 0.03489177703857422, 0.03225932693481445, 0.03221321487426758, 0.032162464141845704, 0.03227769470214844, 0.03235295867919922, 0.03221004867553711, 0.03245929718017578, 0.03239324951171875, 0.03244204711914062, 0.03246931076049805, 0.032401153564453125, 0.03235897445678711, 0.03215462493896484, 0.03262355041503906, 0.0322371826171875, 0.032384830474853514, 0.032263904571533206, 0.0321646728515625, 0.03215679931640625, 0.03217852783203125, 0.032512161254882814, 0.03357948684692383, 0.03545510482788086, 0.03285964965820312, 0.03226252746582031, 0.032266239166259765, 0.03230838394165039, 0.032027488708496095, 0.03235337448120117, 0.032162464141845704, 0.032293121337890626, 0.032143360137939454, 0.032102401733398435, 0.03236982345581055, 0.031986015319824215, 0.032084480285644534, 0.03208508682250977, 0.03234905624389649, 0.03221263885498047, 0.03226819229125977, 0.0321297607421875, 0.032007648468017576, 0.03203305435180664, 0.032237247467041014, 0.03211705780029297, 0.03237478256225586, 0.0321063346862793, 0.032034976959228516, 0.03267724609375, 0.03217062377929687, 0.03296412658691406, 0.03215151977539062, 0.032602527618408206, 0.03223971176147461, 0.0322949104309082, 0.03252617645263672, 0.032309310913085934, 
0.032222911834716796]",tokens/s,30.77621766193942,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4365.078528,6253.576192,0.0,5851.05408,5850.293248,s,1,12.8986787109375,12.8986787109375,0.0,12.8986787109375,12.8986787109375,12.8986787109375,12.8986787109375,[12.8986787109375],,kWh,0.00011790121930416906,1.299762745122736e-05,3.700752960600004e-05,0.00016790637636139645,,MB,4181.327872,6368.919552,0.0,5951.717376,5923.050496,s,10,2.1172724914550782,0.21172724914550783,0.0006729191699409909,0.21170772552490236,0.21253331909179687,0.21269190673828126,0.21281877685546877,"[0.21043116760253905, 0.2109115447998047, 0.21155130004882813, 0.2120173797607422, 0.21249807739257812, 0.21150015258789062, 0.21160044860839844, 0.21181500244140625, 0.212096923828125, 0.21285049438476564]",tokens/s,1209.1027538173232,kWh,6.210755198492844e-06,6.849310328937576e-07,4.108679409872378e-06,1.1004365641258978e-05,tokens/kWh,23263494.538946614,MB,4196.622336,6383.599616,0.0,5966.39744,5923.053056,s,10,21.949365234374994,2.1949365234375002,0.01020899787348749,2.1905736083984375,2.210012353515625,2.2122805908203125,2.2140951806640623,"[2.204841796875, 2.187163330078125, 2.189942626953125, 2.18753857421875, 2.19008154296875, 2.192485107421875, 2.191065673828125, 2.182189453125, 2.214548828125, 
2.20950830078125]",tokens/s,28.702424570044244,kWh,6.404588771192427e-05,7.063696530710777e-06,4.261535796932743e-05,0.00011372494221196247,tokens/kWh,553968.1865265715,,s,630,21.946715660095197,0.03483605660332574,0.0007071402531819514,0.03467228889465332,0.035227783966064456,0.035701141929626465,0.03774696048736573,"[0.03587456130981445, 0.03493094253540039, 0.03491635131835937, 0.034761985778808596, 0.0349766731262207, 0.03572652816772461, 0.03485353469848633, 0.03475046539306641, 0.03463148880004883, 0.03477027130126953, 0.034785663604736325, 0.03500694274902344, 0.034715648651123046, 0.035784702301025394, 0.035059711456298825, 0.03484262466430664, 0.03531305694580078, 0.03482275390625, 0.03460095977783203, 0.03478124618530273, 0.03523097610473633, 0.035068607330322264, 0.0347852783203125, 0.03463987350463867, 0.03467673492431641, 0.03467209625244141, 0.03472848129272461, 0.03493273544311523, 0.03888127899169922, 0.035547134399414065, 0.034780353546142576, 0.034681663513183594, 0.03461324691772461, 0.03492800140380859, 0.03473468780517578, 0.03475254440307617, 0.03487654495239258, 0.03481414413452148, 0.03484048080444336, 0.03479811096191406, 0.03493417739868164, 0.034875553131103514, 0.034790081024169923, 0.034976993560791016, 0.03488643264770508, 0.035153408050537106, 0.03507455825805664, 0.034995742797851566, 0.0346602554321289, 0.034904640197753904, 0.03468492889404297, 0.0346847038269043, 0.03467695999145508, 0.03462963104248047, 0.03502899169921875, 0.034668479919433594, 0.034578495025634766, 0.03487539291381836, 0.034661598205566406, 0.03480166244506836, 0.035238689422607425, 0.036636863708496094, 0.0351926383972168, 0.03569385528564453, 0.034820831298828125, 0.03468864059448242, 0.03466668701171875, 0.0352289924621582, 0.03507699203491211, 0.034793407440185546, 0.034616382598876956, 0.03560755157470703, 0.03495935821533203, 0.03464944076538086, 0.03460275268554688, 0.0345076789855957, 0.0347852783203125, 0.035062942504882816, 0.034609470367431644, 0.034783584594726566, 0.03455366516113281, 0.03458457565307617, 0.03448787307739258, 0.034913089752197264, 0.034813953399658204, 0.03461529541015625, 0.03458374404907227, 0.03466940689086914, 0.03598947143554688, 0.035135326385498045, 0.034770942687988284, 0.0346618881225586, 0.03470608139038086, 0.034737632751464846, 0.03463631820678711, 0.034640926361083985, 0.035107807159423826, 0.03501055908203125, 0.034729984283447264, 0.03450806427001953, 0.03444400024414063, 0.03464521789550781, 0.034330944061279296, 0.03442940902709961, 0.034419742584228516, 0.034332832336425784, 0.034648895263671875, 0.034393470764160154, 0.034746273040771485, 0.03472819137573242, 0.034458080291748044, 0.03446783828735352, 0.034406494140625, 0.034508766174316405, 0.0345968017578125, 0.034491935729980466, 0.03463625717163086, 0.03446745681762695, 0.034527198791503906, 0.034701057434082035, 0.03481235122680664, 0.03457865524291992, 0.03457344055175781, 0.03455680084228516, 0.034462718963623046, 0.034544639587402344, 0.035778785705566404, 0.035972129821777346, 0.03642166519165039, 0.035154624938964846, 0.03478144073486328, 0.03531161499023437, 0.03476028823852539, 0.0347171516418457, 0.03446176147460937, 0.03466944122314453, 0.034881633758544923, 0.03538467025756836, 0.03467116928100586, 0.03472793579101562, 0.03461939239501953, 0.034645729064941407, 0.034624927520751955, 0.03466534423828125, 0.03470284652709961, 0.034643550872802735, 0.03482921600341797, 0.03452463912963867, 0.03447657775878906, 0.03458649444580078, 0.03476895904541016, 0.03477695846557617, 
0.0344837760925293, 0.034544254302978514, 0.034551807403564457, 0.03460710525512695, 0.03514572906494141, 0.034746368408203124, 0.03481804656982422, 0.034532798767089846, 0.03477305603027344, 0.034447872161865234, 0.03427679824829102, 0.034495040893554686, 0.034354209899902344, 0.03451145553588867, 0.034679168701171874, 0.03453747177124023, 0.03455599975585937, 0.03476233673095703, 0.03437331390380859, 0.034992031097412106, 0.03511574554443359, 0.03491635131835937, 0.03456396865844726, 0.0344508171081543, 0.03608448028564453, 0.03462758255004883, 0.034592769622802735, 0.03462863922119141, 0.035015457153320315, 0.03465343856811524, 0.034546207427978516, 0.03439215850830078, 0.03476108932495117, 0.03441206359863281, 0.03435152053833008, 0.03451903915405274, 0.03429785537719727, 0.03553817749023438, 0.034640705108642575, 0.0345456657409668, 0.03462508773803711, 0.03458895874023438, 0.03460879898071289, 0.0344925765991211, 0.034447521209716794, 0.03434454345703125, 0.03440496063232422, 0.03437158584594727, 0.03440553665161133, 0.03453811264038086, 0.03446540832519531, 0.03442684936523437, 0.034742721557617186, 0.03464166259765625, 0.03447587203979492, 0.03465891265869141, 0.03448831939697266, 0.034713600158691404, 0.034471168518066406, 0.034501087188720705, 0.03570710372924805, 0.03523590469360351, 0.03454556655883789, 0.03444745635986328, 0.03472297668457031, 0.03453424072265625, 0.03462963104248047, 0.034643646240234374, 0.034516864776611325, 0.03458911895751953, 0.03467248153686524, 0.03573980712890625, 0.03474560165405274, 0.03467955017089844, 0.03450470352172851, 0.03485843276977539, 0.034519615173339846, 0.03464601516723633, 0.03459241485595703, 0.0350351676940918, 0.03483596801757813, 0.03464691162109375, 0.034727489471435544, 0.03469753646850586, 0.034569343566894534, 0.03588544082641602, 0.035787326812744144, 0.03479142379760742, 0.03509209442138672, 0.034896095275878905, 0.03472313690185547, 0.034638687133789064, 0.03461849594116211, 0.03470348739624023, 0.03476089477539063, 0.034605342864990236, 0.03462928009033203, 0.034603649139404294, 0.03460684967041015, 0.03475276947021484, 0.03500787353515625, 0.034843265533447264, 0.03483884811401367, 0.0347845458984375, 0.0347685432434082, 0.035012737274169925, 0.03469990539550781, 0.034705406188964845, 0.03521843338012695, 0.034761119842529296, 0.034455265045166016, 0.035004512786865234, 0.03464611053466797, 0.03452099227905273, 0.03463183975219727, 0.03463436889648437, 0.034576255798339846, 0.03457174301147461, 0.034486942291259766, 0.03458438491821289, 0.03466873550415039, 0.0346512336730957, 0.034907039642333985, 0.03475804901123047, 0.034655998229980466, 0.03450147247314453, 0.03464988708496094, 0.03437558364868164, 0.03443948745727539, 0.034506752014160154, 0.03441616058349609, 0.03467638397216797, 0.03503987121582031, 0.03453091049194336, 0.03453196716308594, 0.03462960052490234, 0.03461734390258789, 0.034438400268554686, 0.034616352081298825, 0.03435830307006836, 0.03655136108398437, 0.04157952117919922, 0.03466310501098633, 0.03453737640380859, 0.03444572830200195, 0.03434700775146484, 0.03447542572021484, 0.034477825164794924, 0.034535263061523436, 0.034440193176269535, 0.03456598281860351, 0.03461750411987305, 0.034589920043945316, 0.03457344055175781, 0.03466921615600586, 0.03446476745605469, 0.034508800506591795, 0.03446169662475586, 0.03439811325073242, 0.03453142547607422, 0.03450844955444336, 0.03465852737426758, 0.03451478576660156, 0.035536705017089845, 0.034713855743408205, 0.03462144088745117, 0.03664281463623047, 
0.03522969436645508, 0.034945022583007815, 0.034885631561279294, 0.03461852645874024, 0.03439497756958008, 0.034524799346923825, 0.03459324645996094, 0.03479132843017578, 0.03457660675048828, 0.03450204849243164, 0.03431212615966797, 0.034595264434814456, 0.03457062530517578, 0.03445926284790039, 0.03476684951782227, 0.03465625762939453, 0.034869247436523435, 0.03469622421264648, 0.03453641510009765, 0.03472307205200195, 0.03458329772949219, 0.03491609573364258, 0.034820350646972656, 0.03460220718383789, 0.03500012969970703, 0.03476316833496094, 0.03464249420166016, 0.03459423828125, 0.034757183074951174, 0.03466976165771484, 0.03449939346313476, 0.03457251358032227, 0.03525571060180664, 0.03772623825073242, 0.0348757438659668, 0.034780670166015625, 0.03490771102905273, 0.03497071838378906, 0.0348834228515625, 0.03485238265991211, 0.03473398590087891, 0.03489571380615234, 0.03469180679321289, 0.03527056121826172, 0.034647232055664064, 0.0346080322265625, 0.03463577651977539, 0.034662303924560545, 0.03445564651489258, 0.03449622344970703, 0.03433910369873047, 0.03479507064819336, 0.03466617584228516, 0.035001087188720706, 0.03460636901855469, 0.034861793518066404, 0.034400161743164064, 0.03445971298217773, 0.03455353546142578, 0.035655681610107424, 0.034953216552734374, 0.0348326416015625, 0.03465347290039063, 0.03456480026245117, 0.03465593719482422, 0.034465438842773435, 0.03448697662353516, 0.03474393463134766, 0.03466457748413086, 0.03467206573486328, 0.03476118469238281, 0.03449020767211914, 0.03446195220947266, 0.03471318435668945, 0.03474060821533203, 0.034696769714355466, 0.03492476654052734, 0.034597118377685546, 0.03450265502929688, 0.03448012924194336, 0.03458006286621094, 0.03471971130371094, 0.034568641662597654, 0.0344535026550293, 0.034359169006347653, 0.03433824157714844, 0.034396865844726565, 0.03455152130126953, 0.03482815933227539, 0.03486352157592774, 0.034748382568359375, 0.0348078384399414, 0.03448771286010742, 0.034785663604736325, 0.034659969329833985, 0.03442339324951172, 0.03436556625366211, 0.034436511993408206, 0.034496097564697265, 0.034630016326904295, 0.03941836929321289, 0.034495872497558595, 0.034557567596435544, 0.034334880828857425, 0.034744384765625, 0.03555344009399414, 0.034965438842773436, 0.03478345489501953, 0.03471203231811523, 0.0346951675415039, 0.03461027145385742, 0.03445382308959961, 0.03537161636352539, 0.03485660934448242, 0.03456342315673828, 0.03453231811523438, 0.03490204620361328, 0.034721790313720705, 0.03460467147827148, 0.034679168701171874, 0.03630393600463867, 0.03472889709472656, 0.03558102416992188, 0.03480966567993164, 0.03463731384277344, 0.03448416137695313, 0.0345994873046875, 0.03504051208496094, 0.03478195190429687, 0.03491020965576172, 0.034694145202636716, 0.03453635025024414, 0.03472758483886719, 0.03462086486816406, 0.03452521514892578, 0.03459990310668945, 0.034450721740722653, 0.03446857452392578, 0.03444736099243164, 0.03451715087890625, 0.03455315017700195, 0.034737728118896485, 0.03460752105712891, 0.03453580856323242, 0.034494655609130856, 0.03455939102172852, 0.034517471313476565, 0.034719070434570315, 0.0344620475769043, 0.03454611206054688, 0.03455184173583985, 0.03449020767211914, 0.03464588928222656, 0.03442230224609375, 0.03455411148071289, 0.03435772705078125, 0.0347072639465332, 0.034543807983398435, 0.03479702377319336, 0.034539329528808595, 0.03501737594604492, 0.034709121704101564, 0.03455414581298828, 0.034680992126464846, 0.034546878814697264, 0.03443404769897461, 0.034291519165039065, 
0.03554329681396484, 0.03442015838623047, 0.03450912094116211, 0.03451644897460938, 0.03440591812133789, 0.034628223419189454, 0.03464790344238281, 0.03439596939086914, 0.034400993347167966, 0.03465011215209961, 0.03445555114746094, 0.03452928161621094, 0.03449216079711914, 0.03462380981445313, 0.03483232116699219, 0.035418174743652345, 0.034738113403320316, 0.03470454406738281, 0.03585782241821289, 0.03502345657348633, 0.03474431991577148, 0.034869247436523435, 0.034633216857910154, 0.035242591857910156, 0.034748321533203126, 0.03468425750732422, 0.03459139251708984, 0.034715648651123046, 0.03464396667480469, 0.03467059326171875, 0.03458457565307617, 0.03463372802734375, 0.03470131301879883, 0.034754558563232424, 0.034928638458251955, 0.035351680755615233, 0.034521633148193356, 0.0348941764831543, 0.03456732940673828, 0.03451580810546875, 0.034508800506591795, 0.035227649688720705, 0.043069087982177734, 0.03503510284423828, 0.03947967910766602, 0.034953121185302735, 0.03484681701660156, 0.03440639877319336, 0.03464191818237305, 0.03487276840209961, 0.03458224105834961, 0.03573955154418945, 0.03478607940673828, 0.03473833465576172, 0.034822017669677734, 0.0346901741027832, 0.03458560180664062, 0.03465184020996094, 0.03461260986328125, 0.034937793731689454, 0.03507318496704102, 0.03478409576416015, 0.03531545639038086, 0.03506774520874024, 0.034807552337646486, 0.03479993438720703, 0.035231201171875, 0.03518508911132812, 0.03471200180053711, 0.034813953399658204, 0.037674270629882815, 0.03508911895751953, 0.0347437744140625, 0.03775542449951172, 0.036356063842773435, 0.03488095855712891, 0.03458323287963867, 0.03457155227661133, 0.03446233749389648, 0.03460300827026367, 0.034726913452148435, 0.03578102493286133, 0.03481948852539062, 0.034687873840332034, 0.03448035049438476, 0.03507791900634766, 0.03491635131835937, 0.034907520294189455, 0.03494118499755859, 0.034918174743652344, 0.03474288177490235, 0.03491625595092773, 0.0353895378112793, 0.035087905883789065, 0.03548003387451172, 0.037702751159667966, 0.03535961532592773, 0.039641120910644534, 0.03473625564575195, 0.03478720092773437, 0.034609153747558595, 0.03465420913696289, 0.035009632110595705, 0.03508492660522461, 0.035055553436279294, 0.03505801773071289, 0.03518463897705078, 0.035108673095703126, 0.03507628631591797, 0.03468492889404297, 0.03461228942871094, 0.03513951873779297, 0.034449630737304685, 0.034710304260253906, 0.035092384338378906, 0.034682975769042966, 0.034635135650634766, 0.0345863037109375, 0.03473503875732422, 0.034577793121337894, 0.03505984115600586, 0.03502950286865234, 0.034823200225830075, 0.034759456634521485, 0.034737823486328125, 0.03447657775878906, 0.03759308624267578, 0.03546742248535156, 0.03667516708374023, 0.035297534942626954, 0.03470336151123047, 0.03470249557495117, 0.03464278411865234, 0.034729022979736325, 0.03525839996337891, 0.0346530876159668, 0.03460300827026367, 0.03467264175415039, 0.034643936157226565, 0.03447196960449219, 0.03460710525512695, 0.03458867263793945, 0.035141632080078124, 0.034799617767333986]",tokens/s,28.705889744837872,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = 
decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4357.083136,6253.576192,0.0,5851.05408,5850.293248,s,1,13.36433984375,13.36433984375,0.0,13.36433984375,13.36433984375,13.36433984375,13.36433984375,[13.36433984375],,kWh,0.00011515703033332916,1.2695310477692034e-05,3.825364171399989e-05,0.00016610598252502108,,MB,4176.535552,6368.919552,0.0,5951.717376,5922.919424,s,10,2.04351936340332,0.20435193634033202,0.00037293361748616803,0.20436190032958984,0.204795751953125,0.20487872619628908,0.20494510559082033,"[0.2041588134765625, 0.20383331298828125, 0.2043966064453125, 0.20434538269042968, 0.2043778533935547, 0.20368357849121094, 0.20463885498046874, 0.20496170043945314, 0.20477731323242188, 0.204345947265625]",tokens/s,1252.7407598117993,kWh,5.991025058333273e-06,6.606983694699484e-07,3.977888669836732e-06,1.0629612097639953e-05,tokens/kWh,24083663.415792808,MB,4178.808832,6383.599616,0.0,5966.39744,5922.921984,s,10,20.187072021484376,2.0187072021484376,0.0082884953911922,2.0212205810546875,2.025838342285156,2.026246319580078,2.0265727014160153,"[2.0252432861328127, 2.0257476806640624, 2.0224720458984375, 2.0199691162109374, 2.026654296875, 2.0192947998046873, 2.0254910888671875, 2.0047191162109375, 2.00261376953125, 2.0148668212890626]",tokens/s,31.20809195754162,kWh,5.878429165208348e-05,6.4828625773564496e-06,3.89057567481632e-05,0.0001041729109776031,tokens/kWh,604763.747204346,,s,630,20.18412456130981,0.03203829295446002,0.0004979787885458713,0.031965407371520996,0.03237290840148926,0.03264731922149658,0.03466078159332276,"[0.03306291198730469, 0.032524288177490236, 0.032043006896972655, 0.03208723068237305, 0.03213318252563477, 0.032041728973388674, 0.03221644973754883, 0.0321866569519043, 0.03224611282348633, 0.032440319061279296, 0.032118785858154295, 0.0320563850402832, 0.03203168106079102, 0.03475251388549805, 0.03324860763549805, 0.03197599983215332, 0.0319749755859375, 0.03214313507080078, 0.03376403045654297, 0.03195401573181152, 0.031777759552001954, 0.03180748748779297, 0.03188675117492676, 0.0317541446685791, 0.03188601684570312, 0.03175222396850586, 0.03168867111206055, 0.03186073684692383, 0.031614879608154296, 0.03213430404663086, 0.03182649612426758, 0.03237225723266601, 0.032190719604492185, 0.031859296798706055, 0.03173075294494629, 0.03186783981323242, 0.03192556762695312, 0.032094913482666014, 0.03201200103759766, 0.03211088180541992, 0.032045055389404296, 0.03194675254821777, 0.03182908821105957, 0.031923103332519534, 0.031767744064331055, 0.03194486427307129, 0.03159107208251953, 0.03154156875610352, 0.031604223251342774, 0.03163715171813965, 0.03166790390014648, 0.03207254409790039, 0.0320552978515625, 0.033976318359375, 0.032198654174804685, 0.032456703186035156, 0.03229695892333984, 0.03224934387207031, 
0.032395774841308594, 0.03208396911621094, 0.03186025619506836, 0.03213900756835938, 0.03252297592163086, 0.03306496047973633, 0.032215038299560544, 0.032423328399658204, 0.03209843063354492, 0.03236505508422852, 0.03197743988037109, 0.031932416915893554, 0.03192214393615723, 0.03174403190612793, 0.03188076782226563, 0.031706720352172854, 0.03171209526062012, 0.031469472885131834, 0.03185391998291016, 0.03215846252441406, 0.032231327056884765, 0.03211884689331055, 0.031963167190551756, 0.031985151290893556, 0.03158627128601074, 0.0317775993347168, 0.03221894454956055, 0.031825216293334964, 0.032098686218261716, 0.03214518356323242, 0.03213555145263672, 0.032073089599609375, 0.031980224609375, 0.03235353469848633, 0.0321229133605957, 0.0321195182800293, 0.032298816680908206, 0.03222751998901367, 0.032394718170166016, 0.03210723114013672, 0.03194160079956055, 0.031966047286987306, 0.03198518371582031, 0.031970848083496095, 0.03196134376525879, 0.031814239501953126, 0.03179308891296387, 0.032018558502197265, 0.03171462440490723, 0.03635459136962891, 0.03473775863647461, 0.032034591674804686, 0.0319803524017334, 0.03177836799621582, 0.03177107238769531, 0.031792543411254884, 0.03203702545166016, 0.03201887893676758, 0.0319398078918457, 0.03216988754272461, 0.03199270439147949, 0.03214950561523437, 0.032210945129394535, 0.03214665603637695, 0.032381313323974606, 0.032104862213134765, 0.03194675254821777, 0.032454654693603514, 0.03286646270751953, 0.03200780868530274, 0.03206387329101563, 0.031905792236328126, 0.03170099258422852, 0.03172352027893066, 0.032086017608642575, 0.032200702667236326, 0.03206246566772461, 0.031955968856811526, 0.032159072875976566, 0.031857311248779295, 0.03488735961914063, 0.03253689575195313, 0.03210649490356445, 0.032142784118652346, 0.03247705459594727, 0.03218502426147461, 0.03211468887329102, 0.03199795150756836, 0.032143230438232424, 0.032118335723876956, 0.03206790542602539, 0.03170329666137695, 0.03201763153076172, 0.03211718368530273, 0.03202406311035156, 0.03219747161865234, 0.0319180793762207, 0.0320552978515625, 0.03204438400268555, 0.031812255859375, 0.03206553649902344, 0.031834112167358396, 0.03174336051940918, 0.031676959991455075, 0.03171913528442383, 0.03180112075805664, 0.03201494216918945, 0.03198566436767578, 0.0317890567779541, 0.031696895599365234, 0.031735807418823245, 0.0317901439666748, 0.031785247802734375, 0.03176281547546387, 0.032818462371826174, 0.032039936065673826, 0.03211606216430664, 0.03208595275878906, 0.032000736236572264, 0.03204095840454101, 0.03231948852539063, 0.03191747283935547, 0.03196784019470215, 0.0317061767578125, 0.03251491165161133, 0.03219055938720703, 0.03223961639404297, 0.03254272079467774, 0.032643070220947264, 0.0322476806640625, 0.03213734436035156, 0.03303702545166016, 0.032077823638916016, 0.032368446350097654, 0.03199542427062988, 0.03207644653320312, 0.031845632553100585, 0.032054046630859374, 0.03191190338134765, 0.03180681610107422, 0.031883327484130856, 0.03202233505249023, 0.03207017517089844, 0.03218457412719727, 0.03234604644775391, 0.032650558471679685, 0.032185375213623045, 0.03231510543823242, 0.031932287216186524, 0.03203903961181641, 0.03224371337890625, 0.032083553314208986, 0.032163902282714846, 0.03195529556274414, 0.03199372863769531, 0.03181785583496094, 0.031821823120117186, 0.031692800521850584, 0.031530208587646484, 0.03197635269165039, 0.031983488082885744, 0.03211788940429688, 0.03283363342285156, 0.03190863990783691, 0.03184435272216797, 0.03187420845031738, 0.03188412857055664, 
0.032167617797851565, 0.031858816146850585, 0.031854591369628905, 0.032245342254638674, 0.03178463935852051, 0.031793216705322265, 0.03172822380065918, 0.03237475204467773, 0.031915552139282224, 0.033622783660888673, 0.03183564758300781, 0.03185436820983887, 0.03185737609863281, 0.03237270355224609, 0.032255680084228515, 0.03187542343139648, 0.032212928771972654, 0.03180550384521484, 0.031950176239013674, 0.031975360870361326, 0.03178099250793457, 0.032201313018798826, 0.03185372734069824, 0.03222793579101563, 0.0319368953704834, 0.03171248054504395, 0.03204751968383789, 0.03289779281616211, 0.032787582397460935, 0.032039806365966796, 0.03207167816162109, 0.0318156795501709, 0.032375968933105466, 0.03203715133666992, 0.03220291137695312, 0.03225436782836914, 0.03212457656860351, 0.03180988883972168, 0.03195289611816406, 0.03197068786621094, 0.03196518325805664, 0.03185043144226074, 0.03193683242797852, 0.03207001495361328, 0.03229391860961914, 0.03225884628295898, 0.0324571533203125, 0.0321363525390625, 0.032223838806152344, 0.03231948852539063, 0.031938560485839845, 0.03211264038085938, 0.03196435165405274, 0.03199395179748535, 0.03234070587158203, 0.032004096984863284, 0.03225775909423828, 0.03205353546142578, 0.03207696151733398, 0.0322108154296875, 0.031987808227539063, 0.03178585624694824, 0.03177465629577637, 0.031690816879272464, 0.032425537109375, 0.03216003036499023, 0.03294428634643555, 0.03192422485351563, 0.03181727981567383, 0.03195743942260742, 0.032, 0.032007648468017576, 0.03187478446960449, 0.03201683044433594, 0.03447232055664062, 0.033181697845458984, 0.032307201385498044, 0.03228208160400391, 0.0320882568359375, 0.031862112045288084, 0.03201891326904297, 0.0319597110748291, 0.032120704650878906, 0.032198654174804685, 0.03188073539733887, 0.032242145538330075, 0.03236454391479492, 0.03212255859375, 0.032129344940185545, 0.0319703369140625, 0.03304995346069336, 0.03197151947021484, 0.031785440444946286, 0.03171123123168945, 0.03174195289611816, 0.032288799285888674, 0.032276447296142576, 0.0321715202331543, 0.03262515258789062, 0.0323768310546875, 0.031971328735351565, 0.03203481674194336, 0.03201814270019531, 0.03171356773376465, 0.03184595108032227, 0.03178275108337402, 0.03188591957092285, 0.032093376159667966, 0.03189030456542969, 0.031596736907958986, 0.03167820739746094, 0.031743135452270504, 0.03157843208312988, 0.03522000122070312, 0.03191776084899902, 0.03219692611694336, 0.03206758499145508, 0.032081249237060544, 0.03207030487060547, 0.03180764770507812, 0.032414913177490234, 0.03241177749633789, 0.03207980728149414, 0.03242390441894531, 0.032344223022460934, 0.03195471954345703, 0.03226483154296875, 0.03202191925048828, 0.0318756160736084, 0.031756416320800784, 0.03180486488342285, 0.031926240921020504, 0.031799455642700196, 0.03199948883056641, 0.03218239974975586, 0.03193939208984375, 0.03172512054443359, 0.03179974365234375, 0.032064640045166015, 0.0319102725982666, 0.031869024276733396, 0.03177670478820801, 0.03172406387329101, 0.03169030380249024, 0.031781248092651364, 0.03251587295532227, 0.032119007110595704, 0.03196675109863281, 0.03174652862548828, 0.0318791675567627, 0.032010238647460935, 0.0319213752746582, 0.03210492706298828, 0.0327729606628418, 0.03191200065612793, 0.031872768402099606, 0.03210841751098633, 0.03185903930664063, 0.03191401672363281, 0.03214950561523437, 0.03226959991455078, 0.03201279830932617, 0.036603424072265626, 0.0340118408203125, 0.03236044692993164, 0.03191100883483887, 0.03192716789245605, 0.03209983825683594, 
0.03188547134399414, 0.03177408027648926, 0.03177779197692871, 0.03179724884033203, 0.03172761535644531, 0.031662080764770506, 0.0317828483581543, 0.032065601348876954, 0.03214313507080078, 0.03216604614257813, 0.03206067276000977, 0.031982112884521484, 0.03227785491943359, 0.03187116813659668, 0.03181644821166992, 0.031745216369628904, 0.031681631088256834, 0.03170684814453125, 0.03174982452392578, 0.031877439498901365, 0.03172719955444336, 0.03178454399108887, 0.031779647827148434, 0.03201228713989258, 0.03196723175048828, 0.03202022552490234, 0.032045055389404296, 0.032069889068603516, 0.03247283172607422, 0.03193404769897461, 0.031864896774291995, 0.03489388656616211, 0.03212547302246094, 0.031942655563354495, 0.03228220748901367, 0.03223183822631836, 0.03197337532043457, 0.03248672103881836, 0.031947456359863284, 0.032022014617919925, 0.03216230392456055, 0.03210841751098633, 0.03197913551330566, 0.03209862518310547, 0.03222147369384765, 0.03193436813354492, 0.031866880416870115, 0.031887359619140625, 0.0327573127746582, 0.03188729667663574, 0.03205376052856445, 0.032128639221191406, 0.032207263946533206, 0.03202790451049805, 0.031970016479492186, 0.03201433563232422, 0.03207942581176758, 0.03197120094299316, 0.03203286361694336, 0.03196976089477539, 0.03192422485351563, 0.03172326469421387, 0.031672576904296874, 0.03167193603515625, 0.031859071731567386, 0.03188307189941406, 0.03179948806762695, 0.0318791675567627, 0.03226367950439453, 0.03178726387023926, 0.031572223663330075, 0.03171878433227539, 0.031646335601806644, 0.03165983963012695, 0.031566015243530275, 0.03175750350952149, 0.03153798484802246, 0.03165526390075683, 0.031578784942626954, 0.03148963165283203, 0.031400096893310546, 0.031465152740478515, 0.031719999313354494, 0.03199590492248535, 0.031678176879882815, 0.03158051109313965, 0.03163871955871582, 0.0317608642578125, 0.03145347213745117, 0.031680511474609374, 0.03156787109375, 0.03159241676330567, 0.03315222549438476, 0.031888191223144534, 0.03157606315612793, 0.03164275169372559, 0.03169564819335938, 0.03177891159057617, 0.03184639930725098, 0.03177267265319824, 0.0318767032623291, 0.03170140838623047, 0.031887359619140625, 0.03188688087463379, 0.031648223876953124, 0.032454654693603514, 0.03175638389587403, 0.03161183929443359, 0.03156067276000977, 0.03175369644165039, 0.0316646728515625, 0.032949825286865235, 0.03198224067687988, 0.03183712005615234, 0.031613792419433594, 0.03214307022094726, 0.032643360137939455, 0.03218841552734375, 0.03228230285644531, 0.031940448760986326, 0.03179104042053223, 0.03320681762695313, 0.031770656585693356, 0.03173315238952637, 0.03183593559265137, 0.031916736602783206, 0.03199804878234863, 0.03196108818054199, 0.03161039924621582, 0.03160521507263184, 0.031493120193481446, 0.03196006393432617, 0.0314052791595459, 0.03149699211120605, 0.03151468849182129, 0.031463359832763674, 0.03134623908996582, 0.03149996757507324, 0.031425088882446287, 0.032129215240478515, 0.0317071361541748, 0.03174604797363281, 0.0319422721862793, 0.03176281547546387, 0.03184416007995605, 0.03169503974914551, 0.03164159965515137, 0.03251753616333008, 0.031636064529418945, 0.03164879989624023, 0.031554527282714843, 0.03145449638366699, 0.031367904663085935, 0.03196313667297363, 0.03163324737548828, 0.03171958351135254, 0.03169225692749023, 0.03165238380432129, 0.031769887924194336, 0.03189423942565918, 0.03166374397277832, 0.031613567352294925, 0.03164454460144043, 0.03196563148498535, 0.031414495468139646, 0.031495744705200196, 0.0317938232421875, 
0.0315863037109375, 0.031356927871704104, 0.03155939292907715, 0.03164742469787598, 0.03167647933959961, 0.03155971145629883, 0.031772512435913086, 0.03255337524414063, 0.03169935989379883, 0.03167756843566895, 0.03163225555419922, 0.03213865661621094, 0.03189311981201172, 0.031691263198852536, 0.032029151916503906, 0.03177043151855469, 0.0320445442199707, 0.03190239906311035, 0.032021888732910155, 0.03193900871276856, 0.03252633666992188, 0.03333868789672852, 0.032140159606933595, 0.03207167816162109, 0.03191398429870605, 0.03208176040649414, 0.03221929550170898, 0.031942495346069334, 0.031897279739379884, 0.03182572746276856, 0.03193420791625977, 0.03170601654052734, 0.03186483192443847, 0.031894655227661134, 0.03200617599487305, 0.03191465568542481, 0.03217427062988281, 0.03189142417907715, 0.031991264343261716, 0.03170771217346192, 0.03175596809387207, 0.03170284843444824, 0.03197798347473144, 0.03172966384887695, 0.03179929542541504, 0.03198566436767578, 0.0319815673828125, 0.032042526245117185, 0.032811489105224606, 0.0320819206237793, 0.03221913528442383, 0.03196108818054199, 0.03222323226928711, 0.03195084762573242, 0.03183616065979004, 0.03176243209838867, 0.03194675254821777, 0.0317270393371582, 0.0319266242980957, 0.03163603210449219, 0.03237247848510742, 0.03175823974609375, 0.031680511474609374, 0.031801343917846676, 0.03184639930725098, 0.031981407165527345, 0.03196944046020508, 0.032236961364746096, 0.03199148750305176, 0.031835039138793944]",tokens/s,31.21264923263619,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4365.832192,6253.576192,0.0,5851.05408,5850.293248,s,1,13.0685205078125,13.0685205078125,0.0,13.0685205078125,13.0685205078125,13.0685205078125,13.0685205078125,[13.0685205078125],,kWh,0.00011580727004583953,1.2761112594354581e-05,3.740058547600095e-05,0.00016596896811619507,,MB,4192.93184,6368.919552,0.0,5951.717376,5922.919424,s,10,2.013729766845703,0.20137297668457035,0.0010140632727339764,0.20115490722656248,0.20281064300537108,0.2030287422180176,0.20320322158813478,"[0.2002224578857422, 0.19991952514648437, 0.20127349853515625, 0.20324684143066407, 0.20074684143066407, 0.20276217651367187, 0.2022227478027344, 0.20103631591796875, 0.20096438598632813, 0.20133497619628907]",tokens/s,1271.2728600173455,kWh,5.891790137916644e-06,6.497557639618761e-07,3.907425348160034e-06,1.0448971250038554e-05,tokens/kWh,24500019.559251387,MB,4196.0448,6383.599616,0.0,5966.39744,5922.921984,s,10,20.322168823242187,2.0322168823242186,0.005755235273532582,2.0313340454101563,2.0399268432617186,2.040764385986328,2.0414344201660155,"[2.02314599609375, 2.027095947265625, 2.0349510498046874, 2.03025244140625, 2.0416019287109375, 2.029526123046875, 2.03974072265625, 2.0371474609375, 2.0324156494140624, 
2.02629150390625]",tokens/s,31.000628204578128,kWh,5.976910774333372e-05,6.59239745692459e-06,3.9729720672639944e-05,0.00010609122587289825,tokens/kWh,593828.5610487398,,s,630,20.318945117950427,0.032252293838016574,0.000516369991313119,0.03216062355041504,0.03253230705261231,0.03286303730010986,0.034626441764831546,"[0.032835582733154296, 0.03238467025756836, 0.03198924827575684, 0.032000862121582034, 0.03221728134155273, 0.03191379165649414, 0.03195462417602539, 0.03193014335632324, 0.031965311050415036, 0.03185500717163086, 0.03230515289306641, 0.032316703796386716, 0.03241238403320312, 0.03194441604614258, 0.0319019832611084, 0.03225727844238281, 0.032078304290771485, 0.035848480224609375, 0.0321638412475586, 0.03202252960205078, 0.03196108818054199, 0.031919488906860354, 0.03188508796691895, 0.03204118347167969, 0.03200678253173828, 0.032130878448486326, 0.03219065475463867, 0.03234611129760742, 0.03192831993103027, 0.031974399566650394, 0.031855615615844726, 0.03182796859741211, 0.032059391021728514, 0.0317071361541748, 0.03289424133300781, 0.03229967880249023, 0.03183555221557617, 0.03192278480529785, 0.03190380859375, 0.032015838623046876, 0.03190633583068848, 0.0320015983581543, 0.031969728469848635, 0.03226419067382812, 0.031987136840820315, 0.031953664779663084, 0.03194041633605957, 0.03189145660400391, 0.03196518325805664, 0.031893503189086916, 0.03198108863830566, 0.03194883155822754, 0.03193881607055664, 0.03182355117797851, 0.032164352416992184, 0.03192380714416504, 0.03183247947692871, 0.03185663986206055, 0.03260825729370117, 0.032069633483886716, 0.032046207427978514, 0.03190182495117187, 0.0319680004119873, 0.03272499084472656, 0.032282623291015625, 0.03198067283630371, 0.03228556823730469, 0.03208806228637695, 0.03211452865600586, 0.03211894226074219, 0.03205766296386719, 0.032046943664550784, 0.03212195205688476, 0.03191884803771973, 0.03197276878356933, 0.03179887962341309, 0.03186147117614746, 0.0320146255493164, 0.03214131164550781, 0.03238886260986328, 0.03219276809692383, 0.03204886245727539, 0.032067039489746096, 0.0320909423828125, 0.032198654174804685, 0.034648063659667966, 0.03265126419067383, 0.032231422424316404, 0.032333824157714845, 0.03221635055541992, 0.03258611297607422, 0.03246044921875, 0.03208467102050781, 0.031960927963256835, 0.03203907012939453, 0.032093952178955075, 0.03206576156616211, 0.032268032073974606, 0.03240300750732422, 0.03247708892822266, 0.03211884689331055, 0.0322977294921875, 0.03195840072631836, 0.032063678741455076, 0.03215814590454102, 0.03203891372680664, 0.031959039688110355, 0.032159744262695314, 0.03205734252929687, 0.032161598205566404, 0.032032958984375, 0.03232166290283203, 0.03195078468322754, 0.03193644714355469, 0.031987520217895506, 0.03218451309204102, 0.03225190353393555, 0.032143360137939454, 0.031920127868652344, 0.0320425910949707, 0.03197583961486816, 0.032004096984863284, 0.032198654174804685, 0.03197475242614746, 0.031840768814086914, 0.032001823425292966, 0.03277139282226563, 0.03203164672851563, 0.03199795150756836, 0.03188505554199219, 0.031865087509155274, 0.03196313667297363, 0.03196854400634765, 0.0320662727355957, 0.03292684936523437, 0.03289955139160156, 0.03291164779663086, 0.03246092987060547, 0.032343902587890626, 0.032188575744628904, 0.03206147384643555, 0.03217808151245117, 0.03224185562133789, 0.03189952087402344, 0.03220412826538086, 0.03194700813293457, 0.03214992141723633, 0.032206623077392575, 0.03191846466064453, 0.03243183898925781, 0.032103584289550784, 0.03211670303344726, 
0.033205055236816404, 0.03221273422241211, 0.03190617561340332, 0.03472390365600586, 0.03230860900878906, 0.0322380485534668, 0.032403358459472655, 0.03234751892089844, 0.032129310607910154, 0.03217871856689453, 0.032282272338867185, 0.03220697784423828, 0.03226243209838867, 0.03242393493652344, 0.032263553619384766, 0.03218806457519531, 0.03237542343139648, 0.032391040802001954, 0.032174560546875, 0.03220681762695313, 0.032185504913330075, 0.032254238128662106, 0.03245091247558594, 0.03250131225585937, 0.03217068862915039, 0.03225804901123047, 0.032365726470947265, 0.03254140853881836, 0.032126495361328125, 0.03208252716064453, 0.032046367645263675, 0.03215433502197266, 0.03232505416870117, 0.03221529769897461, 0.032409919738769534, 0.03227212905883789, 0.032108798980712894, 0.03286329650878906, 0.03227443313598633, 0.03210540771484375, 0.03266969680786133, 0.03218947219848633, 0.032107616424560545, 0.03205721664428711, 0.03220275115966797, 0.03212656021118164, 0.03203932952880859, 0.03255295944213867, 0.032247806549072264, 0.03220479965209961, 0.03256934356689453, 0.0324956169128418, 0.03209043121337891, 0.0322658576965332, 0.032105953216552734, 0.032043582916259764, 0.0321495361328125, 0.032031967163085935, 0.032000862121582034, 0.031995296478271484, 0.032010238647460935, 0.032461345672607424, 0.032217086791992186, 0.03220620727539063, 0.0321368637084961, 0.03213340759277344, 0.0321354866027832, 0.03203110504150391, 0.03281062316894531, 0.03233161544799805, 0.032078048706054685, 0.03231545639038086, 0.03221939086914063, 0.03216793441772461, 0.03218227386474609, 0.03212214279174805, 0.032194271087646484, 0.03206041717529297, 0.03229196929931641, 0.0323265266418457, 0.032292865753173826, 0.03255481719970703, 0.03232355117797851, 0.03225398254394531, 0.03279225540161133, 0.03202864074707031, 0.03203535842895508, 0.03237833786010742, 0.03216019058227539, 0.03225955200195312, 0.032016063690185545, 0.03220479965209961, 0.032508224487304685, 0.03223567962646484, 0.03205900955200195, 0.03195171165466309, 0.0320687370300293, 0.032001953125, 0.032003135681152345, 0.032003616333007814, 0.03351772689819336, 0.03213107299804688, 0.03204095840454101, 0.032126976013183595, 0.03191910362243652, 0.03191500854492187, 0.03256320190429687, 0.032315391540527344, 0.032126976013183595, 0.03210201644897461, 0.03194416046142578, 0.03194563293457031, 0.03204095840454101, 0.032024574279785153, 0.03207551956176758, 0.032034111022949216, 0.032111553192138674, 0.03236000061035156, 0.032145854949951175, 0.03223961639404297, 0.03214060974121094, 0.03209603118896484, 0.03219039916992188, 0.03219660949707031, 0.032158687591552736, 0.03237212753295898, 0.032196895599365234, 0.03224710464477539, 0.03220764923095703, 0.032069854736328125, 0.0320819206237793, 0.03219852828979492, 0.032026752471923825, 0.03208806228637695, 0.032405662536621097, 0.032056575775146486, 0.03213369750976563, 0.03232128143310547, 0.03250204849243164, 0.03213312149047851, 0.03198345565795899, 0.0319718074798584, 0.032116416931152345, 0.032333824157714845, 0.03202812957763672, 0.032328224182128905, 0.032008190155029294, 0.03206553649902344, 0.032243679046630856, 0.032062591552734374, 0.033102752685546875, 0.034573505401611325, 0.04050368118286133, 0.032498111724853514, 0.03298044967651367, 0.03277673721313477, 0.03241548919677734, 0.03201007843017578, 0.0321495361328125, 0.032266624450683595, 0.03216537475585937, 0.032567264556884766, 0.03258390426635742, 0.032970977783203126, 0.03247923278808594, 0.032581409454345706, 0.032121055603027346, 
0.032134559631347655, 0.0323671989440918, 0.031901311874389646, 0.031968767166137696, 0.03200908660888672, 0.03197257614135742, 0.031842336654663086, 0.03230998229980469, 0.03200844955444336, 0.03197724723815918, 0.031987712860107424, 0.03207167816162109, 0.03202764892578125, 0.03197625541687012, 0.032264385223388675, 0.03207372665405273, 0.032165569305419923, 0.032291072845458985, 0.033003585815429684, 0.03215769577026367, 0.03198975944519043, 0.03215359878540039, 0.03229916763305664, 0.03236185455322266, 0.032027103424072265, 0.031886783599853516, 0.03208028793334961, 0.032231582641601565, 0.032161056518554686, 0.03250038528442383, 0.032978912353515626, 0.03229091262817383, 0.032499713897705076, 0.03196643257141113, 0.032156448364257816, 0.031987712860107424, 0.032307201385498044, 0.03204444885253906, 0.03216217422485351, 0.032390625, 0.03214412689208984, 0.032003616333007814, 0.0321839370727539, 0.032559967041015624, 0.03212895965576172, 0.03226739120483398, 0.03200032043457031, 0.03195968055725098, 0.03212310409545899, 0.03199897575378418, 0.032244415283203126, 0.03327129745483398, 0.032240222930908204, 0.032142433166503906, 0.03214227294921875, 0.03207984161376953, 0.032145408630371096, 0.03213516616821289, 0.03231129455566406, 0.03275734329223633, 0.03206777572631836, 0.03247164916992187, 0.03340902328491211, 0.03228057479858398, 0.03220275115966797, 0.0336814079284668, 0.032129024505615236, 0.03213872146606445, 0.03192380714416504, 0.03204806518554688, 0.03288380813598633, 0.031947263717651365, 0.03188368034362793, 0.03204095840454101, 0.03194675254821777, 0.03192848014831543, 0.032484607696533205, 0.03191049575805664, 0.03203481674194336, 0.03199795150756836, 0.03183776092529297, 0.03199020767211914, 0.032342273712158205, 0.032048641204833986, 0.03192652893066406, 0.032094207763671875, 0.03193036842346191, 0.03243119812011719, 0.03191695976257324, 0.031942655563354495, 0.03234815979003906, 0.03508633422851563, 0.032239742279052734, 0.03257535934448242, 0.03219046401977539, 0.03198361587524414, 0.03209625625610352, 0.03225804901123047, 0.03215359878540039, 0.032247806549072264, 0.032911361694335936, 0.033050624847412106, 0.03227008056640625, 0.032304641723632815, 0.032285438537597654, 0.03204272079467774, 0.032058944702148436, 0.032129215240478515, 0.03211727905273438, 0.03224166488647461, 0.03219046401977539, 0.032053249359130856, 0.03232767868041992, 0.032276481628417966, 0.03217407989501953, 0.03295353698730469, 0.032645950317382814, 0.03214950561523437, 0.03358060836791992, 0.032049121856689455, 0.03262511825561523, 0.03516416168212891, 0.032942337036132814, 0.03241952133178711, 0.03214368057250976, 0.03236249542236328, 0.03226214218139648, 0.03223270416259766, 0.032266719818115235, 0.03239350509643555, 0.03232563018798828, 0.032143360137939454, 0.03232995223999023, 0.03208556747436524, 0.032513599395751956, 0.03223782348632812, 0.03223388671875, 0.03222528076171875, 0.032758785247802735, 0.03219353485107422, 0.03226166534423828, 0.03237926483154297, 0.03220284652709961, 0.0327426872253418, 0.032389854431152346, 0.032376190185546876, 0.032350654602050784, 0.03206291198730469, 0.03221948623657227, 0.032518302917480466, 0.032213088989257815, 0.03225718307495117, 0.03216691207885742, 0.032286689758300784, 0.03223734283447265, 0.032239871978759764, 0.03228876876831055, 0.03246873474121094, 0.03356902313232422, 0.0324587516784668, 0.03238092803955078, 0.03200380706787109, 0.03216342544555664, 0.03210105514526367, 0.03221635055541992, 0.032299488067626954, 0.03229289627075195, 
0.032554878234863284, 0.03232393646240234, 0.032341537475585935, 0.03233203125, 0.03215097427368164, 0.032225406646728516, 0.032182945251464846, 0.03219660949707031, 0.03236249542236328, 0.03248643112182617, 0.032469982147216796, 0.03210444641113281, 0.032163681030273436, 0.03260432052612305, 0.03220073699951172, 0.03232553482055664, 0.032428096771240235, 0.03213312149047851, 0.033054080963134766, 0.03210931015014649, 0.03226828765869141, 0.032173408508300784, 0.03244889450073242, 0.03232735824584961, 0.032340576171875, 0.03257699203491211, 0.03229919815063476, 0.03221744155883789, 0.03326976013183594, 0.03214070510864258, 0.032389503479003906, 0.03269020843505859, 0.032262336730957034, 0.03213523101806641, 0.032358047485351565, 0.032342144012451175, 0.032395423889160155, 0.0321638412475586, 0.03234815979003906, 0.032163200378417967, 0.03212060928344727, 0.03253129577636719, 0.03233123016357422, 0.032115230560302736, 0.032059391021728514, 0.03198137664794922, 0.03216812896728516, 0.03217203140258789, 0.03220684814453125, 0.03223868942260742, 0.03286272048950195, 0.032172351837158206, 0.03214140701293945, 0.032440319061279296, 0.03201228713989258, 0.03251318359375, 0.03232851028442383, 0.03207376098632812, 0.03227033615112305, 0.032276481628417966, 0.03201350402832031, 0.0320909423828125, 0.032052734375, 0.03224627304077148, 0.03215542221069336, 0.03210031890869141, 0.03231887817382813, 0.03228742218017578, 0.03226835250854492, 0.03237078475952149, 0.03207987213134766, 0.03218412780761719, 0.032370880126953126, 0.032109886169433596, 0.03204166412353516, 0.0319704647064209, 0.03194147109985351, 0.03183772850036621, 0.03203887939453125, 0.032084480285644534, 0.032026622772216795, 0.03273897552490234, 0.031953664779663084, 0.03222220611572266, 0.03192521667480469, 0.031931936264038084, 0.03205769729614258, 0.0320715217590332, 0.032069023132324216, 0.032013214111328125, 0.03206118392944336, 0.03194291114807129, 0.03220870590209961, 0.03227414321899414, 0.03223548889160156, 0.032086177825927736, 0.03222972869873047, 0.0349409294128418, 0.032109630584716796, 0.03198662376403809, 0.03213721466064453, 0.03206265640258789, 0.0320643196105957, 0.0323682861328125, 0.03216169738769531, 0.03186118316650391, 0.031921344757080077, 0.03187363243103027, 0.03197283172607422, 0.032025344848632814, 0.03231948852539063, 0.03204201507568359, 0.03217097473144531, 0.03189263916015625, 0.032250720977783205, 0.03203811264038086, 0.031967647552490236, 0.03201196670532227, 0.03219935989379883, 0.032016193389892575, 0.03205366516113281, 0.03205916976928711, 0.0319180793762207, 0.032089214324951175, 0.0324268798828125, 0.032199871063232424, 0.03235027313232422, 0.03210726547241211, 0.03204915237426758, 0.0324730224609375, 0.03210160064697266, 0.032076641082763674, 0.03221027374267578, 0.03207644653320312, 0.03211164855957031, 0.0320849609375, 0.03216323089599609, 0.03212464141845703, 0.032039806365966796, 0.03220479965209961, 0.03208499145507813, 0.03234905624389649, 0.03215340805053711, 0.03205068969726563]",tokens/s,31.0055466139055,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = 
decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4365.643776,6253.576192,0.0,5851.05408,5850.293248,s,1,13.032169921875,13.032169921875,0.0,13.032169921875,13.032169921875,13.032169921875,13.032169921875,[13.032169921875],,kWh,0.00011799595707083484,1.3003129347328666e-05,3.815364163400198e-05,0.00016915272805216548,,MB,4292.349952,6368.919552,0.0,5951.717376,5923.050496,s,10,2.093436538696289,0.2093436538696289,0.0007996070383562015,0.20955230712890627,0.21022120361328125,0.21029170837402344,0.2103481121826172,"[0.20777949523925782, 0.20840982055664062, 0.2096646728515625, 0.20921200561523437, 0.20861616516113282, 0.21036221313476564, 0.21020553588867188, 0.21008201599121093, 0.20947200012207032, 0.2096326141357422]",tokens/s,1222.869646478163,kWh,6.1299801761284905e-06,6.759268100498021e-07,4.067914133958304e-06,1.0873821120136595e-05,tokens/kWh,23542781.98727479,MB,4305.391616,6383.599616,0.0,5966.39744,5923.053056,s,10,23.651231201171875,2.3651231201171874,0.004953714186526709,2.366255859375,2.370170068359375,2.371331982421875,2.372261513671875,"[2.369911865234375, 2.372493896484375, 2.368401611328125, 2.36024853515625, 2.36708837890625, 2.364062255859375, 2.368226806640625, 2.355646728515625, 2.36542333984375, 2.359727783203125]",tokens/s,26.637091094386015,kWh,6.899916195970458e-05,7.610626855421737e-06,4.574784794084177e-05,0.00012235763675596807,tokens/kWh,514884.0862761035,,s,630,23.64808427810668,0.037536641711280454,0.00042073990505116004,0.037417919158935546,0.03795195465087891,0.03825074710845947,0.039480639686584475,"[0.03826492691040039, 0.03822700881958008, 0.037348350524902346, 0.037571678161621096, 0.037292030334472655, 0.037319583892822264, 0.037664768218994144, 0.037416961669921874, 0.03754169464111328, 0.037436798095703126, 0.03739699172973633, 0.037675327301025394, 0.03766172790527344, 0.03755868911743164, 0.037406528472900394, 0.03769756698608399, 0.037565185546875, 0.03731884765625, 0.03730316925048828, 0.038153118133544925, 0.03803692626953125, 0.037574878692626955, 0.03754345703125, 0.03751200103759766, 0.03788412857055664, 0.03748636627197265, 0.03755587387084961, 0.03736201477050781, 0.03762995147705078, 0.03735756683349609, 0.03736371231079102, 0.0373043212890625, 0.037313793182373045, 0.037415679931640626, 0.03727769470214844, 0.037506591796875, 0.03759766387939453, 0.03754348754882812, 0.03771574401855469, 0.037579521179199216, 0.03775888061523437, 0.037572608947753904, 0.03731564712524414, 0.037418014526367185, 0.03728307342529297, 0.037370529174804684, 0.03748454284667969, 0.03765043258666992, 0.03736332702636719, 0.037322944641113284, 0.03740281677246094, 0.03751023864746094, 0.037534622192382815, 0.037498878479003905, 0.03745328140258789, 0.03749884796142578, 0.03801964950561523, 
0.037564414978027344, 0.03749788665771484, 0.03754288101196289, 0.04079001617431641, 0.03830294418334961, 0.03806288146972656, 0.038604801177978515, 0.03755155181884766, 0.037394081115722656, 0.037472190856933596, 0.03733769607543945, 0.03749311828613281, 0.03736576080322265, 0.03740367889404297, 0.03740915298461914, 0.038267326354980466, 0.03746627044677735, 0.03797196960449219, 0.03739852905273437, 0.03897734451293945, 0.037505184173583984, 0.0374354248046875, 0.039787616729736325, 0.03781523132324219, 0.037519329071044924, 0.03744153594970703, 0.037407936096191405, 0.037404640197753906, 0.037647201538085935, 0.03759513473510742, 0.03737177658081055, 0.03834483337402344, 0.03771187210083008, 0.03753555297851562, 0.03752569580078125, 0.037391937255859375, 0.0374271354675293, 0.03749324798583984, 0.037369342803955076, 0.037474815368652346, 0.0373309440612793, 0.03758607864379883, 0.03741782379150391, 0.03747161483764649, 0.03784563064575195, 0.03734636688232422, 0.037229248046875, 0.037832447052001957, 0.037274112701416014, 0.03767219161987305, 0.03811814498901367, 0.037555999755859375, 0.03774403381347656, 0.0374136962890625, 0.03737702560424805, 0.03788022232055664, 0.037728862762451174, 0.03774259185791016, 0.037771263122558595, 0.037531070709228516, 0.038311519622802735, 0.03771696090698242, 0.03745792007446289, 0.03737366485595703, 0.0373353271484375, 0.03757875061035156, 0.03764019012451172, 0.037400577545166014, 0.038168575286865236, 0.03821420669555664, 0.03768044662475586, 0.037577342987060544, 0.037571998596191404, 0.03764847946166992, 0.0375096321105957, 0.03750870513916016, 0.03768975830078125, 0.03775692749023438, 0.03801676940917969, 0.037833118438720705, 0.03754172897338867, 0.038055038452148436, 0.037432193756103516, 0.03748371124267578, 0.03771270370483398, 0.03755811309814453, 0.037732318878173826, 0.03743353652954102, 0.03745792007446289, 0.03817267227172851, 0.037480289459228516, 0.03733676910400391, 0.03754822540283203, 0.03797862243652344, 0.038381343841552736, 0.03747020721435547, 0.03734723281860351, 0.037462112426757815, 0.037615615844726565, 0.037695392608642575, 0.037913856506347654, 0.03732361602783203, 0.037486209869384765, 0.037467937469482425, 0.03801353454589844, 0.038133758544921875, 0.03742454528808594, 0.037243488311767575, 0.03730790328979492, 0.03733270263671875, 0.03730307388305664, 0.037416961669921874, 0.03727155303955078, 0.037997695922851564, 0.0373256950378418, 0.037736446380615234, 0.03775657653808594, 0.0374310417175293, 0.037243488311767575, 0.03739161682128906, 0.03740339279174805, 0.037599456787109374, 0.037486209869384765, 0.03739049530029297, 0.0375623664855957, 0.03769331359863281, 0.037367935180664065, 0.03727500915527344, 0.03738687896728515, 0.03746201705932617, 0.03749903869628906, 0.03754095840454102, 0.03831024169921875, 0.03730284881591797, 0.03774771118164062, 0.037364734649658206, 0.03738828659057617, 0.03747430419921875, 0.03726540756225586, 0.03724697494506836, 0.037310462951660156, 0.03719715118408203, 0.03769356918334961, 0.03724342346191406, 0.03766819381713867, 0.03747292709350586, 0.03740671920776367, 0.03746406555175781, 0.03728326416015625, 0.037340831756591794, 0.03732368087768555, 0.03733299255371094, 0.037375999450683595, 0.037548030853271484, 0.037558273315429686, 0.03751715087890625, 0.03734249496459961, 0.037265697479248044, 0.0371943359375, 0.03755168151855469, 0.03726326370239258, 0.03744822311401367, 0.03750092697143555, 0.037616928100585936, 0.03793174362182617, 0.037444862365722656, 0.03740105438232422, 
0.037488929748535155, 0.03722585678100586, 0.03737651062011719, 0.03736924743652344, 0.0373983039855957, 0.037926910400390625, 0.03746416091918945, 0.037555038452148436, 0.03727155303955078, 0.03733628845214844, 0.037223201751708984, 0.03727974319458008, 0.03738623809814453, 0.03811056137084961, 0.03745036697387695, 0.03772822570800781, 0.03738425445556641, 0.037373023986816405, 0.037270431518554685, 0.03723263931274414, 0.03751430511474609, 0.03800764846801758, 0.03734230422973633, 0.037477375030517575, 0.03786956787109375, 0.03727155303955078, 0.03722444915771484, 0.03756342315673828, 0.038053951263427734, 0.03736771011352539, 0.0373292465209961, 0.037227104187011716, 0.03730022430419922, 0.03756012725830078, 0.03795817565917969, 0.03748739242553711, 0.037429439544677735, 0.03772198486328125, 0.03751961517333984, 0.03948806381225586, 0.037416065216064456, 0.037397377014160155, 0.037307968139648436, 0.03727199935913086, 0.03794124984741211, 0.037556224822998044, 0.037209217071533206, 0.037192577362060546, 0.0376995849609375, 0.03735100936889649, 0.037283649444580076, 0.03725513458251953, 0.037235328674316406, 0.037603134155273436, 0.03747257614135742, 0.037465057373046874, 0.038300575256347655, 0.03742105484008789, 0.03728531265258789, 0.037337665557861326, 0.03766998291015625, 0.03758720016479492, 0.03735414505004883, 0.03736070251464844, 0.03756496047973633, 0.03770127868652344, 0.037364479064941405, 0.03731660842895508, 0.03741596984863281, 0.03732579040527344, 0.037311649322509764, 0.03732361602783203, 0.03728793716430664, 0.03735551834106445, 0.03729404830932617, 0.03726921463012695, 0.0375483512878418, 0.03733046340942383, 0.03738876724243164, 0.03724857711791992, 0.03722857666015625, 0.03810755157470703, 0.0373125114440918, 0.03724070358276367, 0.03752972793579101, 0.037240287780761716, 0.03740848159790039, 0.040599681854248046, 0.03967251205444336, 0.03751059341430664, 0.03745951843261719, 0.03834275054931641, 0.037951263427734375, 0.03763235092163086, 0.037841472625732425, 0.037394142150878905, 0.038510879516601565, 0.03763545608520508, 0.03776575851440429, 0.03946246337890625, 0.03748876953125, 0.0377715835571289, 0.03745382308959961, 0.03725516891479492, 0.03740403366088867, 0.0374112319946289, 0.03727996826171875, 0.03729625701904297, 0.03768307113647461, 0.0386677131652832, 0.037378623962402345, 0.037722110748291016, 0.03734444808959961, 0.0374136962890625, 0.038117374420166016, 0.037469791412353515, 0.03743936157226563, 0.037251232147216796, 0.037297664642333986, 0.037337631225585935, 0.03726889419555664, 0.0372210578918457, 0.03711116790771484, 0.03765542221069336, 0.03770115280151367, 0.037396800994873046, 0.037230751037597654, 0.03717657470703125, 0.037257247924804685, 0.037104320526123044, 0.037142559051513674, 0.03720579147338867, 0.03747862243652344, 0.03718348693847656, 0.0370997428894043, 0.03780790328979492, 0.037864704132080075, 0.037362430572509764, 0.03722649765014648, 0.03735884857177734, 0.03723955154418945, 0.037363201141357424, 0.03753539276123047, 0.03726217651367188, 0.03786547088623047, 0.03832572937011719, 0.037298721313476564, 0.037302272796630856, 0.03728572845458984, 0.03727740859985352, 0.03731296157836914, 0.03727753448486328, 0.03715907287597656, 0.037369857788085936, 0.0381539192199707, 0.03728976058959961, 0.03796476745605469, 0.03747948837280273, 0.03739744186401367, 0.03739219284057617, 0.03917843246459961, 0.03726969528198242, 0.03757241439819336, 0.037371616363525394, 0.03773878479003906, 0.03835446548461914, 0.03772991943359375, 
0.037518177032470706, 0.03748188781738281, 0.03727215957641602, 0.0372490234375, 0.037470046997070315, 0.03731881713867188, 0.037427200317382815, 0.03725516891479492, 0.03726665496826172, 0.03726825714111328, 0.03806412887573242, 0.03726268768310547, 0.03721897506713867, 0.03718143844604492, 0.03738828659057617, 0.03745526504516602, 0.037532257080078124, 0.03737587356567383, 0.037283935546875, 0.037295265197753905, 0.03733113479614258, 0.03747091293334961, 0.03730636978149414, 0.03724240112304687, 0.03740492630004883, 0.038254878997802735, 0.03783196640014649, 0.037529632568359374, 0.03748070526123047, 0.03739888000488281, 0.03741084671020508, 0.03725516891479492, 0.03749478530883789, 0.037253120422363284, 0.03930499267578125, 0.03752304077148438, 0.03751913452148437, 0.03732931137084961, 0.03745123291015625, 0.037391326904296876, 0.03740262222290039, 0.03738780975341797, 0.03770521545410156, 0.037493217468261716, 0.03752531051635742, 0.03873177719116211, 0.03989574432373047, 0.0374128646850586, 0.037381473541259765, 0.03732665634155274, 0.03824569702148437, 0.037557182312011717, 0.03767855834960938, 0.037388736724853516, 0.03746556854248047, 0.037463615417480466, 0.037252063751220706, 0.037466110229492186, 0.03723689651489258, 0.03720505523681641, 0.03720191955566406, 0.037217247009277345, 0.03716281509399414, 0.037502174377441404, 0.03734796905517578, 0.03732495880126953, 0.03737705612182617, 0.037462337493896485, 0.03721692657470703, 0.03735257720947266, 0.0373441276550293, 0.03721420669555664, 0.037152767181396484, 0.03739852905273437, 0.03759308624267578, 0.03749273681640625, 0.03828047943115234, 0.03723660659790039, 0.03725110244750977, 0.03730294418334961, 0.037357601165771484, 0.037193153381347654, 0.037294784545898435, 0.03727974319458008, 0.03725107192993164, 0.03719171142578125, 0.03722991943359375, 0.03723737716674805, 0.037197662353515626, 0.037414592742919923, 0.03789836883544922, 0.03721583938598633, 0.03741155242919922, 0.03776310348510742, 0.03727523040771484, 0.03731679916381836, 0.03751081466674805, 0.03739910507202148, 0.037185440063476564, 0.03727983856201172, 0.0372408332824707, 0.0381399040222168, 0.03750502395629883, 0.037332672119140625, 0.03801705551147461, 0.03748483276367188, 0.03721033477783203, 0.03713167953491211, 0.037159294128417966, 0.03710892868041992, 0.03717612838745117, 0.037275646209716795, 0.03725721740722656, 0.038652511596679685, 0.03762623977661133, 0.03742499160766601, 0.039311519622802736, 0.03755408096313476, 0.03745132827758789, 0.03745455932617187, 0.03748128128051758, 0.037768192291259765, 0.03749264144897461, 0.03802326583862305, 0.03772147369384766, 0.037415550231933596, 0.03744371032714844, 0.03747212982177734, 0.03731795120239258, 0.03721696090698242, 0.037285537719726564, 0.03748489761352539, 0.037283744812011715, 0.037394527435302735, 0.037252769470214844, 0.03735958480834961, 0.03733132934570312, 0.03736521530151367, 0.03767875289916992, 0.037340030670166016, 0.037427200317382815, 0.0375513916015625, 0.037405406951904294, 0.037402145385742186, 0.037320320129394534, 0.037286334991455075, 0.03731907272338867, 0.037246814727783205, 0.037273536682128905, 0.03757814407348633, 0.037261886596679686, 0.03732915115356445, 0.037515262603759765, 0.037609054565429685, 0.037259681701660156, 0.03725107192993164, 0.037280990600585935, 0.03726620864868164, 0.03783599853515625, 0.03729283142089844, 0.03721420669555664, 0.0373043212890625, 0.03736940765380859, 0.037198272705078125, 0.039276607513427736, 0.03980847930908203, 0.03780860900878906, 
0.037525505065917966, 0.03757056045532227, 0.03746752166748047, 0.03741759872436524, 0.03715798568725586, 0.037286815643310545, 0.03729318237304687, 0.037587841033935546, 0.03750297546386719, 0.039411487579345705, 0.03799881744384766, 0.03750905609130859, 0.03731180953979492, 0.037200641632080075, 0.03727155303955078, 0.03734230422973633, 0.03723971176147461, 0.03727769470214844, 0.037375553131103516, 0.03759356689453125, 0.03782447814941406, 0.037506881713867186, 0.037348705291748045, 0.03734975814819336, 0.03748838424682617, 0.03732144165039063, 0.03750707244873047, 0.037541374206542966, 0.03723929595947266, 0.037466110229492186, 0.037236736297607424, 0.03726131057739258, 0.037311744689941403, 0.03733369445800781, 0.03824012756347656, 0.037289344787597656, 0.03729081726074219, 0.0372592658996582, 0.037220352172851565, 0.037185535430908204, 0.037408321380615235, 0.0372105598449707, 0.03742454528808594, 0.037265472412109375, 0.037161312103271484, 0.03738771057128906, 0.03760809707641601, 0.03739839935302734, 0.037439262390136716, 0.037400318145751954, 0.037302879333496096, 0.037359710693359374, 0.03727750396728516, 0.03767724609375, 0.03751699066162109, 0.037494880676269535, 0.0375871696472168, 0.0375623664855957, 0.03758694458007812, 0.037432735443115234, 0.037376609802246094, 0.037449726104736326, 0.037623809814453124, 0.03749273681640625, 0.037416961669921874, 0.0373493766784668, 0.037416255950927735, 0.03733731079101563, 0.03747641754150391, 0.037456417083740236, 0.037299999237060545, 0.037452960968017576]",tokens/s,26.64063577375068,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4356.452352,6253.576192,0.0,5851.05408,5850.293248,s,1,12.8834697265625,12.8834697265625,0.0,12.8834697265625,12.8834697265625,12.8834697265625,12.8834697265625,[12.8834697265625],,kWh,0.00011435995739583215,1.2607476621075015e-05,3.7221418665999914e-05,0.0001641888526829071,,MB,4137.033728,6368.919552,0.0,5951.717376,5923.050496,s,10,2.1210641479492187,0.21210641479492187,0.0008017626402148525,0.2122968215942383,0.2129480514526367,0.21304607009887697,0.21312448501586914,"[0.21183786010742187, 0.2108753204345703, 0.21240730285644532, 0.21076031494140626, 0.21292626953125, 0.21279090881347656, 0.2126641845703125, 0.21218634033203124, 0.2114715576171875, 0.2131440887451172]",tokens/s,1206.9413376654227,kWh,6.223482930496406e-06,6.863158493829669e-07,4.128502120765991e-06,1.1038300900645365e-05,tokens/kWh,23191975.133150496,MB,4151.443456,6383.599616,0.0,5966.39744,5923.053056,s,10,22.651348388671874,2.265134838867188,0.004121363797203295,2.2656754150390626,2.2703095214843754,2.2703642333984377,2.270408002929688,"[2.2704189453125, 2.259505615234375, 2.264183837890625, 2.264888916015625, 2.268658447265625, 2.25852587890625, 2.27029736328125, 2.2608095703125, 2.267597900390625, 
2.2664619140625]",tokens/s,27.812913791704695,kWh,6.78677450524203e-05,7.485778365520116e-06,4.488228708783407e-05,0.00012023581050577446,tokens/kWh,523970.35238494404,,s,630,22.648571426391584,0.03595011337522477,0.00043886579464091387,0.03587470436096191,0.036367044830322263,0.03662165870666504,0.037713587570190436,"[0.037054241180419924, 0.03593875122070313, 0.036136287689208985, 0.03949347305297852, 0.03605871963500976, 0.03611155319213867, 0.03617971038818359, 0.03575356674194336, 0.035858112335205077, 0.035838752746582034, 0.0356495361328125, 0.036050369262695316, 0.0361148796081543, 0.03603235244750977, 0.036171295166015624, 0.035824382781982425, 0.03577811050415039, 0.035751583099365235, 0.035996448516845705, 0.03601366424560547, 0.03605337524414062, 0.035928031921386716, 0.03588307189941406, 0.036003681182861326, 0.03583961486816406, 0.03586102294921875, 0.03587295913696289, 0.035813182830810544, 0.035966686248779293, 0.035706336975097654, 0.03563398361206055, 0.03582563018798828, 0.03629436874389649, 0.03605126571655273, 0.03600182342529297, 0.03606073760986328, 0.03580931091308594, 0.035760257720947264, 0.036353759765625, 0.03591139221191406, 0.03615324783325195, 0.03604886245727539, 0.03620534515380859, 0.03576643371582031, 0.03602780914306641, 0.03617657470703125, 0.036245407104492186, 0.03587276840209961, 0.035961151123046875, 0.035659454345703126, 0.035889217376708984, 0.03566521453857422, 0.03565631866455078, 0.03564134216308594, 0.035821407318115235, 0.03621494293212891, 0.035827713012695314, 0.0359950065612793, 0.03619481658935547, 0.036560897827148435, 0.036427425384521483, 0.03583574295043945, 0.035867263793945316, 0.03657318496704102, 0.03577219009399414, 0.03574396896362304, 0.035388607025146485, 0.035447616577148434, 0.035689697265625, 0.03602329635620117, 0.035925662994384766, 0.03578278350830078, 0.035885055541992186, 0.03586028671264648, 0.035829761505126956, 0.03573369598388672, 0.035606529235839846, 0.035950111389160155, 0.035916255950927733, 0.03652345657348633, 0.0361798095703125, 0.03605561447143555, 0.03570892715454101, 0.036303009033203125, 0.035968158721923826, 0.03582038497924805, 0.03587071990966797, 0.03584204864501953, 0.0366278076171875, 0.03582223892211914, 0.03565356826782227, 0.03553497695922852, 0.035964736938476564, 0.035522689819335936, 0.0355799674987793, 0.03560851287841797, 0.03556147384643555, 0.03586361694335938, 0.03601299285888672, 0.03646012878417969, 0.03594838333129883, 0.036369983673095706, 0.035955711364746096, 0.035762176513671876, 0.03599359893798828, 0.0359090576171875, 0.03608019256591797, 0.035692543029785154, 0.03580313491821289, 0.03607513427734375, 0.035760513305664064, 0.03616307067871094, 0.03587942504882812, 0.03575193786621094, 0.03587465667724609, 0.035899200439453126, 0.035998046875, 0.03569203186035156, 0.035580127716064454, 0.03570719909667969, 0.03571260833740234, 0.03548998260498047, 0.03556780624389649, 0.03549996948242187, 0.035689983367919925, 0.035755615234375, 0.03726144027709961, 0.03661414337158203, 0.035978847503662106, 0.03591961669921875, 0.036072097778320315, 0.03613695907592773, 0.035927902221679686, 0.036187358856201175, 0.036197311401367185, 0.03592192077636719, 0.03612195205688477, 0.036160160064697265, 0.036038654327392575, 0.03606528091430664, 0.036412704467773435, 0.036082401275634765, 0.0360335693359375, 0.036889568328857425, 0.03589731216430664, 0.03578208160400391, 0.036765281677246096, 0.036858688354492186, 0.03637456130981445, 0.03597846221923828, 0.035935169219970704, 0.03603257751464844, 
0.03625894546508789, 0.035918655395507815, 0.03634368133544922, 0.03626793670654297, 0.03595647811889648, 0.036571712493896485, 0.03640505599975586, 0.03623251342773438, 0.036401664733886716, 0.03601974487304688, 0.03567248153686523, 0.03553420639038086, 0.03552355194091797, 0.035606849670410154, 0.03539936065673828, 0.03545907211303711, 0.0356126708984375, 0.035588096618652344, 0.03560243225097656, 0.03565158462524414, 0.035571712493896485, 0.03538940811157226, 0.03541974258422852, 0.035334590911865235, 0.0355491828918457, 0.03554089736938477, 0.03539465713500976, 0.03612979125976563, 0.03555241775512695, 0.03540019226074219, 0.035776382446289064, 0.03550064086914063, 0.035432319641113284, 0.035438591003417966, 0.03540959930419922, 0.03573328018188476, 0.03565622329711914, 0.036716705322265626, 0.036251903533935544, 0.03627939224243164, 0.03617270278930664, 0.035886112213134765, 0.03573030471801758, 0.03590768051147461, 0.0360079345703125, 0.03587071990966797, 0.0358579216003418, 0.03586841583251953, 0.03618892669677734, 0.03575983810424805, 0.03561705780029297, 0.03578220748901367, 0.03608160018920899, 0.03625830459594727, 0.035811328887939455, 0.03611033630371094, 0.03588700866699219, 0.03640041732788086, 0.03777439880371094, 0.03585993576049805, 0.03551363372802734, 0.03558243179321289, 0.03565740966796875, 0.03557257461547852, 0.035661342620849606, 0.0358098258972168, 0.035815361022949216, 0.03599539184570313, 0.035932193756103514, 0.03590371322631836, 0.03574700927734375, 0.03597107315063477, 0.03630163192749023, 0.03609747314453125, 0.03595660781860351, 0.03600454330444336, 0.036071422576904294, 0.035868896484375, 0.03614028930664062, 0.03708918380737305, 0.03605107116699219, 0.03597772979736328, 0.035652801513671874, 0.035695423126220704, 0.03570998382568359, 0.03566281509399414, 0.03601203155517578, 0.03562668609619141, 0.03545942306518555, 0.03550595092773438, 0.03551824188232422, 0.03538934326171875, 0.03535295867919922, 0.035493728637695315, 0.03562249755859375, 0.03632790374755859, 0.03606512069702148, 0.03608134460449219, 0.036268096923828125, 0.03633830261230469, 0.03670627212524414, 0.03596428680419922, 0.0359471664428711, 0.03591910552978515, 0.03563801574707031, 0.036396095275878906, 0.03842758560180664, 0.03595635223388672, 0.03595443344116211, 0.035744384765625, 0.03601408004760742, 0.035836959838867186, 0.03591881561279297, 0.03649331283569336, 0.035878177642822265, 0.035729503631591795, 0.03569887924194336, 0.035850528717041016, 0.035722846984863284, 0.03573929595947266, 0.03557059097290039, 0.035466625213623044, 0.03557542419433594, 0.03558448028564453, 0.03564393615722656, 0.03620975875854492, 0.0362628173828125, 0.03673881530761719, 0.0358067512512207, 0.035883743286132815, 0.03587276840209961, 0.035702560424804686, 0.03562921524047852, 0.03574176025390625, 0.0356470718383789, 0.03569091033935547, 0.03562905502319336, 0.03564064025878906, 0.035653537750244144, 0.0355266227722168, 0.03552249526977539, 0.03561151885986328, 0.0355552978515625, 0.03554089736938477, 0.03593756866455078, 0.03646966552734375, 0.03641952133178711, 0.03600492858886719, 0.036235424041748045, 0.03620534515380859, 0.035983009338378905, 0.03606358337402344, 0.035913185119628904, 0.03585017776489258, 0.03969513702392578, 0.03673443222045898, 0.036288864135742185, 0.0359785270690918, 0.036039390563964845, 0.035915775299072264, 0.035762176513671876, 0.03578281784057617, 0.03585808181762695, 0.037166046142578124, 0.036466625213623045, 0.036116481781005856, 0.03611199951171875, 0.035762561798095706, 
0.03559372711181641, 0.03563935852050781, 0.03564998245239258, 0.03603401565551758, 0.03570127868652344, 0.035639297485351565, 0.035770111083984375, 0.03556377410888672, 0.03561881637573242, 0.035784702301025394, 0.03557360076904297, 0.03550019073486328, 0.03589120101928711, 0.036087806701660154, 0.036015457153320315, 0.03591439819335938, 0.0358073616027832, 0.036017887115478514, 0.03658316802978516, 0.036140766143798825, 0.03585270309448242, 0.03581100845336914, 0.03691312026977539, 0.0356890869140625, 0.035727359771728515, 0.03553279876708984, 0.0359334716796875, 0.03564822387695313, 0.035676158905029294, 0.035643070220947266, 0.03569696044921875, 0.03559423828125, 0.035547134399414065, 0.035465217590332034, 0.03543756866455078, 0.03571513748168945, 0.03574435043334961, 0.03558230209350586, 0.03557936096191406, 0.03619075012207031, 0.03630694580078125, 0.03585817718505859, 0.03567232131958008, 0.03574169540405273, 0.03604067230224609, 0.03603081512451172, 0.0365359992980957, 0.03565155029296875, 0.03568848037719727, 0.035622913360595705, 0.035661823272705076, 0.03596492767333984, 0.036019935607910156, 0.03577884674072265, 0.0357674560546875, 0.03565039825439453, 0.035422206878662106, 0.03570899200439453, 0.037177345275878904, 0.0360816650390625, 0.03602342224121094, 0.03587776184082031, 0.03593830490112305, 0.03570073699951172, 0.03549593734741211, 0.0356495361328125, 0.03560249710083008, 0.03591727828979492, 0.036077854156494144, 0.036007583618164064, 0.035840545654296875, 0.03628646469116211, 0.03593625640869141, 0.03584748840332031, 0.0358408317565918, 0.03572671890258789, 0.036143680572509766, 0.03587475204467774, 0.03589529418945313, 0.035745792388916016, 0.03591939163208008, 0.03660460662841797, 0.03665488052368164, 0.03588438415527344, 0.035834110260009766, 0.035843616485595704, 0.035991455078125, 0.03591167831420899, 0.03638371276855469, 0.0359628791809082, 0.03610598373413086, 0.03594812774658203, 0.03586320114135742, 0.03578060913085938, 0.03596847915649414, 0.036037151336669924, 0.03598713684082031, 0.03605740737915039, 0.03624051284790039, 0.03612128067016602, 0.03862054443359375, 0.03622528076171875, 0.03609657669067383, 0.03601625442504883, 0.03595222473144531, 0.03587100982666016, 0.03618345642089844, 0.03579555130004883, 0.035487743377685545, 0.03614310455322266, 0.035864032745361325, 0.035979713439941406, 0.036224639892578125, 0.03597747039794922, 0.0359323844909668, 0.035885055541992186, 0.03604275131225586, 0.03587823867797851, 0.03622979354858399, 0.03601939010620117, 0.03579987335205078, 0.036612319946289065, 0.03583327865600586, 0.035574111938476566, 0.0356451530456543, 0.035592639923095706, 0.036103710174560544, 0.03551900863647461, 0.03558195114135742, 0.035669662475585937, 0.03578300857543945, 0.03551641464233399, 0.03576979064941406, 0.03568492889404297, 0.035748863220214845, 0.035765247344970705, 0.03571494293212891, 0.035460670471191405, 0.03570467376708984, 0.035690689086914064, 0.035694591522216795, 0.03591017532348633, 0.03569385528564453, 0.03555737686157227, 0.035967041015625, 0.03802982330322266, 0.03571260833740234, 0.03587670516967773, 0.03592265701293945, 0.03584960174560547, 0.03605363082885742, 0.035980865478515624, 0.03570732879638672, 0.035777824401855465, 0.03562979125976563, 0.03589734268188476, 0.035514366149902346, 0.03566515350341797, 0.035746177673339846, 0.0356497917175293, 0.03579507064819336, 0.03563494491577148, 0.03566617584228516, 0.035770240783691405, 0.03581113433837891, 0.0362841911315918, 0.03706451034545898, 0.03621091079711914, 
0.0365327033996582, 0.03600592041015625, 0.03555942535400391, 0.03592547225952149, 0.03611225509643555, 0.03756470489501953, 0.035670398712158206, 0.035588096618652344, 0.0356512336730957, 0.03594185638427734, 0.036116737365722656, 0.035719520568847654, 0.035523967742919924, 0.036057823181152346, 0.03594873428344727, 0.03557580947875977, 0.03693475341796875, 0.0357889289855957, 0.035687198638916014, 0.03623321533203125, 0.03579523086547851, 0.03573116683959961, 0.03595033645629883, 0.03574195098876953, 0.037925983428955076, 0.03653315353393555, 0.03654019165039062, 0.03588323211669922, 0.03607756805419922, 0.035983360290527344, 0.035972320556640625, 0.03579779052734375, 0.03584617614746094, 0.0362158088684082, 0.036035007476806644, 0.035985950469970704, 0.0362204475402832, 0.03601628875732422, 0.0358238410949707, 0.0359158706665039, 0.03594035339355469, 0.03590505599975586, 0.0357828483581543, 0.03566211318969727, 0.03571830368041992, 0.03561360168457031, 0.03717318344116211, 0.035706207275390624, 0.03592787170410156, 0.03619132614135742, 0.03626367950439453, 0.03644185638427734, 0.03591324615478515, 0.03598361587524414, 0.03622457504272461, 0.03641379165649414, 0.036059711456298826, 0.03585228729248047, 0.03598083114624023, 0.03592035293579102, 0.03627008056640625, 0.036225025177001956, 0.03582345581054688, 0.036296798706054685, 0.03582572937011719, 0.036906047821044924, 0.03565049743652344, 0.03558195114135742, 0.03561881637573242, 0.03578879928588867, 0.035626911163330076, 0.035342430114746096, 0.03554860687255859, 0.0354268798828125, 0.035399681091308595, 0.0354703369140625, 0.03573062515258789, 0.03580060958862305, 0.03568668746948242, 0.03681228637695313, 0.036018016815185544, 0.03566640090942383, 0.035741470336914063, 0.03561228942871094, 0.03565785598754883, 0.0354714241027832, 0.0355700798034668, 0.0355676155090332, 0.035489791870117186, 0.0356126708984375, 0.03566796875, 0.03584819030761719, 0.03589023971557617, 0.03583276748657226, 0.035622913360595705, 0.0357212142944336, 0.035999744415283204, 0.036071422576904294, 0.03638643264770508, 0.036366718292236325, 0.0360643196105957, 0.03643449783325195, 0.03625766372680664, 0.03605145645141602, 0.03697235107421875, 0.0361921615600586, 0.03656268692016602, 0.03601414489746094, 0.035832286834716794, 0.03726540756225586, 0.03599564743041992, 0.035726879119873045, 0.03576646423339844, 0.036069313049316404, 0.03566012954711914, 0.035765281677246095, 0.035681312561035156, 0.03614854431152344, 0.036647552490234374, 0.036239360809326174, 0.03616966247558594, 0.03581865692138672, 0.035820095062255856, 0.03560889434814453, 0.03588703918457031, 0.03617184066772461, 0.03602230453491211, 0.03608700942993164, 0.03585036849975586, 0.035848865509033205, 0.03563008117675781, 0.035759105682373046, 0.03704956817626953, 0.03564575958251953, 0.03562057495117187, 0.03565011215209961, 0.03583100891113281, 0.03614204788208008, 0.03612255859375, 0.03572947311401367, 0.035762176513671876, 0.03598659133911133]",tokens/s,27.816323958776607,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU 
@ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return 
self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpp2ijstvm/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp0u0m9yzt/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4385.583104,4566.482944,0.0,4188.012544,4187.049984,s,1,10.2750205078125,10.2750205078125,0.0,10.2750205078125,10.2750205078125,10.2750205078125,10.2750205078125,[10.2750205078125],,kWh,9.746523692499522e-05,1.0743721955271504e-05,3.126280278799992e-05,0.00013947176166826666,,MB,4391.743488,4962.844672,0.0,4555.014144,4514.269184,s,10,7.850749145507812,0.7850749145507813,0.0029818719380852246,0.7838614196777344,0.789292431640625,0.7898505920410157,0.7902971203613282,"[0.7818594360351563, 0.7877095947265625, 0.7823174438476562, 0.7825433959960938, 0.7844056396484375, 0.7904087524414063, 0.7833171997070313, 0.7865393676757813, 0.7824799194335937, 0.7891683959960938]",tokens/s,326.08353069908344,kWh,2.281054614999978e-05,2.5155927895983764e-06,1.517390102799981e-05,4.0500039967597965e-05,tokens/kWh,6320981.4164334815,MB,4399.607808,4979.621888,0.0,4571.79136,4514.271744,s,10,466.8263046875,46.68263046875,0.015457591777333574,46.686087890625004,46.693671093750005,46.69438828125,46.69496203125,"[46.64066015625, 46.670609375, 46.685453125, 46.69510546875, 46.69034375, 46.69351171875, 46.68563671875, 46.69313671875, 46.6865390625, 46.68530859375]",tokens/s,1.3495383479337797,kWh,0.0013616506224445842,0.00015019915050282654,0.0009056978078910003,0.0024175475808384107,tokens/kWh,26059.466419333705,,s,630,466.81600244140645,0.7409777816530257,0.0005184305986670322,0.7410007019042969,0.7414539123535155,0.7416112060546874,0.7419606964111328,"[0.7400110473632813, 0.7402894897460938, 0.7400440063476562, 0.7403200073242188, 0.7396430053710937, 0.7401265869140625, 0.7395269165039062, 0.7397890625, 0.7398823852539063, 0.73982421875, 0.740220947265625, 0.7401492309570312, 0.7397346801757813, 0.7396583862304688, 0.7403153076171874, 0.7400753784179688, 0.7398667602539063, 0.7393792114257812, 0.7404309692382812, 0.74035400390625, 0.7401881103515625, 0.7396765747070313, 0.7399342041015625, 0.7405410766601562, 0.7398911743164063, 0.7407472534179688, 0.7407882080078125, 0.7400386352539062, 0.73987890625, 0.7409293212890625, 0.7406143798828125, 0.7407343139648438, 0.7397560424804688, 0.7399384155273437, 0.740581298828125, 0.7402973022460938, 0.740220947265625, 0.740360107421875, 0.7411057739257813, 0.740449462890625, 0.7406700439453126, 0.7400977172851563, 0.7408890991210938, 0.740763671875, 0.7403888549804688, 0.7405721435546875, 0.7406007080078125, 0.7408599853515625, 0.740433349609375, 0.7407367553710937, 0.7409295654296875, 0.7406866455078125, 0.7404031982421875, 0.7403448486328125, 0.7409674072265625, 0.7407156982421875, 0.7408065795898438, 0.740592529296875, 0.7401984252929688, 0.7403945922851562, 0.7403984985351563, 0.741126953125, 0.7402785034179687, 0.7403516235351563, 0.7404179077148437, 0.7406781005859375, 
0.7401555786132813, 0.7410413208007812, 0.74025439453125, 0.7404482421875, 0.7400786743164063, 0.74094482421875, 0.7405525512695312, 0.7404586791992187, 0.7402434692382812, 0.7404423217773437, 0.74058935546875, 0.740398681640625, 0.7412412719726562, 0.7406448364257813, 0.7404994506835938, 0.7403804931640625, 0.7406102294921875, 0.7411128540039063, 0.7417538452148438, 0.7404564819335937, 0.7408516845703125, 0.7410953979492187, 0.740627685546875, 0.7406206665039062, 0.740828857421875, 0.741005859375, 0.7405958862304688, 0.7401922607421875, 0.7412469482421875, 0.74111181640625, 0.7408843994140625, 0.7404359741210937, 0.7411138916015625, 0.7408823852539063, 0.7407124633789063, 0.74094384765625, 0.7413555297851563, 0.7404906616210938, 0.7410133666992188, 0.7409793701171875, 0.740929443359375, 0.7408640747070312, 0.7405813598632812, 0.7402782592773437, 0.7413414916992187, 0.7409857788085937, 0.7406785888671875, 0.7406057739257812, 0.7411691284179688, 0.7411315307617188, 0.74105859375, 0.7411226806640625, 0.7413324584960937, 0.7406558227539063, 0.740982666015625, 0.74103369140625, 0.7415252685546875, 0.7407310180664063, 0.7412965698242188, 0.7410989990234375, 0.740869873046875, 0.740468994140625, 0.7411036376953125, 0.740706298828125, 0.7408719482421875, 0.740638916015625, 0.7408571166992187, 0.7408074340820312, 0.7406796875, 0.7405250854492188, 0.7408786010742188, 0.741176025390625, 0.740675048828125, 0.7408501586914062, 0.74096435546875, 0.741158935546875, 0.741158935546875, 0.7413923950195312, 0.740759521484375, 0.7410171508789063, 0.7406097412109375, 0.7411217651367188, 0.7407523803710937, 0.7410889892578125, 0.7411448974609375, 0.740943359375, 0.7407252197265625, 0.7412610473632812, 0.7411159057617187, 0.7411837768554688, 0.7409470825195312, 0.7410205688476562, 0.7412899780273438, 0.740505615234375, 0.7411202392578125, 0.7413648071289063, 0.7410611572265625, 0.7406178588867187, 0.7403484497070313, 0.7415132446289062, 0.7412777099609374, 0.7411558837890625, 0.7409234619140626, 0.7417967529296875, 0.7408416748046875, 0.7407122802734375, 0.7415451049804688, 0.74143994140625, 0.7403668823242188, 0.7410130004882812, 0.7412227783203125, 0.7412770385742188, 0.7410735473632812, 0.7407284545898437, 0.7417527465820313, 0.7415567626953125, 0.7406766967773437, 0.7416492309570313, 0.7416729736328125, 0.7409459228515625, 0.7413637084960938, 0.7415271606445313, 0.7411019287109375, 0.7406522827148437, 0.7408670654296875, 0.7413903198242188, 0.7409971313476562, 0.7406673583984374, 0.7407609252929688, 0.7414934692382813, 0.7408578491210938, 0.7410231323242188, 0.7411513671875, 0.7412428588867187, 0.74096435546875, 0.7406705322265625, 0.7410676879882813, 0.7414188232421876, 0.741134521484375, 0.74067333984375, 0.740796630859375, 0.7407656860351562, 0.7411671142578125, 0.7410687255859375, 0.740404541015625, 0.7409191284179687, 0.7406130981445312, 0.7412982788085938, 0.7409271240234375, 0.7409646606445313, 0.7411522827148438, 0.7412577514648437, 0.7407513427734375, 0.7421317138671875, 0.7406898193359375, 0.741083251953125, 0.741148681640625, 0.7414967041015625, 0.741117919921875, 0.741251220703125, 0.7408323364257813, 0.7416058349609375, 0.7406943969726563, 0.7411171264648437, 0.7410963134765625, 0.74192236328125, 0.74168505859375, 0.7406979370117187, 0.7414682006835938, 0.7410567016601562, 0.7410836181640625, 0.7407222290039063, 0.7419583129882813, 0.7410271606445312, 0.7411206665039063, 0.74096435546875, 0.7410503540039063, 0.7414824829101563, 0.7414549560546875, 0.7413988037109375, 0.7414192504882813, 
0.741542236328125, 0.7408271484375, 0.7461724243164063, 0.7407713623046875, 0.741369873046875, 0.741060791015625, 0.7408067016601563, 0.741339599609375, 0.7412830810546875, 0.7406840209960938, 0.740358642578125, 0.7411712036132813, 0.741201904296875, 0.7406981201171875, 0.7412203369140625, 0.7410372924804688, 0.7412518920898438, 0.74081689453125, 0.74126953125, 0.740822021484375, 0.7409920043945313, 0.7408836669921876, 0.741301025390625, 0.7412136840820313, 0.7412188110351563, 0.7408919067382812, 0.7412188720703125, 0.7414063720703125, 0.7408522338867187, 0.741738525390625, 0.7410501098632812, 0.7414287719726562, 0.7408514404296875, 0.7411904907226563, 0.7410075073242187, 0.7409111328125, 0.7416375732421875, 0.741001708984375, 0.7411466674804688, 0.7413488159179688, 0.7409137573242187, 0.7413770751953125, 0.7412225952148438, 0.7407398071289063, 0.7414620361328125, 0.7413800659179688, 0.7403970336914063, 0.7408448486328125, 0.7415630493164063, 0.7409124755859375, 0.7413480224609375, 0.7406469116210938, 0.741533203125, 0.7410672607421875, 0.7411220703125, 0.7410269165039063, 0.7413400268554687, 0.7415316772460937, 0.740600830078125, 0.74132373046875, 0.7415449829101562, 0.7412333984375, 0.740911376953125, 0.7413792114257812, 0.7409547119140625, 0.7410911254882813, 0.7410734252929687, 0.741003173828125, 0.74100146484375, 0.7405606689453125, 0.7409389038085937, 0.7408321533203125, 0.741001220703125, 0.7408599243164062, 0.7409613647460938, 0.7415075073242188, 0.7406945190429688, 0.7410227661132812, 0.7412909545898437, 0.7417645263671875, 0.7411779174804688, 0.7408681030273437, 0.7413229370117187, 0.7407265625, 0.7409848022460938, 0.7410198364257813, 0.7413305053710938, 0.7408888549804687, 0.7409436645507812, 0.7412243041992187, 0.741204345703125, 0.7410892944335937, 0.7410191650390625, 0.7411611938476562, 0.74098095703125, 0.7409533081054688, 0.7411494750976563, 0.7408394165039063, 0.7410585327148438, 0.7416481323242188, 0.7412449951171876, 0.740628662109375, 0.74061962890625, 0.7409568481445312, 0.7417200927734375, 0.7409229736328125, 0.7412781372070313, 0.7406406860351562, 0.7419451293945313, 0.7410582275390625, 0.7408953247070312, 0.7413330078125, 0.7406510009765624, 0.7417876586914063, 0.7407821044921875, 0.741222412109375, 0.7412777099609374, 0.7410585327148438, 0.741105712890625, 0.7408578491210938, 0.740893798828125, 0.7417701416015625, 0.7412296752929688, 0.740856689453125, 0.7409622802734375, 0.74039501953125, 0.741365234375, 0.7414215698242187, 0.7406849365234375, 0.7411270141601562, 0.7410870361328125, 0.7406585083007813, 0.7405780029296875, 0.74112939453125, 0.7410717163085937, 0.7406264038085938, 0.7405916137695312, 0.7411134033203125, 0.740692626953125, 0.74117236328125, 0.7411555786132813, 0.7407922973632812, 0.74039501953125, 0.7411712036132813, 0.7411712036132813, 0.7413074951171875, 0.7409837036132813, 0.7411322631835937, 0.7407247314453125, 0.7409061889648437, 0.741374755859375, 0.7408959350585937, 0.7425195922851563, 0.740439453125, 0.7409522094726563, 0.7411123046875, 0.7409574584960937, 0.741372802734375, 0.7410932006835937, 0.7408209838867188, 0.7407821044921875, 0.7407656860351562, 0.74102685546875, 0.7413851928710937, 0.7412572021484375, 0.741064697265625, 0.741011474609375, 0.7408129272460937, 0.7410192260742188, 0.74131640625, 0.7405298461914063, 0.7413767700195313, 0.74063671875, 0.7410964965820312, 0.7412127075195313, 0.7412821655273437, 0.7412142333984375, 0.7414312744140625, 0.7404844360351562, 0.7407822265625, 0.7410919189453125, 0.7412132568359375, 
0.7407432250976562, 0.741016357421875, 0.7410842895507812, 0.741087646484375, 0.7406998291015625, 0.7413154907226562, 0.7415840454101562, 0.7410225830078125, 0.7411015625, 0.7414537963867187, 0.740874267578125, 0.7414203491210938, 0.7403264770507813, 0.7410634155273438, 0.7409185180664063, 0.7408353271484375, 0.7404669189453125, 0.7404203491210938, 0.7403519897460937, 0.7420149536132813, 0.7408612670898438, 0.7405206298828125, 0.7407544555664063, 0.7414341430664062, 0.7410803833007813, 0.7410902099609376, 0.7408519287109375, 0.7409415893554687, 0.7404564208984376, 0.7404268188476563, 0.740907958984375, 0.741411865234375, 0.7406228637695312, 0.741372314453125, 0.7408510131835937, 0.7408688354492188, 0.741296142578125, 0.7404994506835938, 0.74149267578125, 0.74127978515625, 0.7410421752929688, 0.7401585693359375, 0.7415073852539062, 0.7416324462890626, 0.7409112548828125, 0.7406940307617188, 0.7410747680664063, 0.7413629760742187, 0.7412233276367187, 0.74068994140625, 0.7412100830078125, 0.7406735229492187, 0.7413104858398437, 0.7413859252929688, 0.7409503173828125, 0.7408230590820313, 0.7416156005859375, 0.741074951171875, 0.7407103881835938, 0.741961669921875, 0.7415742797851562, 0.7415607299804687, 0.7408815307617187, 0.741004150390625, 0.7419085083007813, 0.7408836059570313, 0.7411778564453125, 0.7420112915039062, 0.7413792114257812, 0.7412273559570313, 0.7409903564453125, 0.7410182495117188, 0.74169140625, 0.746708984375, 0.7411466064453125, 0.741240478515625, 0.7410732421875, 0.7407437744140625, 0.740569091796875, 0.7410682983398438, 0.7411778564453125, 0.7406277465820312, 0.7407330322265625, 0.7405321044921875, 0.741503662109375, 0.7409111328125, 0.7410657348632812, 0.74077490234375, 0.7412572021484375, 0.740464599609375, 0.7408700561523438, 0.7406626586914062, 0.7411368408203125, 0.741069091796875, 0.74134326171875, 0.7410131225585938, 0.7410650634765625, 0.7405731811523437, 0.7413833618164063, 0.7407764282226562, 0.7408101196289063, 0.7409360961914062, 0.7406024169921875, 0.7416668090820312, 0.7409581909179688, 0.740361572265625, 0.7412241821289063, 0.7413707275390625, 0.7408877563476562, 0.7406815795898437, 0.74096728515625, 0.7412142944335938, 0.7409121704101562, 0.7410260620117187, 0.74164501953125, 0.7411445922851563, 0.741074951171875, 0.7407349853515625, 0.7418388671875, 0.7409418334960938, 0.7413053588867188, 0.7412003173828124, 0.7406268310546875, 0.741662841796875, 0.7410089721679688, 0.7414379272460937, 0.7410524291992188, 0.741074951171875, 0.7409862670898437, 0.741231201171875, 0.7412080688476562, 0.7415582885742188, 0.7415643920898437, 0.7411831665039063, 0.740969970703125, 0.7412069091796875, 0.7410032348632812, 0.7411319580078125, 0.7406467895507812, 0.7410001831054688, 0.7411732177734375, 0.7405772705078125, 0.7407882080078125, 0.74119580078125, 0.7415930786132813, 0.7405238647460938, 0.74088671875, 0.7413165893554687, 0.740890625, 0.7410768432617187, 0.7412532958984375, 0.7409111328125, 0.7414476928710938, 0.740789794921875, 0.740991455078125, 0.7406918334960938, 0.7410219116210938, 0.7410482788085937, 0.74030078125, 0.74102783203125, 0.7408057861328124, 0.7415857543945312, 0.7404413452148437, 0.7403440551757813, 0.7411328125, 0.7417835693359375, 0.741223876953125, 0.7406965942382813, 0.7409930419921875, 0.7410515747070312, 0.7409672241210937, 0.7407218627929687, 0.7408848876953125, 0.7415095825195313, 0.7411261596679688, 0.7403001098632812, 0.740924072265625, 0.7412849731445312, 0.74093994140625, 0.7412880249023438, 0.7411759643554687, 0.7412305908203125, 
0.7412200317382812, 0.7410457763671875, 0.74151953125, 0.7411492919921875, 0.7410728759765625, 0.741060302734375, 0.7409923706054687, 0.7410328979492188, 0.7411978149414062, 0.7412183227539062, 0.7411544189453125, 0.7412242431640625, 0.7409833984375, 0.7409845581054687, 0.7409912109375, 0.7409848022460938, 0.7414599609375, 0.7411077270507812, 0.74080419921875]",tokens/s,1.349568131137656,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1235, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1037, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 763, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 557, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpp70zzfht/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1174, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 894, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 507, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 436, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 58598 has 14.69 GiB memory in use. Of the allocated memory 14.29 GiB is allocated by PyTorch, and 313.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = 
block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpud_qccp0/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking 
context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs 
= self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 4 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 28.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 191597 has 14.74 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 80.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 107229 has 14.64 GiB memory in use. Of the allocated memory 14.24 GiB is allocated by PyTorch, and 312.03 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1349, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1142, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 852, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 604, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 22037 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpb1x2k7zd/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmppj1xd8qp/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise 
RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmptsu4yc23/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 58.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 129697 has 14.69 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 193.68 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in 
run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpnx61_yf_/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in 
run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp86iu2cjq/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3900, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in 
run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in 
run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmprhv7knqk/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1243, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, 
**kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1121, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 842, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 530, in forward attn_output = self._flash_attention_forward( File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 628, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,2621.128704,2851.995648,0.0,2449.473536,2380.274176,s,1,11.6616923828125,11.6616923828125,0.0,11.6616923828125,11.6616923828125,11.6616923828125,11.6616923828125,[11.6616923828125],,kWh,5.55593570291658e-05,6.121295494944798e-06,1.7966958817998424e-05,7.964761134210902e-05,,MB,2677.522432,3143.499776,0.0,2726.2976,2674.707968,s,10,1.2220152969360352,0.12220152969360351,0.0005603737140551553,0.12231902694702149,0.1225442253112793,0.12257707099914551,0.12260334754943848,"[0.12260991668701172, 0.12247606658935548, 0.12230147552490235, 0.12253692626953125, 0.12226898956298828, 0.12233657836914062, 0.1221876449584961, 0.12056582641601563, 0.12225872039794922, 0.12247315216064453]",tokens/s,2094.900126388516,kWh,3.6576662540104244e-06,4.0337242018213753e-07,2.4329915297249902e-06,6.494030203917553e-06,tokens/kWh,39420820.65549477,MB,2679.918592,3143.499776,0.0,2726.2976,2674.710528,s,10,24.135136962890627,2.4135136962890624,0.006073498916855239,2.413227294921875,2.4209952392578127,2.422407531738281,2.423537365722656,"[2.408634765625, 2.405480712890625, 2.420681396484375, 2.419964599609375, 2.41271923828125, 2.41489404296875, 2.42381982421875, 2.4137353515625, 2.405495361328125, 2.409711669921875]",tokens/s,26.103021539453735,kWh,7.072012757432276e-05,7.800329792143633e-06,3.79515685556752e-05,0.00011647202592214155,tokens/kWh,540902.4141308732,,s,630,24.130494861602777,0.03830237279619489,0.000509522315912815,0.038200975418090824,0.038627736663818364,0.038902374649047854,0.040230931015014654,"[0.038914112091064455, 0.0383201904296875, 0.03836323165893555, 0.03824435043334961, 0.038001983642578126, 0.03810960006713867, 0.038123329162597655, 0.03817670440673828, 0.038410209655761716, 0.038281696319580075, 0.03814204788208008, 0.03811328125, 0.037996543884277346, 0.03808665466308594, 0.038002689361572264, 0.038019073486328124, 0.03804774475097656, 0.03809280014038086, 0.0380211181640625, 0.03796275329589844, 0.0382512321472168, 0.0381256332397461, 0.038144222259521486, 0.0380579833984375, 0.03811942291259766, 0.0380497932434082, 0.038147552490234375, 0.03817116928100586, 0.039308414459228516, 0.0380261116027832, 0.038104286193847654, 0.038118175506591793, 0.03816243362426758, 0.03821363067626953, 0.038009918212890625, 0.037970718383789064, 0.03849801635742187, 0.03849382400512695, 0.038449985504150394, 0.038250495910644534, 0.03868262481689453, 0.040179584503173826, 0.03824857711791992, 0.038182910919189454, 0.038266433715820315, 0.0381383056640625, 0.03829145431518555, 0.038504447937011715, 0.03839900970458984, 0.0381060791015625, 0.03809811019897461, 0.037913406372070316, 0.03803955078125, 0.03804569625854492, 0.03847782516479492, 0.0381253776550293, 0.0381196174621582, 0.03803340911865234, 
0.03800678253173828, 0.038084606170654296, 0.03809689712524414, 0.037986305236816405, 0.03808790588378906, 0.03853107070922852, 0.038012928009033206, 0.0380063362121582, 0.03787411117553711, 0.03802243041992188, 0.037841217041015625, 0.03790480041503906, 0.038203102111816406, 0.03806198501586914, 0.038107521057128904, 0.03804931259155273, 0.0378160629272461, 0.03791126251220703, 0.03807231903076172, 0.038642879486083984, 0.038150974273681644, 0.037951488494873044, 0.03801465606689453, 0.0380173454284668, 0.03815423965454102, 0.038255935668945314, 0.03831824111938477, 0.038426849365234376, 0.03842892837524414, 0.038180286407470704, 0.03806886291503906, 0.03804569625854492, 0.03816243362426758, 0.037928958892822266, 0.03811638259887695, 0.03788899230957031, 0.03801497650146484, 0.038330368041992184, 0.03843875122070312, 0.03825884628295898, 0.03816611099243164, 0.0380522575378418, 0.03817881774902344, 0.03794515228271484, 0.038063488006591796, 0.038284095764160156, 0.03807846450805664, 0.03829715347290039, 0.03821612930297852, 0.03803692626953125, 0.03805996704101562, 0.03799257659912109, 0.03802918243408203, 0.03818560028076172, 0.03798966217041016, 0.03826723098754883, 0.03811366271972656, 0.03828079986572266, 0.03801046371459961, 0.03806700897216797, 0.03816447830200195, 0.03802521514892578, 0.038145278930664064, 0.03804828643798828, 0.03935606384277344, 0.04025190353393555, 0.03846499252319336, 0.03808726501464844, 0.0389939193725586, 0.038235774993896486, 0.03803299331665039, 0.038109630584716794, 0.03805219268798828, 0.03791836929321289, 0.03806438446044922, 0.03794480133056641, 0.038600990295410156, 0.03815590286254883, 0.03818979263305664, 0.03805388641357422, 0.03808665466308594, 0.03800883102416992, 0.03808444976806641, 0.03808067321777344, 0.0380513916015625, 0.03885305786132812, 0.04040496063232422, 0.03845513534545898, 0.03831539154052734, 0.03837795257568359, 0.03819926452636719, 0.0389505615234375, 0.0403616943359375, 0.03848495864868164, 0.03810307312011719, 0.03818467330932617, 0.03816672134399414, 0.03849635314941406, 0.03796377563476563, 0.038223167419433594, 0.038169281005859375, 0.03815628814697265, 0.03866934585571289, 0.03835337448120117, 0.038829662322998046, 0.03897232055664063, 0.03833446502685547, 0.03853311920166016, 0.03837868881225586, 0.03850864028930664, 0.038758174896240234, 0.03832067108154297, 0.03842220687866211, 0.03873455810546875, 0.038449153900146485, 0.03845503997802734, 0.038452926635742186, 0.038400577545166015, 0.03832627105712891, 0.03825155258178711, 0.038572734832763675, 0.03827497482299805, 0.03861337661743164, 0.03821363067626953, 0.03823334503173828, 0.03820006561279297, 0.03829145431518555, 0.038299648284912106, 0.03889561462402344, 0.038413982391357425, 0.03849456024169922, 0.03873942565917969, 0.03828572845458984, 0.038084735870361326, 0.03845926284790039, 0.03948761749267578, 0.03836108779907227, 0.0383047981262207, 0.03839267349243164, 0.038125694274902346, 0.03884966278076172, 0.03871628952026367, 0.0384450569152832, 0.038430721282958984, 0.03808870315551758, 0.03821916961669922, 0.03837731170654297, 0.03827283096313477, 0.038075328826904294, 0.038194911956787106, 0.03823440170288086, 0.038537216186523435, 0.03837747192382813, 0.03829145431518555, 0.03871456146240235, 0.038529857635498044, 0.038354942321777344, 0.03828326416015625, 0.03808415985107422, 0.03817705535888672, 0.03808182525634766, 0.038105983734130856, 0.03851264190673828, 0.03831808090209961, 0.038456382751464846, 0.03821459197998047, 0.038358432769775394, 
0.03825033569335937, 0.038238849639892575, 0.03868889617919922, 0.03890790557861328, 0.03862732696533203, 0.038225345611572266, 0.03858694458007812, 0.039139328002929685, 0.039515743255615236, 0.038315902709960936, 0.03844086456298828, 0.03830233764648437, 0.038299518585205077, 0.03833663940429687, 0.03867238235473633, 0.0383631362915039, 0.038483936309814455, 0.03835062408447266, 0.0383449592590332, 0.03822358322143555, 0.03812790298461914, 0.03842044830322266, 0.03809487915039062, 0.038373374938964845, 0.03796691131591797, 0.038316993713378905, 0.03821744155883789, 0.03885228729248047, 0.038250591278076174, 0.038117889404296876, 0.03812374496459961, 0.03808028793334961, 0.03804134368896484, 0.038063358306884766, 0.03801599884033203, 0.03803327941894531, 0.038076065063476563, 0.03801119995117187, 0.038096031188964846, 0.038059009552001956, 0.038075489044189455, 0.0392704963684082, 0.03799737548828125, 0.038002689361572264, 0.03851206588745117, 0.03819782257080078, 0.03798828887939453, 0.038090816497802736, 0.038256065368652344, 0.038392383575439455, 0.038092353820800784, 0.03822947311401367, 0.03829859161376953, 0.038165695190429685, 0.03824496078491211, 0.03807459259033203, 0.03794732666015625, 0.03819935989379883, 0.03806412887573242, 0.0384716796875, 0.03822796630859375, 0.03817801666259766, 0.03885955047607422, 0.0381416015625, 0.03982780838012695, 0.040463935852050784, 0.038272705078125, 0.03811199951171875, 0.03805388641357422, 0.038231582641601564, 0.03851721572875977, 0.03813580703735352, 0.03817881774902344, 0.038332416534423826, 0.0381841926574707, 0.03810345458984375, 0.03818316650390625, 0.03857727813720703, 0.038419422149658204, 0.03825827026367187, 0.03821526336669922, 0.038214176177978516, 0.03810332870483398, 0.03823001480102539, 0.03824639892578125, 0.038324222564697266, 0.03827097702026367, 0.038442462921142576, 0.0383955192565918, 0.038111198425292966, 0.03863142395019531, 0.03819724655151367, 0.03826214218139649, 0.03837196731567383, 0.03893183898925781, 0.03848432159423828, 0.038459678649902344, 0.038199295043945314, 0.03869286346435547, 0.038241504669189456, 0.03953744125366211, 0.03836659240722656, 0.038281822204589845, 0.03843075180053711, 0.03848191833496094, 0.03817596817016602, 0.03845724868774414, 0.03817068862915039, 0.03872441482543945, 0.038242305755615234, 0.03833651351928711, 0.038076416015625, 0.03829759979248047, 0.038002689361572264, 0.03818086242675781, 0.03823772811889648, 0.038344768524169924, 0.038053855895996094, 0.03818950271606445, 0.03816447830200195, 0.03818086242675781, 0.03825183868408203, 0.03826348876953125, 0.03843388748168945, 0.038261665344238284, 0.038096736907958985, 0.03827027130126953, 0.038128448486328126, 0.038148128509521484, 0.03802876663208008, 0.03812611389160156, 0.038236000061035155, 0.03820057678222656, 0.03803433609008789, 0.038250495910644534, 0.038043647766113284, 0.03814534378051758, 0.03811340713500976, 0.03852675247192383, 0.03824720001220703, 0.038171871185302735, 0.03824924850463867, 0.03826457595825195, 0.038440673828125, 0.03817116928100586, 0.039196670532226564, 0.0392437744140625, 0.038424575805664066, 0.03832012939453125, 0.038191104888916014, 0.03794937515258789, 0.03805132675170898, 0.03850297546386719, 0.03881564712524414, 0.03828054428100586, 0.03824720001220703, 0.03831193542480469, 0.038809215545654294, 0.03840768051147461, 0.03815923309326172, 0.03821539306640625, 0.03818896102905273, 0.0382344970703125, 0.03810038375854492, 0.03796028900146484, 0.03812905502319336, 0.03829926300048828, 0.039310302734375, 
0.047026176452636716, 0.03845529556274414, 0.03843686294555664, 0.03827916717529297, 0.03876764678955078, 0.03960422515869141, 0.038554431915283204, 0.038244510650634764, 0.03829059219360351, 0.03812003326416016, 0.03816678237915039, 0.03831808090209961, 0.03814972686767578, 0.03811164855957031, 0.038161792755126954, 0.03807052612304687, 0.03819903945922851, 0.03808729553222656, 0.03812351989746094, 0.038118816375732424, 0.03808111953735351, 0.03799244689941406, 0.03810636901855469, 0.038228416442871095, 0.03804191970825195, 0.0385096321105957, 0.03869177627563476, 0.03824630355834961, 0.03836095809936523, 0.03825875091552734, 0.03823632049560547, 0.03817574310302734, 0.03801926422119141, 0.03945555114746094, 0.03838566589355469, 0.03835481643676758, 0.03821718215942383, 0.03858441543579102, 0.03832070541381836, 0.03822796630859375, 0.03808665466308594, 0.03893657684326172, 0.03833647918701172, 0.03822371292114258, 0.038113471984863284, 0.03829065704345703, 0.0380280647277832, 0.03814121627807617, 0.038762462615966796, 0.03877040100097656, 0.03880521774291992, 0.03811388778686523, 0.03816815948486328, 0.03834649658203125, 0.03804739379882813, 0.038072769165039065, 0.03804396820068359, 0.03859686279296875, 0.038295520782470706, 0.03793862533569336, 0.0381220817565918, 0.038128990173339844, 0.038139968872070315, 0.038224479675292966, 0.038152191162109376, 0.03813580703735352, 0.03826073455810547, 0.03817267227172851, 0.03821347045898438, 0.03807638549804687, 0.03807455825805664, 0.0387083854675293, 0.03831254577636719, 0.03812768173217773, 0.03798239898681641, 0.0382033920288086, 0.03859360122680664, 0.03835590362548828, 0.03838540649414063, 0.039446495056152345, 0.03831017684936523, 0.038338558197021484, 0.03857104110717773, 0.038233055114746096, 0.03821363067626953, 0.03799859237670898, 0.0380579833984375, 0.03820544052124023, 0.039796638488769534, 0.038409694671630856, 0.03829974365234375, 0.03822761535644531, 0.03812646484375, 0.03828678512573242, 0.03841462326049805, 0.03817295837402344, 0.03809203338623047, 0.03821398544311523, 0.03813343811035156, 0.03814169692993164, 0.03829180908203125, 0.038228607177734374, 0.03847782516479492, 0.038259998321533206, 0.03818550491333008, 0.038179008483886716, 0.03851871871948242, 0.038432479858398434, 0.03838739013671875, 0.038136478424072265, 0.038200607299804686, 0.03848787307739258, 0.03812492752075195, 0.03815507125854492, 0.0383969612121582, 0.03811017608642578, 0.037807487487792966, 0.037833343505859374, 0.03795308685302735, 0.038089153289794925, 0.03797359848022461, 0.03785321426391602, 0.03781257629394531, 0.03792899322509766, 0.038116542816162106, 0.03894559860229492, 0.03821977615356445, 0.03837091064453125, 0.03835126495361328, 0.038270751953125, 0.03823987197875976, 0.03849071884155274, 0.03816755294799805, 0.03806515121459961, 0.03807785415649414, 0.03796428680419922, 0.03806211090087891, 0.038228031158447265, 0.038100639343261716, 0.03842697525024414, 0.038037246704101565, 0.03808185577392578, 0.03793196868896485, 0.03806208038330078, 0.03863347244262695, 0.03882112121582031, 0.038289569854736326, 0.03799919891357422, 0.03817007827758789, 0.03795123291015625, 0.03808950424194336, 0.03825008010864258, 0.037969406127929685, 0.03845977783203125, 0.03827532958984375, 0.038451488494873044, 0.038201343536376955, 0.03829145431518555, 0.038386913299560545, 0.03843948745727539, 0.03826095962524414, 0.038004737854003906, 0.03836928176879883, 0.03800883102416992, 0.03831808090209961, 0.037975711822509764, 0.03804604721069336, 0.03800883102416992, 
0.037893695831298826, 0.03817452621459961, 0.038197887420654296, 0.03811862564086914, 0.03814070510864258, 0.03812489700317383, 0.03902259063720703, 0.03838259124755859, 0.038095870971679685, 0.03810508728027344, 0.03821977615356445, 0.038171646118164065, 0.041534465789794923, 0.03811942291259766, 0.038002689361572264, 0.03807187271118164, 0.03798998260498047, 0.03796259307861328, 0.037914497375488285, 0.03799257659912109, 0.03815545654296875, 0.03808256149291992, 0.03800761413574219, 0.038182910919189454, 0.037975711822509764, 0.038074462890625, 0.03805414581298828, 0.03815628814697265, 0.038039520263671876, 0.038125598907470706, 0.03793305587768555, 0.038076416015625, 0.03850239944458008, 0.03821158218383789, 0.03817609786987305, 0.038056385040283205, 0.038152416229248046, 0.03823001480102539, 0.03852492904663086, 0.03824639892578125, 0.03817062377929688, 0.037976062774658204, 0.03810713577270508, 0.03819286346435547, 0.03805212783813477, 0.03784048080444336, 0.038023582458496095, 0.037838848114013675, 0.03801449584960938, 0.03797603225708008, 0.038017536163330076, 0.03807366561889648, 0.03790304183959961, 0.037932640075683595, 0.03798015975952149, 0.03793075180053711, 0.038127712249755856, 0.03850092697143555, 0.04037392044067383, 0.038537567138671874, 0.03821158218383789, 0.038125343322753906, 0.03824457550048828, 0.03864371109008789, 0.03840748977661133, 0.0387918701171875, 0.03822182464599609, 0.038330368041992184, 0.03816447830200195]",tokens/s,26.108043105343697,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: Phi3ForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,2627.469312,2851.995648,0.0,2449.473536,2380.274176,s,1,11.0900888671875,11.0900888671875,0.0,11.0900888671875,11.0900888671875,11.0900888671875,11.0900888671875,[11.0900888671875],,kWh,5.687830749999894e-05,6.266608736095465e-06,1.8734459431998807e-05,8.187937566809321e-05,,MB,2725.74464,3143.499776,0.0,2726.2976,2674.707968,s,10,1.2258160552978514,0.12258160552978517,0.00023113163263810603,0.12251057434082031,0.12281737060546875,0.12292017517089844,0.12300241882324219,"[0.12250166320800782, 0.1224617919921875, 0.12223919677734375, 0.12279452514648438, 0.12272156524658204, 0.12250064086914063, 0.12251948547363281, 0.12228201293945312, 0.12302297973632813, 
0.1227721939086914]",tokens/s,2088.404690847327,kWh,3.6568604209374906e-06,4.0328480061593604e-07,2.437703339050034e-06,6.497848560603461e-06,tokens/kWh,39397655.64130431,MB,2736.61952,3143.499776,0.0,2726.2976,2674.710528,s,10,23.944139404296873,2.3944139404296876,0.008119033266761253,2.3903076171875,2.4054596679687497,2.408448828125,2.41084015625,"[2.388973876953125, 2.38654931640625, 2.390777587890625, 2.402520751953125, 2.389837646484375, 2.39129541015625, 2.41143798828125, 2.40479541015625, 2.38980859375, 2.388142822265625]",tokens/s,26.311240064320035,kWh,6.917607893656269e-05,7.630003969309075e-06,3.7198022813949865e-05,0.00011400410571982165,tokens/kWh,552611.6765902259,,s,630,23.939147518157956,0.037998646854218984,0.0005852333581250627,0.037887887954711916,0.03827270736694336,0.03857254180908203,0.04092545295715332,"[0.038161441802978514, 0.03774153518676758, 0.03761356735229492, 0.03746416091918945, 0.03774998474121094, 0.03754668807983398, 0.037629726409912106, 0.0380558090209961, 0.038134273529052735, 0.03793600082397461, 0.03794979095458984, 0.03758963012695313, 0.03771187210083008, 0.03814710235595703, 0.03779628753662109, 0.037511711120605466, 0.037840606689453125, 0.03764048004150391, 0.03758636856079101, 0.037601505279541016, 0.037970272064208985, 0.037824222564697266, 0.03788547134399414, 0.03796044921875, 0.037922080993652345, 0.03780886459350586, 0.038096481323242185, 0.03782902526855469, 0.03817267227172851, 0.0378100471496582, 0.03787081527709961, 0.037937889099121096, 0.03799014282226563, 0.04156665420532227, 0.03859251022338867, 0.037875423431396486, 0.03793507385253906, 0.03783433532714844, 0.03786415863037109, 0.03781753540039062, 0.037988929748535155, 0.037734241485595704, 0.037704097747802735, 0.03779580688476562, 0.03789827346801758, 0.037797664642333986, 0.03784268951416016, 0.037953983306884764, 0.03780201721191406, 0.03783260726928711, 0.037789791107177735, 0.037701217651367185, 0.03784304046630859, 0.037724479675292966, 0.037894142150878905, 0.03776716613769531, 0.03780956649780273, 0.03789884948730469, 0.037814014434814455, 0.037838943481445314, 0.0384738883972168, 0.03777059173583985, 0.03787228775024414, 0.038429054260253906, 0.037763072967529294, 0.03762985610961914, 0.0377077751159668, 0.03809904098510742, 0.03770163345336914, 0.038449153900146485, 0.03859414291381836, 0.0379249267578125, 0.03779996871948242, 0.037832286834716795, 0.0376610221862793, 0.03931353759765625, 0.03791487884521484, 0.03783036804199219, 0.03825078582763672, 0.03782857513427734, 0.03827206420898437, 0.037770206451416016, 0.03792079925537109, 0.03818902587890625, 0.03793920135498047, 0.037838848114013675, 0.037902015686035156, 0.03773062515258789, 0.03809791946411133, 0.03779891204833984, 0.03770163345336914, 0.037984031677246094, 0.038043167114257814, 0.037800350189208985, 0.0377408332824707, 0.037867774963378904, 0.03777715301513672, 0.037781375885009766, 0.03770790481567383, 0.037718017578125, 0.03772003173828125, 0.037655872344970705, 0.037870559692382816, 0.037857025146484376, 0.03775183868408203, 0.03782550430297851, 0.03781219100952148, 0.037736480712890624, 0.0376360969543457, 0.03762390518188476, 0.03798396682739258, 0.03767068862915039, 0.03848812866210938, 0.037695838928222654, 0.03762911987304687, 0.037903167724609374, 0.038105056762695315, 0.03779135894775391, 0.03762435150146484, 0.03749612808227539, 0.03781894302368164, 0.03748659133911133, 0.03785481643676758, 0.03766838455200195, 0.03735948944091797, 0.03770470428466797, 0.038346336364746096, 
0.03818960189819336, 0.037609344482421876, 0.03770889663696289, 0.037900447845458984, 0.037569278717041014, 0.038430721282958984, 0.040882080078125, 0.037851295471191405, 0.03782761764526367, 0.03773737716674805, 0.037883968353271485, 0.037711807250976566, 0.03802912139892578, 0.03787321472167969, 0.03769811248779297, 0.03762911987304687, 0.03786636734008789, 0.03775699234008789, 0.03774457550048828, 0.03822198486328125, 0.03847103881835937, 0.037865535736083984, 0.03787408065795898, 0.03768316650390625, 0.03770576095581055, 0.0376545295715332, 0.037875038146972656, 0.03761174392700195, 0.03774643325805664, 0.037908287048339845, 0.03776192092895508, 0.037748737335205076, 0.038174270629882816, 0.03794694519042969, 0.037645313262939455, 0.03751625442504883, 0.037730560302734376, 0.03785929489135742, 0.037796062469482423, 0.03950211334228516, 0.03793324661254883, 0.037996543884277346, 0.037865089416503905, 0.037722110748291016, 0.037887966156005856, 0.038097312927246094, 0.03799766540527344, 0.03789852905273437, 0.03801993560791016, 0.03787753677368164, 0.037765121459960936, 0.03777740859985351, 0.037935104370117184, 0.038215679168701173, 0.03777740859985351, 0.03797568130493164, 0.03780956649780273, 0.0378256950378418, 0.037736255645751955, 0.03796582412719727, 0.03777507019042969, 0.037898174285888674, 0.0383133430480957, 0.037821281433105466, 0.038043647766113284, 0.038046974182128906, 0.037767967224121096, 0.03819107055664062, 0.037806079864501956, 0.03781836700439453, 0.037801311492919924, 0.03780422210693359, 0.03778950500488281, 0.037806304931640625, 0.03781622314453125, 0.03783113479614258, 0.037955646514892576, 0.037663841247558595, 0.037882785797119144, 0.03776921463012695, 0.03798425674438476, 0.03777740859985351, 0.038234111785888675, 0.037789695739746096, 0.0378727035522461, 0.037755840301513674, 0.03794515228271484, 0.038547550201416016, 0.03964271926879883, 0.037890369415283204, 0.03784518432617188, 0.038077503204345706, 0.037870494842529294, 0.03960009765625, 0.03836896133422851, 0.03800009536743164, 0.03776812744140625, 0.03781990432739258, 0.03766470336914062, 0.03899446487426758, 0.03801641464233398, 0.03763779067993164, 0.03810012817382812, 0.04136732864379883, 0.037910526275634765, 0.03810508728027344, 0.03774687957763672, 0.03793900680541992, 0.03816447830200195, 0.03803276824951172, 0.03783542251586914, 0.03819107055664062, 0.03806995010375976, 0.03837382507324219, 0.0386616325378418, 0.038141342163085935, 0.037985248565673826, 0.037926910400390625, 0.040943168640136716, 0.03817062377929688, 0.037985824584960935, 0.03786025619506836, 0.03787776184082031, 0.03775244903564453, 0.03781027221679688, 0.03863491058349609, 0.037960544586181644, 0.037996543884277346, 0.03819839859008789, 0.038030368804931644, 0.03801667022705078, 0.03815996932983398, 0.037929534912109375, 0.037760353088378905, 0.03792569732666016, 0.03781961441040039, 0.03772463989257813, 0.038012191772460936, 0.038177310943603514, 0.03802076721191406, 0.03794144058227539, 0.03809487915039062, 0.038015487670898435, 0.038012928009033206, 0.03827475357055664, 0.037816287994384766, 0.03788006210327149, 0.03787513732910156, 0.03782627105712891, 0.03788671875, 0.03781014251708984, 0.03777766418457031, 0.03803337478637695, 0.03775692749023438, 0.03782656097412109, 0.03783679962158203, 0.03798588943481445, 0.03836511993408203, 0.03786966323852539, 0.03780441665649414, 0.037965152740478514, 0.03771664047241211, 0.03782175827026367, 0.03783135986328125, 0.038037376403808595, 0.037703327178955075, 0.037872062683105466, 
0.037791584014892576, 0.037638046264648437, 0.03783913421630859, 0.03775283050537109, 0.03781631851196289, 0.03759430313110351, 0.0377086067199707, 0.03769548797607422, 0.03774031829833984, 0.03797353744506836, 0.03768595123291016, 0.037945343017578126, 0.03768524932861328, 0.03786342239379883, 0.037822463989257815, 0.0385906867980957, 0.03783030319213867, 0.038876449584960934, 0.03797273635864258, 0.037783744812011716, 0.03770767974853516, 0.03813196945190429, 0.03766518402099609, 0.03829731369018555, 0.03751964950561523, 0.037716064453125, 0.03789184188842774, 0.03784688186645508, 0.03758310317993164, 0.0378076171875, 0.03763667297363281, 0.03766681671142578, 0.0385863037109375, 0.037576766967773435, 0.03781631851196289, 0.03794124984741211, 0.03786751937866211, 0.03771708679199219, 0.037886878967285154, 0.037804031372070314, 0.03809856033325195, 0.03794777679443359, 0.03777123260498047, 0.03834400177001953, 0.037888832092285156, 0.03811318588256836, 0.037967872619628903, 0.03804159927368164, 0.03770483016967773, 0.037948287963867185, 0.0380252799987793, 0.037932991027832035, 0.03788595199584961, 0.03809075164794922, 0.038147552490234375, 0.03793324661254883, 0.03776137542724609, 0.03791462326049805, 0.03824639892578125, 0.0380682258605957, 0.038012321472167966, 0.03801340866088867, 0.03790655899047852, 0.03804159927368164, 0.03787155151367187, 0.037873729705810544, 0.03823231887817383, 0.03833212661743164, 0.038066207885742186, 0.03807231903076172, 0.03790848159790039, 0.03798400115966797, 0.03819955062866211, 0.037918144226074216, 0.037897857666015625, 0.03792323303222656, 0.03799708938598633, 0.03794944000244141, 0.03780774307250977, 0.03793958282470703, 0.03796377563476563, 0.03801897430419922, 0.03790633773803711, 0.0381912956237793, 0.038625568389892576, 0.03803827285766601, 0.03797187042236328, 0.037897823333740234, 0.038089118957519534, 0.04763238525390625, 0.03802316665649414, 0.037789695739746096, 0.03781532669067383, 0.037862369537353516, 0.037804031372070314, 0.03783375930786133, 0.03790243148803711, 0.03779878234863281, 0.03880716705322266, 0.03771004867553711, 0.03780220794677734, 0.037846782684326175, 0.038443199157714845, 0.03778051376342773, 0.038009822845458986, 0.03831123352050781, 0.037946048736572265, 0.03788595199584961, 0.03783679962158203, 0.03782377624511719, 0.03782486343383789, 0.037902496337890626, 0.03790460968017578, 0.037967552185058595, 0.03868499374389649, 0.03827248001098633, 0.03844559860229492, 0.03789004898071289, 0.03796889495849609, 0.03793318557739258, 0.0380445442199707, 0.038028671264648435, 0.03806886291503906, 0.03802649688720703, 0.03793084716796875, 0.041791393280029294, 0.038678657531738284, 0.03791756820678711, 0.038020641326904296, 0.038063583374023435, 0.0378419189453125, 0.03789177703857422, 0.038029022216796875, 0.037937759399414066, 0.03786956787109375, 0.03788780975341797, 0.03798444747924805, 0.03905945587158203, 0.03846144104003906, 0.03796582412719727, 0.03784630584716797, 0.037818878173828126, 0.03856816101074219, 0.038018943786621094, 0.038043777465820314, 0.03865129470825195, 0.03836988830566406, 0.03857612609863281, 0.038133758544921875, 0.03817801666259766, 0.03813040161132813, 0.03810128021240235, 0.037942462921142575, 0.037857566833496094, 0.03785292816162109, 0.0378721923828125, 0.03830374526977539, 0.03799808120727539, 0.03779020690917969, 0.03791667175292969, 0.037935104370117184, 0.03826073455810547, 0.03788390350341797, 0.03783475112915039, 0.03797196960449219, 0.03825356674194336, 0.037974559783935546, 
0.03794316864013672, 0.037878368377685545, 0.03806617736816406, 0.04098169708251953, 0.03864460754394531, 0.037861057281494144, 0.03791244888305664, 0.03806246566772461, 0.037862815856933595, 0.037911136627197264, 0.039026142120361325, 0.03803190231323242, 0.037903423309326174, 0.03795443344116211, 0.03786348724365234, 0.03787161636352539, 0.037967872619628903, 0.03786041641235351, 0.0391710090637207, 0.04103094482421875, 0.03828400039672852, 0.03814809417724609, 0.03813113784790039, 0.037869918823242185, 0.03843494415283203, 0.038053985595703124, 0.03806115341186524, 0.037808414459228515, 0.03794720077514648, 0.03801171112060547, 0.038100288391113284, 0.03790918350219727, 0.03806208038330078, 0.0381952018737793, 0.03973529434204102, 0.03783395385742187, 0.037733150482177735, 0.038012928009033206, 0.037762592315673825, 0.03833244705200195, 0.03766694259643555, 0.03771424102783203, 0.03790956878662109, 0.03840464019775391, 0.03795596694946289, 0.03793427276611328, 0.03787152099609375, 0.03815132904052734, 0.03776063919067383, 0.03769161605834961, 0.03775267028808594, 0.03802732849121094, 0.03790809631347656, 0.03777753448486328, 0.03782476806640625, 0.0380516471862793, 0.037978111267089845, 0.03792272186279297, 0.0377940788269043, 0.03777308654785156, 0.03792521667480469, 0.03773427200317383, 0.037700767517089846, 0.03782332611083984, 0.037703678131103514, 0.03773440170288086, 0.037740287780761717, 0.037900543212890624, 0.03790233612060547, 0.038053726196289064, 0.038215328216552734, 0.037812255859375, 0.037943679809570315, 0.03820486450195312, 0.03801279830932617, 0.03806492614746094, 0.03788595199584961, 0.03802515029907227, 0.0379532470703125, 0.03810464096069336, 0.03794777679443359, 0.037732574462890626, 0.03779132843017578, 0.038031776428222655, 0.03758713531494141, 0.037778976440429685, 0.03798252868652344, 0.037977985382080075, 0.037970497131347654, 0.037690494537353514, 0.037837406158447266, 0.03786137771606445, 0.03788185501098633, 0.03786137771606445, 0.03801702499389648, 0.03790444946289063, 0.03820947265625, 0.03783475112915039, 0.03845849609375, 0.03807263946533203, 0.03829971313476563, 0.03796223831176758, 0.037862686157226565, 0.03793993759155274, 0.037883872985839846, 0.03788188934326172, 0.038580062866210935, 0.037700927734375, 0.037811073303222656, 0.037838817596435544, 0.03820041656494141, 0.03813443374633789, 0.03808281707763672, 0.03765804672241211, 0.03780361557006836, 0.03856617736816406, 0.03775148773193359, 0.03787116622924805, 0.03799289703369141, 0.037969440460205076, 0.03779414367675781, 0.037810302734375, 0.0378590087890625, 0.037849407196044925, 0.037820415496826174, 0.03791241455078125, 0.03792816162109375, 0.038052799224853516, 0.03786735916137695, 0.03799788665771484, 0.03767324829101563, 0.037708351135253906, 0.03770534515380859, 0.037679489135742185, 0.037761184692382814, 0.03769532775878906, 0.03792844772338867, 0.03787843322753906, 0.037893344879150394, 0.037859264373779296, 0.03793376159667969, 0.03781763076782227, 0.03784777450561523, 0.03789564895629883, 0.037798431396484374, 0.03774630355834961, 0.03829142379760742, 0.037775775909423825, 0.037854366302490235, 0.03806639862060547, 0.03779852676391601, 0.038101249694824216, 0.03771775817871094, 0.037873279571533205, 0.037728321075439455, 0.038152320861816406, 0.03779375839233398, 0.03822819137573242, 0.03801107025146484, 0.03800223922729492, 0.037705982208251956, 0.03787299346923828, 0.03788470458984375, 0.037730175018310545, 0.03791667175292969, 0.03800393676757813, 0.03785311889648438, 
0.037778270721435546, 0.03783270263671875]",tokens/s,26.31672658861983,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1243, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1121, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 842, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 530, in forward attn_output = self._flash_attention_forward( File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 628, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: Phi3ForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. 
Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4896.751616,5995.626496,0.0,5593.104384,5582.857216,s,1,11.489771484375,11.489771484375,0.0,11.489771484375,11.489771484375,11.489771484375,11.489771484375,[11.489771484375],,kWh,0.00011806145224998848,1.301551743460821e-05,3.635197352599984e-05,0.00016742894321059652,,MB,1569.632256,6324.87936,0.0,5907.677184,5844.559872,s,10,2.0207774047851563,0.20207774047851562,0.00038689895244857,0.20201309967041015,0.202544287109375,0.20273568267822267,0.2028887991333008,"[0.20195497131347656, 0.20207122802734376, 0.2018753662109375, 0.2016386260986328, 0.2021746826171875, 0.2017010498046875, 0.2016741485595703, 0.20292707824707032, 0.2022584991455078, 0.2025017547607422]",tokens/s,1266.8391847305777,kWh,5.911609283416207e-06,6.519431479882893e-07,3.9273975863601815e-06,1.0490950017764678e-05,tokens/kWh,24401984.526330467,MB,1573.670912,6324.87936,0.0,5907.677184,5844.562432,s,10,14.907234375,1.4907234375,0.0018872144851043732,1.4902535400390624,1.4928083984375,1.4931936889648436,1.4935019213867187,"[1.4899151611328125, 1.4897984619140625, 1.489876953125, 1.4917333984375, 1.4927227783203125, 1.4935789794921874, 1.489792236328125, 1.4867083740234375, 1.49251611328125, 1.4905919189453125]",tokens/s,42.261360098861395,kWh,4.35392782282485e-05,4.802132315365114e-06,2.8924045361438113e-05,7.726545590505172e-05,tokens/kWh,815370.8440861083,,s,630,14.904556339263905,0.023658025935339548,0.00018368785427193713,0.02365676784515381,0.023894118690490723,0.02396721887588501,0.024107075710296633,"[0.023576576232910155, 0.023207935333251953, 0.02334048080444336, 0.02360697555541992, 0.02399724769592285, 0.023935264587402343, 0.02375657653808594, 0.02368307113647461, 0.023910400390625, 0.023704959869384767, 0.02363216018676758, 0.023538015365600587, 0.023580671310424805, 0.023748607635498048, 0.02356547164916992, 0.0237076473236084, 0.02373472023010254, 0.023689504623413085, 0.02355628776550293, 0.02344691276550293, 0.023321151733398438, 0.02345779228210449, 0.0239554557800293, 0.023750656127929686, 0.02369945526123047, 0.023673887252807616, 0.023533920288085937, 0.02353011131286621, 0.02368307113647461, 0.023735647201538087, 0.023720703125, 0.02387958335876465, 0.023602176666259765, 0.023791648864746093, 0.023808992385864258, 0.02386534309387207, 0.023525568008422853, 0.023442752838134767, 0.023441919326782225, 0.023572288513183593, 0.02374675178527832, 0.023776384353637697, 0.023645055770874022, 0.023610815048217774, 0.023409215927124024, 0.02342092704772949, 0.02353683280944824, 0.023660991668701174, 0.02365603256225586, 0.023589664459228516, 0.023502656936645508, 0.02356038475036621, 
0.02367897605895996, 0.023608608245849608, 0.02356502342224121, 0.023529472351074218, 0.02345062446594238, 0.02376192092895508, 0.02389401626586914, 0.02381814384460449, 0.02383395195007324, 0.02375913619995117, 0.023752544403076174, 0.023408479690551758, 0.023318080902099608, 0.023470304489135743, 0.02357491111755371, 0.023391904830932616, 0.02329635238647461, 0.023824384689331055, 0.02391859245300293, 0.02371174430847168, 0.023599103927612306, 0.023576543807983397, 0.02327334403991699, 0.023424575805664063, 0.023511648178100586, 0.023451648712158202, 0.02343731117248535, 0.0233984317779541, 0.023286975860595704, 0.023364383697509764, 0.023556095123291015, 0.023492319107055664, 0.023716127395629883, 0.023713983535766602, 0.023445344924926757, 0.02357449531555176, 0.023492000579833985, 0.02367958450317383, 0.023654399871826173, 0.023597055435180665, 0.023576576232910155, 0.023737375259399413, 0.02380080032348633, 0.02396073532104492, 0.02388380813598633, 0.023689311981201173, 0.023694047927856444, 0.023545856475830077, 0.023508991241455078, 0.02353104019165039, 0.023467615127563478, 0.023843103408813477, 0.02374838447570801, 0.02414620780944824, 0.024066688537597657, 0.023996320724487305, 0.023787200927734373, 0.023590463638305664, 0.02361952018737793, 0.023737152099609374, 0.023734399795532227, 0.02367840003967285, 0.023583168029785155, 0.023513088226318358, 0.02368307113647461, 0.023832576751708984, 0.024330015182495116, 0.023812320709228514, 0.023752704620361328, 0.023495872497558593, 0.02373302459716797, 0.023773216247558595, 0.023812095642089845, 0.023725791931152342, 0.023531295776367187, 0.02323036766052246, 0.02351103973388672, 0.023646175384521486, 0.023626943588256837, 0.023640607833862303, 0.023376415252685547, 0.02357372856140137, 0.023417407989501954, 0.02332262420654297, 0.02355990409851074, 0.023421119689941407, 0.023568479537963868, 0.023684127807617188, 0.023690080642700194, 0.02376905632019043, 0.02374774360656738, 0.023634944915771484, 0.02345302391052246, 0.023640735626220703, 0.023554048538208007, 0.023441408157348635, 0.023536960601806642, 0.02342540740966797, 0.023487808227539063, 0.023524351119995117, 0.023711551666259767, 0.023787551879882813, 0.02381020736694336, 0.023756223678588866, 0.023571008682250975, 0.02372812843322754, 0.023686912536621092, 0.02380620765686035, 0.023643680572509766, 0.02348480033874512, 0.023365728378295897, 0.023457151412963867, 0.023693695068359374, 0.02368307113647461, 0.023799135208129884, 0.0236430721282959, 0.023502815246582032, 0.02370969581604004, 0.023598335266113282, 0.023618303298950195, 0.023609344482421874, 0.02371993637084961, 0.023613439559936524, 0.023751840591430665, 0.024072608947753905, 0.023971712112426758, 0.023940736770629883, 0.02377129554748535, 0.0236080322265625, 0.023588960647583007, 0.024001728057861327, 0.02404377555847168, 0.02387171173095703, 0.023732383728027343, 0.02359926414489746, 0.023773248672485352, 0.02384480094909668, 0.023587039947509766, 0.023258047103881838, 0.023411680221557617, 0.023672384262084963, 0.024004480361938477, 0.023686975479125977, 0.02374131202697754, 0.023698400497436524, 0.023634016036987306, 0.023614208221435548, 0.023731391906738283, 0.02356928062438965, 0.023607295989990236, 0.023587072372436523, 0.023755680084228514, 0.023382879257202147, 0.023508224487304687, 0.023520000457763673, 0.023427072525024413, 0.023287647247314452, 0.023198944091796875, 0.023374784469604493, 0.02350668716430664, 0.0235513916015625, 0.023595392227172853, 0.023463647842407228, 0.02333158493041992, 
0.023371776580810546, 0.02365388870239258, 0.02386956787109375, 0.02384499168395996, 0.0237521915435791, 0.02375142478942871, 0.023848575592041017, 0.02409721565246582, 0.02407580757141113, 0.023939039230346678, 0.023730112075805665, 0.02348297691345215, 0.023494560241699217, 0.02368921661376953, 0.023784608840942384, 0.023726335525512697, 0.023851871490478516, 0.023561983108520507, 0.02369740867614746, 0.023863040924072265, 0.023619104385375976, 0.023857919692993165, 0.023446752548217775, 0.023476991653442383, 0.0240960636138916, 0.02411494445800781, 0.02418783950805664, 0.023828191757202147, 0.0236014404296875, 0.02362544059753418, 0.023975711822509765, 0.02402355194091797, 0.023940895080566408, 0.02368534469604492, 0.02352499198913574, 0.02365683174133301, 0.02339619255065918, 0.023602943420410156, 0.023610559463500977, 0.023555904388427733, 0.023687231063842774, 0.02374239921569824, 0.023895488739013673, 0.023663167953491212, 0.02366806411743164, 0.023624351501464844, 0.02362508773803711, 0.023626367568969728, 0.023643999099731444, 0.023678783416748048, 0.023562591552734376, 0.02332057571411133, 0.023351264953613282, 0.023592031478881836, 0.02361235237121582, 0.023624799728393556, 0.023538591384887696, 0.02349171257019043, 0.023632768630981446, 0.023802911758422852, 0.023896831512451172, 0.023752927780151367, 0.02373017692565918, 0.023650304794311523, 0.02367283248901367, 0.02378550338745117, 0.023934112548828126, 0.02393974494934082, 0.023850719451904298, 0.023677568435668945, 0.02385055923461914, 0.023861503601074217, 0.02384432029724121, 0.023700000762939454, 0.02350694465637207, 0.02345523262023926, 0.023640575408935546, 0.023719711303710936, 0.023775455474853515, 0.023612960815429688, 0.02366476821899414, 0.023755104064941406, 0.023785472869873047, 0.023846912384033202, 0.023769088745117187, 0.023565759658813478, 0.02344361686706543, 0.023599519729614257, 0.02411110305786133, 0.024037439346313475, 0.023860607147216797, 0.023931455612182618, 0.02354380798339844, 0.023643583297729493, 0.023654975891113282, 0.023775232315063476, 0.023820287704467775, 0.023694751739501953, 0.023551679611206053, 0.023560224533081056, 0.023494495391845702, 0.023667903900146486, 0.023646848678588867, 0.02350726318359375, 0.023557119369506836, 0.023596031188964844, 0.02357593536376953, 0.02360697555541992, 0.023491519927978516, 0.02343065643310547, 0.023306751251220705, 0.023385503768920898, 0.02357088088989258, 0.023548095703125, 0.02354172706604004, 0.023603391647338868, 0.02325801658630371, 0.02342799949645996, 0.023533567428588868, 0.023601152420043944, 0.023643775939941405, 0.023667327880859373, 0.023527168273925782, 0.023629823684692384, 0.023820287704467775, 0.0237806396484375, 0.023797855377197266, 0.02386944007873535, 0.023437952041625975, 0.023666688919067383, 0.02395955276489258, 0.024007936477661133, 0.023803712844848633, 0.02403424072265625, 0.02385305595397949, 0.02389504051208496, 0.023778303146362305, 0.02369536018371582, 0.02373206329345703, 0.02364348793029785, 0.023671648025512696, 0.024153440475463868, 0.023961471557617187, 0.02376780891418457, 0.02375587272644043, 0.023571359634399415, 0.023812095642089845, 0.023948448181152344, 0.0239215030670166, 0.023811904907226563, 0.023726272583007812, 0.023680288314819335, 0.023768800735473633, 0.024005632400512695, 0.023958623886108397, 0.02378201675415039, 0.023687456130981447, 0.023769088745117187, 0.023900159835815428, 0.023820032119750978, 0.023895360946655272, 0.023763200759887696, 0.023801376342773437, 0.023770048141479493, 
0.023744512557983398, 0.02366464042663574, 0.023577760696411133, 0.02350166320800781, 0.02345680046081543, 0.023436256408691406, 0.023412607192993165, 0.023404447555541993, 0.0235828800201416, 0.02341257667541504, 0.023273120880126952, 0.023320959091186522, 0.023463775634765625, 0.023492639541625976, 0.023518720626831056, 0.023477119445800783, 0.02335532760620117, 0.023318656921386717, 0.02356755256652832, 0.0238720645904541, 0.023729728698730468, 0.023740991592407227, 0.023615488052368162, 0.023259136199951173, 0.02352275276184082, 0.023708223342895508, 0.023738208770751952, 0.023674367904663086, 0.023454368591308592, 0.023649280548095702, 0.023814271926879883, 0.023947551727294923, 0.023924512863159178, 0.023825151443481445, 0.023684703826904296, 0.02358278465270996, 0.02374083137512207, 0.023756479263305662, 0.023734592437744142, 0.023627775192260742, 0.02346294403076172, 0.023917024612426757, 0.023839231491088866, 0.023803295135498045, 0.023722591400146483, 0.023504896163940428, 0.023529472351074218, 0.0235599365234375, 0.02373868751525879, 0.023700447082519532, 0.023896831512451172, 0.02376246452331543, 0.02366681671142578, 0.02367340850830078, 0.023795040130615234, 0.023749216079711914, 0.023729888916015626, 0.023497055053710938, 0.023762752532958984, 0.02391904067993164, 0.024090368270874022, 0.023929920196533203, 0.02326905632019043, 0.023480543136596678, 0.023793664932250977, 0.023756160736083984, 0.023693952560424805, 0.023605247497558594, 0.023586816787719726, 0.02357369613647461, 0.02365523147583008, 0.023568384170532225, 0.023670656204223633, 0.023533695220947264, 0.023396352767944335, 0.023339008331298827, 0.023391679763793947, 0.02332320022583008, 0.023259136199951173, 0.02326937675476074, 0.02340838432312012, 0.02343756866455078, 0.023438880920410157, 0.02344598388671875, 0.02343731117248535, 0.02367692756652832, 0.023529472351074218, 0.023414783477783203, 0.023408639907836915, 0.023455743789672853, 0.02367692756652832, 0.023609024047851562, 0.023438880920410157, 0.02345619201660156, 0.02363382339477539, 0.02372428894042969, 0.023636224746704102, 0.023495935440063478, 0.023480512619018554, 0.023494783401489257, 0.02361577606201172, 0.023591007232666016, 0.023656160354614257, 0.023634464263916015, 0.023627519607543945, 0.023813728332519532, 0.023868928909301756, 0.023855615615844726, 0.023855520248413087, 0.02367487907409668, 0.023541759490966797, 0.0237260799407959, 0.02368716812133789, 0.023711584091186524, 0.02372822380065918, 0.023773248672485352, 0.023472127914428712, 0.023563743591308594, 0.02376144027709961, 0.02370560073852539, 0.023752704620361328, 0.02376857566833496, 0.02366105651855469, 0.024012672424316407, 0.02360940742492676, 0.023428800582885743, 0.02343142318725586, 0.02332806396484375, 0.023451679229736327, 0.02354256057739258, 0.023711200714111327, 0.02380611228942871, 0.023752960205078125, 0.02372198486328125, 0.023746559143066406, 0.02352479934692383, 0.023683647155761718, 0.02372812843322754, 0.023586528778076172, 0.023510719299316408, 0.02380771255493164, 0.023947519302368166, 0.02388582420349121, 0.023682815551757812, 0.023610240936279298, 0.02349260711669922, 0.023619583129882812, 0.02380739212036133, 0.023754911422729494, 0.023638336181640626, 0.02347225570678711, 0.023330816268920897, 0.023594047546386717, 0.0236759033203125, 0.023655616760253906, 0.023823040008544922, 0.023588544845581056, 0.023562496185302734, 0.02350707244873047, 0.023709375381469725, 0.02369977569580078, 0.023631872177124022, 0.02353561592102051, 0.02365023994445801, 
0.023733407974243163, 0.023765920639038086, 0.023793664932250977, 0.023708896636962892, 0.02373446464538574, 0.023698015213012694, 0.023977088928222656, 0.023815040588378907, 0.023774368286132812, 0.023649120330810548, 0.023539968490600586, 0.02361523246765137, 0.023842239379882814, 0.023833152770996093, 0.02371583938598633, 0.02366624069213867, 0.023548351287841798, 0.023754751205444336, 0.0241112003326416, 0.024046592712402344, 0.023961727142333984, 0.023868192672729494, 0.023848447799682617, 0.023861408233642578, 0.02342092704772949, 0.023553632736206056, 0.023425024032592775, 0.023253408432006836, 0.023357343673706055, 0.023468128204345705, 0.02355766487121582, 0.023558271408081054, 0.023476224899291992, 0.02338377571105957, 0.023445888519287108, 0.02365670394897461, 0.023664192199707033, 0.023622079849243163, 0.02372198486328125, 0.023408639907836915, 0.023373823165893554, 0.02346188735961914, 0.023827999114990235, 0.023822816848754882, 0.023613248825073242, 0.02362553596496582, 0.02359129524230957, 0.023574560165405274, 0.02375052833557129, 0.023611488342285155, 0.023575616836547852, 0.023491519927978516, 0.023717695236206055, 0.024090816497802734, 0.023984128952026368, 0.023883520126342775, 0.02381987190246582, 0.02373699188232422, 0.023531007766723632, 0.023723615646362304, 0.023688095092773438, 0.023817920684814455, 0.02381376075744629, 0.023320768356323244, 0.023437183380126955, 0.023581024169921874, 0.02357872009277344, 0.023582912445068358, 0.023500799179077148, 0.023371776580810546, 0.023627616882324218, 0.023865631103515625, 0.023877504348754883, 0.023764640808105468, 0.023744863510131838, 0.023748607635498048, 0.023919904708862304, 0.02389852714538574, 0.023771455764770508, 0.023742464065551756, 0.02361248016357422, 0.023753664016723634, 0.024004512786865235, 0.024076383590698244, 0.023791616439819335, 0.023809951782226564]",tokens/s,42.26895357766238,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 
182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking 
backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4049.870848,4705.878016,0.0,4303.355904,4034.388992,s,1,10.696091796875,10.696091796875,0.0,10.696091796875,10.696091796875,10.696091796875,10.696091796875,[10.696091796875],,kWh,9.19242741083508e-05,1.0132546015705288e-05,2.7585022068003784e-05,0.00012964184219205988,,MB,4000.620544,4764.598272,0.0,4347.396096,4202.566656,s,10,1.910161102294922,0.1910161102294922,0.0003782256772284843,0.1909824676513672,0.19129761352539063,0.19158074493408203,0.19180725006103516,"[0.19095318603515626, 0.19037971496582032, 0.19079916381835937, 0.1912346954345703, 0.19094537353515625, 0.1906317138671875, 0.19101174926757813, 0.19123158264160156, 0.19186387634277344, 
0.19111004638671875]",tokens/s,1340.201094517286,kWh,5.602813992295423e-06,6.178850302104647e-07,3.713524983396299e-06,9.934224005902186e-06,tokens/kWh,25769501.457577724,MB,4004.671488,4764.598272,0.0,4347.396096,4213.08416,s,10,16.391986938476563,1.6391986938476564,0.012439895530622361,1.6444758300781248,1.6486777587890626,1.6538579223632812,1.6580020532226563,"[1.6590380859375, 1.616820556640625, 1.647526611328125, 1.646387451171875, 1.642845703125, 1.6474293212890625, 1.64610595703125, 1.6315677490234375, 1.6323258056640626, 1.621939697265625]",tokens/s,38.43341276225729,kWh,4.7432615342704704e-05,5.231564961015069e-06,3.1496808740403095e-05,8.416098904412287e-05,tokens/kWh,748565.3473840612,,s,630,16.38920757865904,0.026014615204220725,0.0004959527044790329,0.0259661283493042,0.026404332160949708,0.026742088603973386,0.02831885852813721,"[0.027152799606323243, 0.02630854415893555, 0.02647897529602051, 0.026236928939819337, 0.02615091133117676, 0.02619808006286621, 0.026085311889648438, 0.025985055923461915, 0.02603824043273926, 0.026019744873046875, 0.026168960571289063, 0.025993696212768556, 0.026189823150634766, 0.02614214324951172, 0.026124832153320312, 0.02615283203125, 0.026098880767822265, 0.026340320587158204, 0.026343423843383788, 0.026068992614746093, 0.026451904296875, 0.026516639709472656, 0.02626598358154297, 0.026086015701293944, 0.02693724822998047, 0.02928374481201172, 0.02656716728210449, 0.02702332878112793, 0.02615839958190918, 0.02601763153076172, 0.026030271530151368, 0.02607798385620117, 0.02611404800415039, 0.026300735473632812, 0.02612396812438965, 0.02617888069152832, 0.02613747215270996, 0.0262490234375, 0.02614271926879883, 0.026116287231445313, 0.026330623626708984, 0.026227008819580077, 0.026403135299682617, 0.026318527221679686, 0.026368000030517577, 0.026105535507202147, 0.026068864822387697, 0.026206304550170898, 0.026249568939208986, 0.026238431930541994, 0.026270240783691407, 0.026896127700805662, 0.030252544403076172, 0.026200063705444337, 0.02615100860595703, 0.026309600830078127, 0.026418880462646486, 0.02591744041442871, 0.026050559997558592, 0.02586419105529785, 0.02563462448120117, 0.02562067222595215, 0.025647104263305662, 0.02667862319946289, 0.025845695495605468, 0.02537548828125, 0.02530713653564453, 0.025187488555908202, 0.025792671203613282, 0.025627328872680665, 0.025629791259765625, 0.025647584915161132, 0.025772480010986327, 0.025794368743896484, 0.02567967987060547, 0.025761215209960938, 0.025627296447753908, 0.025427711486816405, 0.025415359497070314, 0.025529407501220704, 0.02541744041442871, 0.025606176376342774, 0.0255284481048584, 0.025489248275756837, 0.0254006404876709, 0.025446559906005858, 0.02543471908569336, 0.02544361686706543, 0.02537750434875488, 0.025394847869873047, 0.025368160247802734, 0.02554252815246582, 0.025410303115844725, 0.025405567169189455, 0.025607456207275392, 0.025504480361938475, 0.025554943084716796, 0.02619545555114746, 0.025571840286254883, 0.025672800064086915, 0.025662368774414062, 0.025737056732177733, 0.02564956855773926, 0.025870208740234376, 0.025726848602294922, 0.025747360229492186, 0.026345567703247072, 0.025611616134643553, 0.02559452819824219, 0.025602048873901367, 0.025853952407836913, 0.0258374080657959, 0.02604662322998047, 0.025634815216064453, 0.025688095092773436, 0.025669567108154295, 0.02554447937011719, 0.02553500747680664, 0.025984735488891603, 0.025862207412719728, 0.025988447189331056, 0.02585795211791992, 0.025848512649536134, 0.02566044807434082, 0.02575584030151367, 
0.025776575088500977, 0.026611007690429688, 0.026096319198608397, 0.025980447769165037, 0.02628451156616211, 0.02586134338378906, 0.025899808883666994, 0.025898752212524415, 0.02606719970703125, 0.026052608489990234, 0.02599068832397461, 0.025824832916259765, 0.025850624084472657, 0.02587993621826172, 0.02581823921203613, 0.025884096145629882, 0.026042591094970702, 0.02592585563659668, 0.026076160430908202, 0.025798463821411134, 0.025895904541015625, 0.025821056365966797, 0.026078527450561523, 0.025963104248046875, 0.026042591094970702, 0.02597603225708008, 0.025811744689941407, 0.025831424713134765, 0.02601580810546875, 0.025960384368896486, 0.025923583984375, 0.026210399627685548, 0.026058879852294922, 0.026148639678955077, 0.026421152114868163, 0.0261243839263916, 0.025947456359863282, 0.02598582458496094, 0.025884416580200194, 0.026144927978515625, 0.026185728073120116, 0.02627993583679199, 0.027183103561401366, 0.026658815383911134, 0.026203935623168945, 0.026174911499023436, 0.026089792251586915, 0.026271936416625976, 0.026583328247070312, 0.02832614326477051, 0.027219295501708984, 0.02618409538269043, 0.026146688461303712, 0.026007680892944335, 0.02600707244873047, 0.026094079971313477, 0.02626483154296875, 0.02598518371582031, 0.026059328079223634, 0.026287456512451172, 0.026137279510498046, 0.026159072875976564, 0.025976831436157227, 0.02665705680847168, 0.026597375869750976, 0.026203680038452148, 0.026401504516601563, 0.02616691207885742, 0.026169471740722657, 0.026009599685668947, 0.029142335891723634, 0.028283584594726564, 0.026222368240356446, 0.027504255294799804, 0.02602262306213379, 0.02597395133972168, 0.026022592544555665, 0.02613999938964844, 0.026668960571289063, 0.026083168029785157, 0.026168224334716796, 0.026181631088256836, 0.02622630310058594, 0.025974239349365234, 0.026051488876342774, 0.02611737632751465, 0.026195743560791015, 0.02607334327697754, 0.02587516784667969, 0.026005664825439454, 0.026092512130737305, 0.026401023864746093, 0.02590496063232422, 0.02604444885253906, 0.025985088348388672, 0.02582806396484375, 0.025728864669799803, 0.025641120910644532, 0.02573516845703125, 0.025841663360595703, 0.025858047485351563, 0.025866239547729493, 0.025855968475341797, 0.026134559631347656, 0.02609971237182617, 0.026038272857666016, 0.02612633514404297, 0.02600489616394043, 0.02609008026123047, 0.026135583877563477, 0.026300960540771485, 0.026957792282104494, 0.025911808013916016, 0.025906400680541994, 0.02591766357421875, 0.025926271438598634, 0.025943552017211914, 0.025929567337036132, 0.025839839935302734, 0.025821535110473633, 0.02574950408935547, 0.025686208724975585, 0.025672607421875, 0.025572256088256837, 0.025631872177124024, 0.02574425506591797, 0.025717824935913087, 0.026550271987915038, 0.02588083267211914, 0.02579167938232422, 0.025817663192749023, 0.02573516845703125, 0.02591663932800293, 0.026065696716308595, 0.025786624908447266, 0.026011392593383788, 0.025710399627685548, 0.025657087326049804, 0.025661888122558593, 0.02566969680786133, 0.025800640106201173, 0.025683391571044923, 0.0256331844329834, 0.026140832901000978, 0.025621824264526367, 0.025772960662841796, 0.025887615203857423, 0.025813087463378907, 0.025962751388549806, 0.025944320678710938, 0.02588479995727539, 0.025999296188354493, 0.026003200531005858, 0.02595302391052246, 0.02589823913574219, 0.026067455291748046, 0.02599075126647949, 0.02610745620727539, 0.025986976623535156, 0.026039072036743164, 0.02598031997680664, 0.0261843204498291, 0.026220479965209962, 0.026300735473632812, 
0.02599103927612305, 0.02613043212890625, 0.026269311904907226, 0.026761600494384766, 0.026120256423950196, 0.026204095840454102, 0.026179584503173828, 0.026250911712646485, 0.026064672470092774, 0.026296895980834963, 0.02613657569885254, 0.026049760818481444, 0.02605891227722168, 0.026491519927978515, 0.026216447830200194, 0.02612633514404297, 0.026299903869628907, 0.02687027168273926, 0.02599692726135254, 0.026200000762939452, 0.026844736099243163, 0.026343807220458985, 0.026386943817138672, 0.026346975326538086, 0.02633750343322754, 0.02641676712036133, 0.026790016174316405, 0.026153919219970703, 0.026132928848266603, 0.026022432327270507, 0.02689411163330078, 0.02598940849304199, 0.025758783340454103, 0.0256909122467041, 0.025654592514038087, 0.025516799926757813, 0.026986495971679687, 0.02565750312805176, 0.026359296798706053, 0.025815391540527345, 0.025887775421142577, 0.025754592895507813, 0.02660966491699219, 0.026206207275390626, 0.02611814308166504, 0.025956352233886718, 0.025984960556030272, 0.026132511138916015, 0.02671824073791504, 0.026068992614746093, 0.026074367523193358, 0.026925823211669923, 0.025976800918579103, 0.026015775680541992, 0.026207712173461913, 0.02637264060974121, 0.02619100761413574, 0.02618864059448242, 0.02631260871887207, 0.0260230712890625, 0.02592620849609375, 0.025928319931030272, 0.026105600357055662, 0.026108959197998046, 0.025826271057128907, 0.026007551193237305, 0.02594611167907715, 0.025835552215576173, 0.02563657569885254, 0.02585366439819336, 0.025878335952758787, 0.02578915214538574, 0.02581235122680664, 0.02598931121826172, 0.02590265655517578, 0.02594870376586914, 0.02633772850036621, 0.025851743698120117, 0.025749568939208985, 0.026228736877441407, 0.02581430435180664, 0.025763776779174803, 0.025926431655883788, 0.0283602237701416, 0.028809375762939453, 0.02615443229675293, 0.026604415893554688, 0.026004959106445312, 0.025867904663085937, 0.02677849578857422, 0.025943872451782226, 0.025904224395751952, 0.0259202880859375, 0.025940095901489258, 0.025869632720947267, 0.025878719329833984, 0.025827840805053712, 0.026602880477905273, 0.02603830337524414, 0.026569311141967773, 0.026670848846435547, 0.026283424377441408, 0.026415584564208984, 0.02830102348327637, 0.026017824172973634, 0.02612076759338379, 0.02597235107421875, 0.02595680046081543, 0.025833471298217774, 0.02612633514404297, 0.025751935958862306, 0.025691904067993165, 0.02559577560424805, 0.025820192337036134, 0.026188959121704103, 0.027057823181152345, 0.025757856369018554, 0.026713119506835938, 0.025908384323120117, 0.025871648788452148, 0.026156608581542968, 0.026143680572509764, 0.026167648315429688, 0.02595996856689453, 0.026167455673217772, 0.026048511505126954, 0.02611520004272461, 0.026297216415405274, 0.02644144058227539, 0.026163679122924804, 0.02613465690612793, 0.02595612716674805, 0.026177440643310547, 0.026175487518310548, 0.026247167587280275, 0.026033344268798827, 0.026118656158447266, 0.026073408126831055, 0.026004800796508788, 0.025945152282714844, 0.02601091194152832, 0.025954271316528322, 0.025966976165771486, 0.025955615997314455, 0.025899808883666994, 0.026059968948364258, 0.02595097541809082, 0.026247167587280275, 0.02614476776123047, 0.025947263717651367, 0.02593836784362793, 0.025874784469604492, 0.026933055877685547, 0.02585539245605469, 0.02588960075378418, 0.025919424057006837, 0.025753631591796874, 0.025659391403198242, 0.02552627182006836, 0.02574652862548828, 0.025711584091186523, 0.025610080718994142, 0.02534806442260742, 0.025435583114624023, 
0.025346271514892577, 0.025367008209228516, 0.025309024810791017, 0.02542198371887207, 0.025430015563964844, 0.025763328552246095, 0.02541993522644043, 0.02553891181945801, 0.025367584228515625, 0.02542880058288574, 0.02560220718383789, 0.02591923141479492, 0.025663679122924804, 0.02577619171142578, 0.02586591911315918, 0.02619379234313965, 0.0257806396484375, 0.02610588836669922, 0.026020000457763672, 0.026016864776611328, 0.025649728775024413, 0.02558585548400879, 0.025593856811523437, 0.02678374481201172, 0.025812992095947264, 0.025784191131591798, 0.025848960876464842, 0.02596963119506836, 0.025910720825195313, 0.025969024658203124, 0.026056928634643553, 0.026257343292236328, 0.026282175064086914, 0.026238847732543945, 0.026277471542358398, 0.026097728729248048, 0.02609548759460449, 0.026101760864257813, 0.02625766372680664, 0.026140384674072266, 0.026330751419067384, 0.026427391052246094, 0.026190719604492187, 0.026095775604248046, 0.026297536849975586, 0.02605161666870117, 0.026174528121948242, 0.025965280532836914, 0.02605244827270508, 0.0261529598236084, 0.026093568801879883, 0.02661155128479004, 0.02595439910888672, 0.025819007873535155, 0.02677142333984375, 0.028757375717163088, 0.025639551162719727, 0.025651615142822267, 0.02552115249633789, 0.025571935653686522, 0.026454336166381837, 0.027133216857910158, 0.025722688674926757, 0.02568230438232422, 0.025823776245117187, 0.02572697639465332, 0.025477024078369142, 0.025933855056762694, 0.025730623245239257, 0.025786880493164063, 0.02575564765930176, 0.02592758369445801, 0.025840991973876952, 0.025939968109130858, 0.026188159942626955, 0.026028640747070314, 0.025902847290039062, 0.025761823654174804, 0.02594985580444336, 0.025835968017578124, 0.025823232650756835, 0.025930719375610353, 0.025815776824951172, 0.02582908821105957, 0.025763519287109377, 0.026368831634521483, 0.026125791549682618, 0.026096000671386718, 0.025903455734252928, 0.02627155113220215, 0.027724895477294922, 0.026067359924316406, 0.025829984664916993, 0.026443296432495118, 0.02563929557800293, 0.02561414337158203, 0.025445663452148437, 0.02549247932434082, 0.025375999450683594, 0.025352863311767577, 0.02534774398803711, 0.025603424072265624, 0.025789440155029295, 0.025601696014404297, 0.025487712860107422, 0.025486879348754883, 0.025516511917114258, 0.025511936187744142, 0.025456640243530275, 0.025479167938232423, 0.02556723213195801, 0.02582681655883789, 0.025710527420043944, 0.025852479934692384, 0.026415103912353514, 0.025727903366088867, 0.025881919860839844, 0.025548511505126954, 0.02557107162475586, 0.025592832565307616, 0.02572425651550293, 0.025762464523315428, 0.025792512893676758, 0.025769216537475586, 0.026079999923706056, 0.02629360008239746, 0.02584169578552246, 0.026036863327026368, 0.02575974464416504, 0.02592153549194336, 0.025792192459106446, 0.025859615325927735, 0.026165727615356446, 0.025991647720336915, 0.02603628730773926, 0.025859872817993165, 0.02611609649658203, 0.026398143768310546, 0.026147392272949217, 0.025933311462402343, 0.02595686340332031, 0.026564863204956053, 0.02574457550048828, 0.025666112899780272, 0.027158527374267577, 0.025731103897094727, 0.02563817596435547, 0.02560089683532715, 0.02550886344909668, 0.025506624221801756, 0.02540729522705078, 0.025488767623901367, 0.02547760009765625, 0.025796960830688477, 0.025861888885498046, 0.025788671493530275, 0.025602048873901367, 0.025519359588623048, 0.025510400772094727, 0.025540864944458008, 0.02550601577758789, 0.025500864028930665, 0.02549001693725586, 0.026148895263671874, 
0.025400991439819335, 0.0253504638671875, 0.025293855667114257, 0.025281471252441408, 0.025391136169433594, 0.025452415466308595, 0.025415807723999023, 0.025425247192382813, 0.025516000747680664, 0.025375263214111328, 0.025402528762817383, 0.02537763214111328, 0.025284767150878906]",tokens/s,38.43993048329831,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,3162.034176,4423.876608,0.0,4028.628992,3944.723968,s,1,10.139919921875,10.139919921875,0.0,10.139919921875,10.139919921875,10.139919921875,10.139919921875,[10.139919921875],,kWh,9.448301945834221e-05,1.0414976070960834e-05,2.917669000800094e-05,0.00013407468553730398,,MB,2943.471616,4763.615232,0.0,4353.687552,4305.05728,s,10,1.0864285354614256,0.10864285354614256,0.0009323365593873592,0.10896449661254883,0.10917128753662109,0.10925636291503907,0.10932442321777344,"[0.108942626953125, 0.10898636627197265, 0.10888729858398437, 0.10912060546875, 0.10934143829345704, 0.10915238189697266, 0.10807360076904297, 0.10881046295166015, 0.10909606170654297, 0.10601769256591796]",tokens/s,2356.3445881994644,kWh,3.251938411296191e-06,3.586319725762157e-07,2.1665819801776804e-06,5.777152364050087e-06,tokens/kWh,44312488.89903443,MB,2945.818624,4763.615232,0.0,4353.687552,4305.05984,s,10,21.81330712890625,2.1813307128906247,0.008862422337077453,2.181543701171875,2.1906317626953125,2.1919728637695313,2.1930457446289062,"[2.188095703125, 2.190167236328125, 2.19331396484375, 2.174505126953125, 2.1765244140625, 2.18593408203125, 2.170438720703125, 2.1771533203125, 2.1668408203125, 2.190333740234375]",tokens/s,28.881452788291124,kWh,6.324622699662207e-05,6.975845056279136e-06,3.424028047862247e-05,0.0001044623525315237,tokens/kWh,603088.0836326985,,s,630,21.81070582580564,0.03462016797746931,0.0005132230684201481,0.034509664535522455,0.0350792552947998,0.0353736967086792,0.03716842910766602,"[0.03538739013671875, 0.03503308868408203, 0.03500851058959961, 0.034581729888916016, 0.034584415435791015, 0.034508766174316405, 0.034425216674804686, 0.034523521423339844, 0.03444329452514648, 0.03458272171020508, 0.03487948989868164, 0.03455385589599609, 0.034490367889404294, 0.03480985641479492, 0.03461939239501953, 0.03500646209716797, 0.03496259307861328, 0.03493465423583984, 0.03535696029663086, 0.03476755142211914, 0.03455980682373047, 0.03481411361694336, 0.03457846450805664, 0.03494911956787109, 0.03481100845336914, 0.03465468978881836, 0.03455340957641601, 0.03461356735229492, 0.03443075180053711, 0.03442953491210937, 0.03432259368896484, 0.03476684951782227, 0.03417497634887695, 0.034326526641845705, 0.03474051284790039, 0.034445022583007814, 0.03461503982543945, 0.03466870498657226, 0.03456156921386719, 0.034665023803710934, 0.03441664123535156, 0.03457987213134766, 0.034259552001953124, 0.03418838500976563, 0.0344334716796875, 0.034353633880615235, 0.03444057464599609, 
0.03480384063720703, 0.034728446960449216, 0.037367809295654295, 0.034961406707763674, 0.03476275253295898, 0.03463987350463867, 0.034697216033935545, 0.03470950317382813, 0.03469107055664063, 0.034442272186279294, 0.03448112106323242, 0.03439206314086914, 0.034598209381103515, 0.03485356903076172, 0.03717529678344727, 0.03470131301879883, 0.03522969436645508, 0.03618815994262695, 0.0348711051940918, 0.034547039031982425, 0.035461982727050784, 0.036808704376220705, 0.03485628890991211, 0.03467913436889648, 0.03490028762817383, 0.03580723190307617, 0.03491984176635742, 0.034877857208251956, 0.03518278503417969, 0.03449651336669922, 0.03465011215209961, 0.03480166244506836, 0.03527679824829102, 0.034560001373291016, 0.03447795104980469, 0.03453305435180664, 0.03462393569946289, 0.03460675048828125, 0.034551647186279295, 0.03454617691040039, 0.03439811325073242, 0.03451500701904297, 0.034612319946289063, 0.03459900665283203, 0.035543903350830075, 0.035125152587890625, 0.03527811050415039, 0.03485555267333985, 0.0345797119140625, 0.03456915283203125, 0.03445555114746094, 0.03445555114746094, 0.034465473175048826, 0.0344290885925293, 0.03450896072387695, 0.03436921691894531, 0.03450681686401367, 0.03479283142089844, 0.0345337905883789, 0.034466270446777345, 0.03457369613647461, 0.03440435028076172, 0.03450646209716797, 0.03443382263183594, 0.03447814559936523, 0.03442489624023438, 0.03464601516723633, 0.03480575942993164, 0.034988033294677735, 0.034799617767333986, 0.03470950317382813, 0.03457024002075195, 0.034336769104003906, 0.03439820861816406, 0.03457331085205078, 0.034457759857177736, 0.03474313735961914, 0.0348322868347168, 0.03470479965209961, 0.035405601501464844, 0.03495062255859375, 0.03476534271240234, 0.03467673492431641, 0.03451084899902344, 0.03451254272460937, 0.034285919189453125, 0.03433881759643555, 0.03480780792236328, 0.03479347229003906, 0.0348671989440918, 0.0348221435546875, 0.034754558563232424, 0.034590721130371094, 0.034648063659667966, 0.03437088012695313, 0.03443513488769531, 0.03448691177368164, 0.03458796691894531, 0.03470099258422851, 0.03478192138671875, 0.03465859222412109, 0.03509849548339844, 0.034899456024169925, 0.03473436737060547, 0.03448796844482422, 0.034662113189697266, 0.03426553726196289, 0.03439260864257813, 0.03489484786987305, 0.034423809051513675, 0.03466236877441406, 0.03502054214477539, 0.03528054428100586, 0.03485299301147461, 0.035001056671142575, 0.03490793609619141, 0.03477043151855469, 0.0346319694519043, 0.03445939254760742, 0.034443744659423826, 0.03430131149291992, 0.0340814094543457, 0.034412384033203125, 0.034861217498779296, 0.03501875305175781, 0.03528927993774414, 0.03508614349365234, 0.03504742431640625, 0.03521331024169922, 0.03507183837890625, 0.03508348846435547, 0.03534944152832031, 0.03530281448364258, 0.03534646224975586, 0.03532790374755859, 0.035154590606689455, 0.035168254852294925, 0.03540377426147461, 0.03496956634521484, 0.03473590469360351, 0.035028831481933594, 0.03512483215332031, 0.035402751922607424, 0.034958335876464845, 0.0347586555480957, 0.034907615661621094, 0.034514591217041014, 0.03465286254882813, 0.03465753555297851, 0.03452998352050781, 0.034382080078125, 0.034215614318847655, 0.034271808624267576, 0.03417625427246094, 0.03409356689453125, 0.03415407943725586, 0.03432080078125, 0.03439779281616211, 0.034636192321777344, 0.03439206314086914, 0.034340545654296874, 0.03420393753051758, 0.034358814239501954, 0.0344246711730957, 0.034375457763671874, 0.03425491333007812, 0.03466665649414063, 
0.034382495880126956, 0.03459827041625976, 0.03473881530761719, 0.03456950378417969, 0.034681312561035155, 0.03521356964111328, 0.03593011093139648, 0.034830337524414064, 0.03462960052490234, 0.03442076873779297, 0.03430806350708008, 0.03432825469970703, 0.03442723083496094, 0.034457023620605466, 0.03437420654296875, 0.03429580688476563, 0.03421388626098633, 0.03410528182983399, 0.03410480117797852, 0.03428003311157227, 0.03431628799438476, 0.03419340896606445, 0.034129920959472655, 0.034305759429931644, 0.03464015960693359, 0.03699097442626953, 0.03503513717651367, 0.03465216064453125, 0.03462348937988281, 0.034258846282958985, 0.03424470520019531, 0.0341071662902832, 0.03416086578369141, 0.034286880493164064, 0.03406460952758789, 0.03412223815917969, 0.03442483139038086, 0.03474374389648437, 0.03446169662475586, 0.034123870849609376, 0.03405408096313477, 0.03405337524414063, 0.034000926971435544, 0.03715161514282227, 0.03461939239501953, 0.034738174438476564, 0.03495727920532227, 0.03478940963745117, 0.034686817169189456, 0.03463753509521485, 0.03480825424194336, 0.03475843048095703, 0.03445126342773437, 0.03434947204589844, 0.03459481430053711, 0.034151649475097655, 0.0343785285949707, 0.034163871765136716, 0.03462643051147461, 0.03454492950439453, 0.03486995315551758, 0.03440639877319336, 0.03420528030395508, 0.0343392333984375, 0.03420569610595703, 0.03414371109008789, 0.0342817268371582, 0.03405033493041992, 0.034269184112548826, 0.034716926574707034, 0.03448684692382813, 0.03436761474609375, 0.03440166473388672, 0.03427734375, 0.03451561737060547, 0.03450067138671875, 0.03446934509277344, 0.03465827178955078, 0.03459331130981445, 0.03539971160888672, 0.03482419204711914, 0.03490611267089844, 0.03496345520019531, 0.03491020965576172, 0.0347393913269043, 0.03496364974975586, 0.034672286987304686, 0.03454665756225586, 0.03490371322631836, 0.034436607360839845, 0.03442134475708008, 0.03540518569946289, 0.03430694580078125, 0.03446076965332031, 0.034442142486572264, 0.03417497634887695, 0.0343633918762207, 0.03428281784057617, 0.03401388931274414, 0.03418080139160156, 0.03410966491699219, 0.035161598205566406, 0.0349005126953125, 0.03469292831420898, 0.03443059158325195, 0.034965534210205075, 0.034675521850585936, 0.034418689727783204, 0.0344370231628418, 0.03464371109008789, 0.03407206344604492, 0.03417993545532227, 0.034063423156738284, 0.03438278579711914, 0.035097984313964846, 0.03460492706298828, 0.0343109130859375, 0.034393505096435545, 0.03465071868896484, 0.03419456100463867, 0.03722943878173828, 0.03759638214111328, 0.03425513458251953, 0.034242111206054686, 0.03434108734130859, 0.03450508880615234, 0.03655100631713867, 0.03470489501953125, 0.03449478530883789, 0.03580742263793945, 0.035037185668945314, 0.03465011215209961, 0.03425075149536133, 0.034661983489990236, 0.035546913146972656, 0.03507878494262695, 0.035026943206787106, 0.03523583984375, 0.03490339279174805, 0.034546337127685546, 0.03462736129760742, 0.03440576171875, 0.03429430389404297, 0.03507436752319336, 0.03422822570800781, 0.03430115127563477, 0.03435708618164063, 0.03416096115112305, 0.03426572799682617, 0.03452694320678711, 0.03469692611694336, 0.034337024688720706, 0.03436316680908203, 0.03471971130371094, 0.03423494338989258, 0.03422208023071289, 0.03426038360595703, 0.03411004638671875, 0.034327934265136716, 0.034500606536865236, 0.03432447814941406, 0.034810497283935544, 0.03475001525878906, 0.034834495544433595, 0.03526022338867187, 0.034554847717285155, 0.03434659194946289, 0.034300193786621094, 
0.034144382476806644, 0.03457747268676758, 0.034872257232666015, 0.034576385498046876, 0.03452320098876953, 0.03423839950561523, 0.034103294372558594, 0.034207199096679686, 0.03395199966430664, 0.03404748916625976, 0.03407747268676758, 0.0339128303527832, 0.033914302825927736, 0.03436758422851562, 0.03526294326782226, 0.034846240997314454, 0.034691551208496096, 0.034610561370849606, 0.034374271392822266, 0.03436048126220703, 0.03418812942504883, 0.03469107055664063, 0.03437286376953125, 0.034105182647705075, 0.034255775451660156, 0.03461324691772461, 0.03479142379760742, 0.034479934692382815, 0.03427552032470703, 0.03419955062866211, 0.03521283340454102, 0.03430380630493164, 0.03440019226074219, 0.03425558471679688, 0.03420159912109375, 0.03383295822143555, 0.0341190414428711, 0.03439475250244141, 0.03479075241088867, 0.03478915023803711, 0.03463257598876953, 0.034334720611572264, 0.0346333122253418, 0.034671009063720705, 0.034455169677734376, 0.03461977767944336, 0.03432243347167969, 0.03423788833618164, 0.034253376007080075, 0.03528444671630859, 0.034896385192871096, 0.03472796630859375, 0.03426303863525391, 0.03448831939697266, 0.03466035079956055, 0.03435724639892578, 0.03428953552246094, 0.034375808715820313, 0.03428704071044922, 0.03538739013671875, 0.0346478385925293, 0.03445987319946289, 0.034533374786376955, 0.034240543365478514, 0.034676704406738285, 0.03415260696411133, 0.03426838302612305, 0.03423091125488281, 0.0341682243347168, 0.034324417114257814, 0.034571006774902345, 0.03437942504882813, 0.034590110778808594, 0.03449932861328125, 0.034164833068847655, 0.037119998931884765, 0.034484222412109376, 0.034332481384277344, 0.034144161224365234, 0.0343059196472168, 0.03420585632324219, 0.03451110458374024, 0.034869247436523435, 0.034743648529052734, 0.0345258560180664, 0.03438777542114258, 0.034264671325683595, 0.034313823699951174, 0.03429478454589844, 0.03442227172851563, 0.03442329788208008, 0.034070526123046875, 0.034181121826171876, 0.0343078384399414, 0.034873054504394534, 0.03471404647827148, 0.03460300827026367, 0.03437932968139648, 0.03432463836669922, 0.03433478546142578, 0.03424492645263672, 0.03417497634887695, 0.034080768585205076, 0.034533374786376955, 0.03428102493286133, 0.03427372741699219, 0.03471769714355469, 0.034769054412841796, 0.03438985443115234, 0.034375679016113284, 0.03410739135742188, 0.03474431991577148, 0.034375137329101565, 0.03459945678710938, 0.03543215942382812, 0.03740409469604492, 0.0350731201171875, 0.03491814422607422, 0.034776287078857424, 0.034506752014160154, 0.034498687744140624, 0.03420598220825195, 0.034304000854492187, 0.03426303863525391, 0.033974273681640625, 0.034166145324707034, 0.03424505615234375, 0.034444671630859375, 0.03430688095092774, 0.034227745056152344, 0.036382560729980466, 0.035078239440917966, 0.03435984039306641, 0.034076576232910154, 0.034343006134033204, 0.03406380844116211, 0.0340906867980957, 0.03396217727661133, 0.03464860916137695, 0.03468828964233398, 0.03446636962890625, 0.034426433563232425, 0.03444403076171875, 0.0346861457824707, 0.03450124740600586, 0.034191551208496096, 0.034248767852783205, 0.034186176300048825, 0.0342108154296875, 0.03424665451049805, 0.034519264221191406, 0.03483363342285156, 0.03455779266357422, 0.034461536407470704, 0.034366336822509766, 0.03419443130493164, 0.03432115173339844, 0.03420800018310547, 0.03411264038085938, 0.03432742309570312, 0.03422003173828125, 0.034070560455322266, 0.03454528045654297, 0.034783584594726566, 0.03443270492553711, 0.03435961532592773, 
0.034399551391601564, 0.03446956634521484, 0.034452255249023435, 0.03435542297363281, 0.034404319763183595, 0.03434409713745117, 0.034214336395263674, 0.03435974502563476, 0.03442483139038086, 0.03465420913696289, 0.034725822448730466, 0.034389472961425784, 0.034279647827148436, 0.03436991882324219, 0.03427900695800781, 0.03428803253173828, 0.034088958740234376, 0.034426048278808595, 0.03413670349121094, 0.0355302734375, 0.034889217376708984, 0.034751487731933595, 0.03433062362670898, 0.034617088317871095, 0.03422233581542969, 0.034455360412597655, 0.03429804611206055, 0.03434239959716797, 0.034319038391113284, 0.03494073486328125, 0.03477612686157226, 0.03458758544921875, 0.03472515106201172, 0.03469590377807617, 0.03427081680297851, 0.03440201568603515, 0.034493118286132815, 0.03430809783935547, 0.034106689453125, 0.03428422546386719, 0.034625408172607425, 0.0350079345703125, 0.034570945739746096, 0.034457599639892575, 0.034471519470214845, 0.03428803253173828, 0.03433039855957031, 0.03435951995849609, 0.03451036834716797, 0.03448403167724609, 0.034151073455810546, 0.03410124969482422, 0.03423027038574219, 0.03497574234008789, 0.034664447784423826, 0.03458598327636719, 0.034447391510009764, 0.034482112884521486, 0.03431827163696289, 0.03467702484130859, 0.03444364929199219, 0.03444035339355469, 0.03432745742797851, 0.03461251068115234, 0.03448400115966797, 0.03810604858398438, 0.03489996719360351, 0.034705406188964845, 0.034699264526367186, 0.035168254852294925, 0.034813697814941404, 0.03469952011108399, 0.03594649505615234, 0.03774259185791016, 0.034887680053710936, 0.03483011245727539, 0.03527702331542969, 0.03587071990966797, 0.03531491088867188, 0.03511785507202148, 0.03482150268554687, 0.03575462341308594]",tokens/s,28.88489740000097,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in 
load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,2063.48288,2350.77632,0.0,1948.254208,1873.42336,s,1,9.317431640625,9.317431640625,0.0,9.317431640625,9.317431640625,9.317431640625,9.317431640625,[9.317431640625],,kWh,5.696934180004973e-05,6.276921012965676e-06,1.671223559199242e-05,7.995849840500783e-05,,MB,2125.438976,2451.439616,0.0,2034.23744,2015.14496,s,10,0.7627067565917968,0.07627067565917969,0.00020049643510992293,0.07619273757934569,0.07638459777832031,0.07661407470703126,0.07679765625,"[0.07684355163574219, 0.07617900848388671, 0.07624060821533203, 0.07618256378173828, 0.07633360290527344, 0.07617276763916016, 0.07613426971435547, 0.07620291137695312, 0.0761302719116211, 0.07628720092773437]",tokens/s,3356.4669224113354,kWh,2.2809562915690878e-06,2.5154774129715156e-07,1.5134408808905762e-06,4.045944913756816e-06,tokens/kWh,63273229.234921575,MB,2131.31264,2451.439616,0.0,2034.23744,2015.14752,s,10,13.397155029296876,1.3397155029296877,0.014957909601761661,1.3356490478515624,1.3598152465820312,1.3655943298339843,1.3702175964355467,"[1.3713734130859374, 1.358531005859375, 1.34696630859375, 1.3358153076171875, 1.322553955078125, 1.3227249755859376, 1.3260450439453124, 1.3354827880859375, 1.3353814697265625, 1.34228076171875]",tokens/s,47.02490929024237,kWh,3.9069423493011494e-05,4.3089623771555355e-06,2.179663332270963e-05,6.517501919287667e-05,tokens/kWh,966628.0237457239,,s,630,13.394967271804799,0.021261852812388585,0.0004476710005862764,0.021167183876037596,0.021768806648254393,0.02196435356140137,0.022463609409332275,"[0.022335071563720704, 0.02182310485839844, 0.02186524772644043, 0.022026464462280272, 0.02180851173400879, 0.02195088005065918, 0.021963808059692384, 0.021915775299072265, 0.022414016723632812, 0.021928255081176757, 
0.021907199859619142, 0.02201740837097168, 0.02378745651245117, 0.025467071533203125, 0.022131200790405273, 0.022040063858032227, 0.022301183700561524, 0.021768192291259765, 0.021727231979370116, 0.021811199188232423, 0.021774335861206053, 0.02180694389343262, 0.021825696945190428, 0.02174950408935547, 0.021731584548950196, 0.021958080291748047, 0.021724992752075196, 0.02161257553100586, 0.021635456085205076, 0.021541215896606444, 0.02138857650756836, 0.021370687484741212, 0.021291040420532228, 0.02154310417175293, 0.021750015258789064, 0.021993408203125, 0.021780895233154296, 0.02203990364074707, 0.02165158462524414, 0.021570016860961914, 0.02147123146057129, 0.021411712646484376, 0.02136016082763672, 0.021297664642333985, 0.021250303268432618, 0.021491296768188478, 0.021590431213378905, 0.021599552154541016, 0.021492416381835938, 0.021604000091552736, 0.021375328063964843, 0.02129315185546875, 0.021175392150878908, 0.0210317440032959, 0.020993951797485352, 0.02113497543334961, 0.021107168197631837, 0.02140185546875, 0.021506048202514647, 0.021607423782348634, 0.0215600643157959, 0.021644607543945312, 0.022000064849853517, 0.022397663116455076, 0.021790719985961913, 0.02172096061706543, 0.021809312820434572, 0.021645471572875975, 0.021503423690795897, 0.021334400177001955, 0.02125791931152344, 0.021203264236450196, 0.021151168823242188, 0.021268159866333007, 0.021559711456298827, 0.02170745658874512, 0.021790496826171873, 0.021964799880981444, 0.021743616104125976, 0.021430112838745116, 0.021448415756225588, 0.021406080245971678, 0.02144655990600586, 0.021534591674804687, 0.021360767364501952, 0.021672096252441406, 0.021501951217651367, 0.021493343353271483, 0.02160406494140625, 0.02167670440673828, 0.021648832321166992, 0.021565343856811522, 0.021646015167236327, 0.02165353584289551, 0.021589088439941406, 0.021520511627197265, 0.02136128044128418, 0.021618816375732423, 0.02147737693786621, 0.021690528869628908, 0.02174140739440918, 0.021635072708129883, 0.02163020706176758, 0.02147724723815918, 0.021389696121215822, 0.02155366325378418, 0.02126848030090332, 0.021475263595581055, 0.02159987258911133, 0.02151625633239746, 0.021391040802001954, 0.02134281539916992, 0.02127686309814453, 0.021344127655029296, 0.02160867118835449, 0.021620512008666992, 0.021783903121948243, 0.021612640380859374, 0.021512895584106444, 0.02139491271972656, 0.0214716796875, 0.02154044723510742, 0.0216048641204834, 0.021845855712890626, 0.02158608055114746, 0.02185830307006836, 0.022277984619140625, 0.021652095794677733, 0.021729696273803712, 0.021690271377563478, 0.021696512222290038, 0.021797056198120116, 0.021910655975341798, 0.021923999786376953, 0.021672351837158203, 0.021443967819213868, 0.021523199081420898, 0.0216079044342041, 0.02168067169189453, 0.02135603141784668, 0.02143222427368164, 0.021631359100341797, 0.02167625617980957, 0.021420032501220702, 0.02124553680419922, 0.02135503959655762, 0.021022560119628907, 0.02099363136291504, 0.02131337547302246, 0.02115622329711914, 0.021066240310668945, 0.020954656600952148, 0.020872575759887695, 0.02103113555908203, 0.02104528045654297, 0.02131135940551758, 0.021107200622558595, 0.021514432907104492, 0.021315200805664063, 0.021533184051513672, 0.021276544570922852, 0.02111926460266113, 0.020952512741088867, 0.02194867134094238, 0.0218089599609375, 0.021627103805541992, 0.0212740478515625, 0.021080575942993163, 0.02107379150390625, 0.02240540885925293, 0.02216707229614258, 0.022034080505371093, 0.02113814353942871, 0.021669408798217774, 0.021346080780029298, 
0.02127020835876465, 0.021339168548583986, 0.0213787841796875, 0.021310911178588868, 0.021190975189208986, 0.020918079376220703, 0.020783487319946288, 0.020840768814086915, 0.020840448379516603, 0.020808736801147462, 0.02105641555786133, 0.021047359466552736, 0.02108415985107422, 0.021007360458374022, 0.021304895401000976, 0.021017311096191406, 0.02166579246520996, 0.020893503189086914, 0.02075791931152344, 0.020692991256713866, 0.020615903854370118, 0.02117827224731445, 0.021125280380249023, 0.020977664947509765, 0.020827487945556642, 0.02086160087585449, 0.021282720565795898, 0.02254841613769531, 0.021162143707275392, 0.02109561538696289, 0.021230400085449217, 0.02087116813659668, 0.020839744567871094, 0.02084864044189453, 0.024189695358276368, 0.022792127609252928, 0.021466304779052734, 0.02123014450073242, 0.021112159729003908, 0.020986719131469725, 0.02090985679626465, 0.020723648071289062, 0.02082774353027344, 0.021179391860961915, 0.021058752059936525, 0.020953439712524415, 0.02130463981628418, 0.021750688552856445, 0.021507232666015626, 0.02134886360168457, 0.021115232467651367, 0.021039136886596678, 0.020961248397827148, 0.021106687545776368, 0.020895904541015625, 0.020887392044067383, 0.020923711776733397, 0.02092915153503418, 0.02124345588684082, 0.021290752410888673, 0.021177087783813477, 0.02125132751464844, 0.021322496414184572, 0.021333728790283203, 0.021120704650878907, 0.021059904098510742, 0.020981727600097658, 0.02107321548461914, 0.02095622444152832, 0.021055423736572265, 0.020948448181152345, 0.02097551918029785, 0.02114009666442871, 0.02190665626525879, 0.02142915153503418, 0.02128678321838379, 0.02104934310913086, 0.021760000228881835, 0.021210464477539062, 0.02112371253967285, 0.02148969650268555, 0.02126345634460449, 0.02104025650024414, 0.02095804786682129, 0.020902816772460937, 0.020823583602905274, 0.020918272018432618, 0.021016191482543946, 0.02089049530029297, 0.020895488739013673, 0.02118681526184082, 0.021542560577392577, 0.02143471908569336, 0.021530624389648437, 0.02105548858642578, 0.020989503860473633, 0.020927072525024414, 0.020911968231201172, 0.020868671417236327, 0.021393856048583983, 0.021091999053955077, 0.021225088119506835, 0.021282976150512695, 0.021105215072631835, 0.020951040267944337, 0.020850528717041017, 0.02089107131958008, 0.020981727600097658, 0.020936735153198244, 0.021494367599487304, 0.02125164794921875, 0.020810304641723634, 0.02083625602722168, 0.020913951873779296, 0.020766271591186523, 0.020837120056152344, 0.020852544784545898, 0.020803775787353516, 0.020792415618896484, 0.020781984329223634, 0.020926464080810548, 0.020837568283081056, 0.020748640060424806, 0.020736703872680663, 0.020833791732788084, 0.02078544044494629, 0.021126976013183595, 0.020871295928955078, 0.021084224700927735, 0.020867136001586913, 0.020918207168579103, 0.020919807434082033, 0.020769184112548827, 0.020741216659545897, 0.02082467269897461, 0.020826080322265624, 0.020724159240722656, 0.02064793586730957, 0.02072809600830078, 0.02083951950073242, 0.020944063186645507, 0.020818431854248046, 0.02073855972290039, 0.020848512649536133, 0.020817567825317383, 0.020723615646362305, 0.020755104064941406, 0.020851551055908205, 0.020893728256225586, 0.020739103317260744, 0.0207314567565918, 0.020728063583374024, 0.02101193618774414, 0.020755231857299803, 0.020762176513671876, 0.020850879669189453, 0.02084390449523926, 0.02082815933227539, 0.020859487533569337, 0.020811487197875976, 0.020826431274414064, 0.021620256423950195, 0.020957664489746095, 
0.020788671493530274, 0.020843103408813478, 0.020725791931152343, 0.020913312911987305, 0.021201087951660157, 0.020961759567260742, 0.02078323173522949, 0.020940319061279297, 0.020863168716430663, 0.021997791290283203, 0.02109449577331543, 0.020946304321289064, 0.020817632675170897, 0.020892160415649414, 0.020889280319213867, 0.02094102478027344, 0.020943328857421874, 0.020964576721191407, 0.020787168502807617, 0.02094163131713867, 0.02114963150024414, 0.02117340850830078, 0.0216135368347168, 0.021456832885742187, 0.02129715156555176, 0.021456960678100587, 0.021049280166625977, 0.021118080139160156, 0.02127961540222168, 0.021419551849365233, 0.02098633575439453, 0.020921728134155274, 0.020839040756225585, 0.021166080474853514, 0.02105958366394043, 0.021127168655395507, 0.021339744567871095, 0.0211329288482666, 0.02092131233215332, 0.02105529594421387, 0.02210006332397461, 0.021555103302001954, 0.021767776489257814, 0.021735200881958006, 0.021985055923461914, 0.02148636817932129, 0.02106755256652832, 0.02098204803466797, 0.021323776245117186, 0.0209134407043457, 0.020753087997436522, 0.021528608322143556, 0.02085273551940918, 0.021051359176635743, 0.02080975914001465, 0.020817695617675783, 0.020854080200195312, 0.02085148811340332, 0.020907167434692384, 0.022262752532958983, 0.020960416793823242, 0.021109439849853515, 0.02101238441467285, 0.020967456817626955, 0.020928096771240235, 0.020822656631469726, 0.020868255615234376, 0.020822912216186523, 0.020772096633911132, 0.020730592727661132, 0.020782848358154297, 0.020785280227661133, 0.02089792060852051, 0.020840448379516603, 0.02068070411682129, 0.02099836730957031, 0.020737600326538087, 0.020779104232788087, 0.020719615936279297, 0.020732032775878907, 0.02102070426940918, 0.020868896484375, 0.02075881576538086, 0.02076063919067383, 0.02074118423461914, 0.021062559127807617, 0.02082377624511719, 0.021040767669677735, 0.02069353675842285, 0.020936063766479492, 0.02117398452758789, 0.021794975280761717, 0.02091427230834961, 0.02092838478088379, 0.02140847969055176, 0.021411104202270506, 0.02122137641906738, 0.021021472930908204, 0.02091961669921875, 0.020880064010620116, 0.020912128448486327, 0.021128608703613282, 0.020868864059448242, 0.02174172782897949, 0.021395456314086913, 0.02128281593322754, 0.02104319953918457, 0.021071168899536134, 0.020914112091064453, 0.020982112884521485, 0.020891263961791993, 0.020970176696777344, 0.021004383087158202, 0.02091007995605469, 0.020952896118164064, 0.021203104019165038, 0.020963584899902344, 0.020864736557006835, 0.020985919952392577, 0.02173321533203125, 0.021899423599243163, 0.021386272430419923, 0.02121196746826172, 0.021054719924926756, 0.021492544174194335, 0.02106582450866699, 0.021041183471679686, 0.021161279678344726, 0.020984256744384765, 0.021347679138183594, 0.021242015838623046, 0.02118115234375, 0.021009920120239257, 0.021191167831420898, 0.021091360092163086, 0.021146751403808593, 0.020965215682983398, 0.021020095825195314, 0.020910591125488282, 0.021012544631958008, 0.021192607879638673, 0.021204320907592774, 0.022756095886230468, 0.021693567276000976, 0.02167487907409668, 0.021452928543090822, 0.021382207870483397, 0.02139801597595215, 0.02122540855407715, 0.021079423904418946, 0.02115260887145996, 0.02121084785461426, 0.020974016189575194, 0.02125209617614746, 0.02106368064880371, 0.020977567672729493, 0.02088969612121582, 0.021096223831176757, 0.021233055114746095, 0.021213760375976564, 0.021067935943603514, 0.021119039535522462, 0.021174144744873048, 0.021156288146972655, 
0.02113302421569824, 0.021153791427612305, 0.02234543991088867, 0.02141049575805664, 0.021207103729248045, 0.021155839920043946, 0.021212480545043946, 0.021070528030395507, 0.02102272033691406, 0.021155839920043946, 0.021175487518310547, 0.021180992126464845, 0.021242111206054688, 0.021288671493530274, 0.021110815048217775, 0.021059167861938476, 0.020892127990722657, 0.020903680801391603, 0.020886175155639647, 0.020759424209594726, 0.020852960586547852, 0.020961536407470702, 0.021455295562744142, 0.021161983489990235, 0.021079296112060546, 0.021017120361328124, 0.021418079376220703, 0.021251264572143554, 0.020943008422851562, 0.02079635238647461, 0.02090729522705078, 0.02119327926635742, 0.021051103591918946, 0.020996416091918945, 0.02108137512207031, 0.021267135620117186, 0.02102662467956543, 0.02130963134765625, 0.021370624542236327, 0.021108991622924806, 0.020903263092041015, 0.021915712356567384, 0.02119753646850586, 0.021208959579467772, 0.02091823959350586, 0.021147167205810547, 0.02115225601196289, 0.02091007995605469, 0.020899839401245117, 0.020952991485595703, 0.02112214469909668, 0.020853599548339843, 0.021020639419555665, 0.02116828727722168, 0.021133344650268556, 0.021192447662353515, 0.021647615432739256, 0.0216428165435791, 0.02164803123474121, 0.02165875244140625, 0.021513055801391602, 0.02150307273864746, 0.021549888610839844, 0.021514144897460938, 0.021460639953613282, 0.022456384658813475, 0.02178451156616211, 0.022179744720458985, 0.02173910331726074, 0.02158028793334961, 0.02154457664489746, 0.021255615234375, 0.02131839942932129, 0.02151878356933594, 0.02146512031555176, 0.02145449638366699, 0.02154911994934082, 0.02147942352294922, 0.021493888854980468, 0.02161984062194824, 0.021633535385131835, 0.02152191925048828, 0.021539583206176757, 0.021329919815063478, 0.022374399185180666, 0.021575519561767578, 0.021480735778808595, 0.021281120300292968, 0.021183008193969725, 0.021390527725219727, 0.021266496658325196, 0.020966400146484376, 0.020960479736328124, 0.02097727966308594, 0.020923295974731446, 0.020942848205566408, 0.021091903686523437, 0.021260095596313477, 0.0211278076171875, 0.02135264015197754, 0.021184576034545897, 0.021176063537597656, 0.021139455795288087, 0.021549055099487305, 0.02109235191345215, 0.020962560653686523, 0.020948831558227538, 0.02088969612121582, 0.020937536239624025, 0.02083635139465332, 0.02089779281616211, 0.020818975448608397, 0.02125721549987793, 0.021010400772094727, 0.02088140869140625, 0.022466560363769532, 0.022321151733398437, 0.0218308162689209, 0.02130620765686035, 0.02102272033691406, 0.020971519470214844, 0.020963455200195314, 0.02086672019958496, 0.020813407897949218, 0.02081817626953125, 0.020816192626953126, 0.02083420753479004, 0.020846208572387694]",tokens/s,47.03258971943089,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,7183.446016,7954.366464,0.0,7551.844352,7485.12768,s,1,13.05186328125,13.05186328125,0.0,13.05186328125,13.05186328125,13.05186328125,13.05186328125,[13.05186328125],,kWh,0.00016644103901251127,1.8352348596961447e-05,5.357615397201532e-05,0.00023836954158148804,,MB,3067.4944,8243.77344,0.0,7826.571264,7735.225344,s,10,3.4783906860351568,0.34783906860351566,0.0006250609153924044,0.3476747131347656,0.34849228515624997,0.34884717102050783,0.3491310797119141,"[0.34775958251953126, 0.34741879272460935, 0.34758984375, 0.3472236938476562, 0.3470616455078125, 0.3481561279296875, 0.3473443603515625, 0.34841342163085937, 0.34920205688476563, 0.3482211608886719]",tokens/s,735.9725318601332,kWh,1.0181509952873192e-05,1.1228322003074389e-06,6.786308111034296e-06,1.8090650264214926e-05,tokens/kWh,14150956.22662017,MB,3076.468736,8285.71648,0.0,7868.514304,7758.594048,s,10,25.921688720703123,2.592168872070313,0.005162705224382333,2.5920682373046873,2.599454931640625,2.5996454345703124,2.5997978369140626,"[2.583898681640625, 2.585599365234375, 2.590199462890625, 2.588203125, 2.59345947265625, 2.590677001953125, 2.5961591796875, 2.594243896484375, 2.59941259765625, 2.5998359375]",tokens/s,24.30397212110767,kWh,7.607393861254469e-05,8.39104668678588e-06,5.0385848737767485e-05,0.000134850834037098,tokens/kWh,467182.8724668358,,s,630,25.91884516906737,0.04114102407788474,0.00032152810535914574,0.04113300704956055,0.04156822471618652,0.04163842487335205,0.04189364490509034,"[0.04143385696411133, 0.041048095703125, 0.04115824127197266, 0.04095779037475586, 0.04093199920654297, 0.041129886627197264, 0.04081612777709961, 0.04082745742797852, 0.04078995132446289, 0.040804351806640625, 0.04096819305419922, 0.040812545776367185, 0.040925182342529294, 0.04083840179443359, 0.040898880004882815, 0.04073516845703125, 0.04059939193725586, 0.04076559829711914, 0.040632320404052735, 0.04060575866699219, 0.04078950500488281, 0.040745471954345705, 0.040685504913330076, 0.04066025543212891, 0.040702625274658205, 0.040708160400390624, 0.040753150939941404, 0.040890270233154294, 0.04091299057006836, 0.04080025482177734, 0.04080640029907227, 0.04070182418823242, 0.04076931381225586, 0.04076544189453125, 0.040758846282958984, 0.040954654693603515, 0.041344799041748044, 0.04146531295776367, 0.04144803237915039, 0.04124675369262695, 0.04113840103149414, 0.041619361877441405, 0.04145151901245117, 0.04142489624023438, 0.041299968719482424, 0.041183231353759765, 0.04128768157958984, 0.04131244659423828, 0.041320255279541016, 0.041266590118408206, 0.04114627075195312, 0.041181953430175784, 0.04126019287109375, 0.041114238739013674, 0.04104617691040039, 0.040971294403076175, 0.0410285758972168, 0.041008129119873046, 0.04099961471557617, 0.04094559860229492, 0.04116633605957031, 0.041245567321777345, 0.041627647399902344, 0.041584129333496096, 0.041105377197265626, 0.04108752059936523, 0.04110131072998047, 0.04110745620727539, 0.040828929901123044, 0.04081024169921875, 0.04093772888183594, 0.040955039978027345, 0.040796321868896486, 0.040710430145263675, 0.04046684646606445, 0.04048691177368164, 0.04082483291625977, 0.04099862289428711, 0.040907039642333984, 0.04078182220458984, 0.04063385772705078, 0.040629920959472654, 0.0406447982788086, 0.04066144180297852, 0.04064601516723633, 0.04063852691650391, 0.04063926315307617, 0.040665088653564455, 0.040707233428955075, 0.040882240295410155, 0.04094646453857422, 0.040962177276611327, 0.040871807098388675, 0.040959999084472655, 
0.04077769470214844, 0.04096412658691406, 0.04080640029907227, 0.04103952026367187, 0.04105388641357422, 0.041384384155273436, 0.04138809585571289, 0.04126736068725586, 0.04176051330566406, 0.04177331161499023, 0.04144057464599609, 0.04141945648193359, 0.0411907844543457, 0.041089664459228514, 0.04105340957641602, 0.04107155227661133, 0.04103583908081055, 0.04105398559570313, 0.04103372955322265, 0.04117299270629883, 0.04122137451171875, 0.041226303100585934, 0.04130192184448242, 0.04124892807006836, 0.041088703155517575, 0.041065406799316403, 0.04101055908203125, 0.04119820785522461, 0.04119734573364258, 0.041604991912841796, 0.04164230346679688, 0.04176284790039062, 0.04146460723876953, 0.04078374481201172, 0.040960254669189455, 0.04099852752685547, 0.0409881591796875, 0.04095654296875, 0.04098252868652344, 0.041191425323486325, 0.0408350715637207, 0.04079001617431641, 0.040736766815185545, 0.0406036491394043, 0.040820735931396485, 0.04068966293334961, 0.04097552108764648, 0.040983390808105466, 0.04061183929443359, 0.04056268692016601, 0.040837120056152344, 0.0407628173828125, 0.04057555389404297, 0.04065657424926758, 0.04083744049072266, 0.040703998565673825, 0.04081049728393555, 0.040822784423828126, 0.04103577423095703, 0.041342945098876954, 0.041420833587646484, 0.041422271728515626, 0.041659969329833985, 0.04149350357055664, 0.041508800506591795, 0.04127475357055664, 0.04117068862915039, 0.041582561492919924, 0.04135747146606445, 0.041331520080566404, 0.041325695037841795, 0.04130918502807617, 0.041342174530029294, 0.04126787185668945, 0.04127129745483398, 0.04105625534057617, 0.041078784942626956, 0.04069366455078125, 0.04075734329223633, 0.04076867294311524, 0.040779937744140626, 0.04095660781860352, 0.041009151458740234, 0.040976383209228515, 0.04097644805908203, 0.04124051284790039, 0.04154982376098633, 0.04152428817749024, 0.04137984085083008, 0.04203392028808594, 0.041853248596191404, 0.04168649673461914, 0.04162572860717773, 0.04144588851928711, 0.04146755218505859, 0.04137881469726563, 0.040460830688476564, 0.040664737701416015, 0.04064748764038086, 0.04067737579345703, 0.04046847915649414, 0.04050534439086914, 0.040472606658935546, 0.040691680908203125, 0.040836894989013675, 0.04077795028686523, 0.040656639099121095, 0.040707935333251954, 0.04110172653198242, 0.04136262512207031, 0.04121052932739258, 0.041223617553710935, 0.04112047958374023, 0.04143088150024414, 0.0411874885559082, 0.0409918098449707, 0.04081321716308594, 0.040825119018554686, 0.04089420700073242, 0.04125020980834961, 0.04124143981933594, 0.04103782272338867, 0.04121964645385742, 0.041023296356201173, 0.04094022369384766, 0.04101932907104492, 0.04107049560546875, 0.04108502578735351, 0.04112166213989258, 0.04115075302124024, 0.04102707290649414, 0.040984928131103514, 0.04095910263061524, 0.04097273635864258, 0.040783679962158204, 0.040800895690917965, 0.04090582275390625, 0.04103251266479492, 0.041051456451416016, 0.04103247833251953, 0.041377761840820315, 0.04129721450805664, 0.04142716979980469, 0.041468353271484376, 0.041260673522949216, 0.04166912078857422, 0.041474048614501956, 0.04127328109741211, 0.04150067138671875, 0.041320640563964846, 0.0412894401550293, 0.04153139114379883, 0.041309310913085935, 0.04134396743774414, 0.04136870574951172, 0.04137638473510742, 0.04140438461303711, 0.04139651107788086, 0.04151327896118164, 0.041072353363037106, 0.04108265686035156, 0.04104214477539062, 0.041134784698486325, 0.04106854248046875, 0.041082878112792966, 0.04090009689331055, 0.04107881546020508, 
0.04089699172973633, 0.04093132781982422, 0.04091289520263672, 0.04104399871826172, 0.04120528030395508, 0.041338912963867186, 0.041111518859863284, 0.0410810546875, 0.04103916931152344, 0.040981407165527346, 0.04078182220458984, 0.04075420761108398, 0.04060220718383789, 0.04060780715942383, 0.040823104858398435, 0.04067532730102539, 0.04069363021850586, 0.04074665451049805, 0.04089657592773437, 0.041038238525390625, 0.04081856155395508, 0.0408427848815918, 0.040785568237304684, 0.040872417449951175, 0.04087039947509766, 0.04090044784545899, 0.041209983825683597, 0.041250816345214845, 0.041545726776123046, 0.04179558563232422, 0.04170668792724609, 0.0415588493347168, 0.04154777526855469, 0.04132659149169922, 0.041721473693847655, 0.041622081756591794, 0.041420608520507815, 0.041224193572998044, 0.04114636611938476, 0.04123648071289063, 0.04127651214599609, 0.04120054244995117, 0.04126438522338867, 0.04135331344604492, 0.041247615814208986, 0.04126902389526367, 0.04101027297973633, 0.041211967468261716, 0.04139523315429688, 0.041618785858154296, 0.04158512115478516, 0.04128351974487305, 0.041998016357421876, 0.041910144805908205, 0.041406463623046875, 0.04074905776977539, 0.040861248016357425, 0.040803905487060546, 0.04082902526855469, 0.040833824157714846, 0.040754497528076174, 0.04064736175537109, 0.04047872161865235, 0.040497089385986326, 0.04050950241088867, 0.04065849685668945, 0.0405667839050293, 0.040847808837890624, 0.04096588897705078, 0.040814849853515626, 0.04073651123046875, 0.04085785675048828, 0.04114851379394531, 0.041075775146484375, 0.041216865539550784, 0.04120124816894531, 0.041086719512939456, 0.040841888427734375, 0.04132454299926758, 0.041320030212402346, 0.04110172653198242, 0.041312255859375, 0.04138393783569336, 0.04144140625, 0.04131414413452148, 0.04130950546264649, 0.04126784133911133, 0.04112188720703125, 0.04108083343505859, 0.041041343688964844, 0.04116656112670898, 0.04128239822387696, 0.04124671936035156, 0.04106870269775391, 0.041047039031982424, 0.04081545639038086, 0.04085935974121094, 0.040831295013427735, 0.04098057556152344, 0.040913822174072266, 0.041136798858642576, 0.04138604736328125, 0.041519359588623045, 0.04144287872314453, 0.041587039947509764, 0.04155811309814453, 0.04174643325805664, 0.041543872833251956, 0.04162073516845703, 0.041493057250976566, 0.041330463409423826, 0.041549217224121096, 0.04136019134521484, 0.0413675537109375, 0.04137596893310547, 0.041334304809570316, 0.04139004898071289, 0.04160921478271484, 0.04151091384887695, 0.041381889343261716, 0.041191455841064456, 0.041138145446777345, 0.04126310348510742, 0.04113974380493164, 0.04106851196289062, 0.04113423919677734, 0.04115286254882813, 0.041162113189697265, 0.04111219024658203, 0.0409804801940918, 0.04118329620361328, 0.04120572662353516, 0.04110691070556641, 0.04089497756958008, 0.04084739303588867, 0.04078307342529297, 0.04075542449951172, 0.04061231994628906, 0.04058323287963867, 0.04047568130493164, 0.04062700653076172, 0.040855712890625, 0.04069375991821289, 0.040754814147949216, 0.04073932647705078, 0.04113804626464844, 0.04113177490234375, 0.04101350402832031, 0.04090639877319336, 0.04126755142211914, 0.0416255989074707, 0.04156582260131836, 0.0413548469543457, 0.041738689422607424, 0.04155791854858398, 0.0413897590637207, 0.04143385696411133, 0.04155596923828125, 0.04142617416381836, 0.04140723037719726, 0.041248767852783204, 0.041170177459716795, 0.04104473495483398, 0.0410909423828125, 0.04112601470947266, 0.04117631912231445, 0.0411195182800293, 
0.04096457672119141, 0.04092364883422851, 0.04121916961669922, 0.04145654296875, 0.04125900650024414, 0.04154982376098633, 0.041629695892333986, 0.04161520004272461, 0.04200668716430664, 0.041801376342773436, 0.04177484893798828, 0.04161939239501953, 0.04160988616943359, 0.04149260711669922, 0.04054771041870117, 0.0405428466796875, 0.04065689468383789, 0.04072476959228516, 0.040626014709472656, 0.040689537048339844, 0.04104137420654297, 0.04108752059936523, 0.041129695892333985, 0.041492542266845706, 0.04132476806640625, 0.041192478179931644, 0.04149347305297851, 0.04134473419189453, 0.041509151458740234, 0.04132761764526367, 0.041165664672851564, 0.04093571090698242, 0.04084665679931641, 0.04091551971435547, 0.04089606475830078, 0.041017951965332033, 0.04092019271850586, 0.04095663833618164, 0.04101862335205078, 0.041057022094726565, 0.04087807846069336, 0.04086131286621094, 0.0408600959777832, 0.04088825607299805, 0.04089651107788086, 0.04100207901000977, 0.04077865600585937, 0.040810142517089844, 0.041078590393066404, 0.04122403335571289, 0.041696990966796875, 0.041845054626464845, 0.0415340461730957, 0.04159699249267578, 0.04149398422241211, 0.041419296264648436, 0.0415098876953125, 0.04125593566894531, 0.041412479400634764, 0.04129308700561524, 0.041213855743408204, 0.04144355010986328, 0.04126793670654297, 0.041306110382080076, 0.04128153610229492, 0.04108083343505859, 0.04101103973388672, 0.04123855972290039, 0.041164928436279294, 0.041137535095214846, 0.041286209106445315, 0.041609249114990234, 0.04156800079345703, 0.04163411331176758, 0.04163071823120117, 0.041807998657226564, 0.04193689727783203, 0.040767040252685544, 0.041076416015625, 0.040891136169433594, 0.040894462585449216, 0.04084291076660156, 0.04087664031982422, 0.04097391891479492, 0.04087411117553711, 0.040718368530273434, 0.04074006271362305, 0.04062112045288086, 0.04064636611938476, 0.04060160064697266, 0.040890369415283206, 0.040967742919921876, 0.041006816864013675, 0.04123289489746094, 0.04124636840820312, 0.041089599609375, 0.04122774505615234, 0.041572929382324215, 0.041398239135742185, 0.041207008361816407, 0.04125273513793945, 0.04129270553588867, 0.041306110382080076, 0.04132659149169922, 0.04144076919555664, 0.04138444900512695, 0.04142694473266602, 0.041181182861328124, 0.041180255889892575, 0.04115766525268555, 0.041242496490478515, 0.0412710075378418, 0.04124496078491211, 0.04117081451416016, 0.041205921173095704, 0.04111356735229492, 0.04112108612060547, 0.041455615997314454, 0.04136956787109375, 0.04149935913085938, 0.04156115341186523, 0.04159779357910156, 0.04155302429199219, 0.04160115051269531, 0.04152201461791992, 0.04141683197021485, 0.04167433547973633, 0.04157024002075195, 0.041407039642333984, 0.04148553466796875, 0.0414951057434082, 0.04134044647216797, 0.04138236618041992, 0.04133599853515625, 0.041428062438964845, 0.04147990417480469, 0.04158054351806641, 0.04204246520996094, 0.04171459197998047, 0.04166156768798828, 0.041159137725830075, 0.04124415969848633, 0.04109142303466797, 0.041069118499755856, 0.041010238647460937, 0.04092339324951172, 0.04089212799072266, 0.04088246536254883, 0.04090099334716797, 0.040939136505126955, 0.04097708892822265, 0.04100246429443359, 0.040906814575195315, 0.04096457672119141, 0.04093952178955078, 0.04093513488769531, 0.04087222290039062, 0.04076748657226562, 0.04079347229003906, 0.04076300811767578, 0.04112636947631836, 0.04127948760986328, 0.041169120788574216, 0.04106051254272461, 0.041323806762695314, 0.04161011123657227, 0.04144844818115234, 
0.04162822341918945, 0.0417591667175293, 0.0414384651184082, 0.0413908462524414, 0.04140201568603516, 0.041336193084716796, 0.04117833709716797, 0.04118038558959961, 0.041196063995361326, 0.04115456008911133, 0.04121993637084961, 0.04177104187011719, 0.04108505630493164, 0.041045982360839846, 0.040910049438476564, 0.04083967971801758, 0.04143679809570312, 0.04150243377685547, 0.04130300903320312, 0.041431041717529295, 0.04150495910644531, 0.041993118286132815, 0.041599903106689456, 0.04173836898803711, 0.041567455291748046, 0.041630622863769534, 0.04159875106811523, 0.04164195251464844, 0.041422847747802735, 0.04140630340576172, 0.041533409118652345, 0.04145721435546875, 0.04156275177001953, 0.04148777770996094, 0.04147232055664062]",tokens/s,24.306638505324607,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1385.43104,1490.944,0.0,1088.421888,1083.532288,s,1,8.7369814453125,8.7369814453125,0.0,8.7369814453125,8.7369814453125,8.7369814453125,8.7369814453125,[8.7369814453125],,kWh,3.6059133045841916e-05,3.970364003930227e-06,1.140584245799825e-05,5.143533950777039e-05,,MB,1446.612992,1608.384512,0.0,1191.182336,1163.39968,s,10,0.46376950454711907,0.04637695045471192,0.00021769163711412995,0.046336879730224614,0.04643349304199219,0.046723611068725586,0.046955705490112305,"[0.047013729095458985, 0.046335872650146485, 0.04624025726318359, 0.0462380485534668, 0.046278495788574216, 0.04636902236938477, 0.0462490234375, 0.046337886810302736, 0.046354976654052735, 0.04635219192504883]",tokens/s,5519.9834721774005,kWh,1.3890664257504223e-06,1.5316059616651312e-07,9.228601379715392e-07,2.4650871598884745e-06,tokens/kWh,103850283.33504522,MB,1471.647744,1650.327552,0.0,1233.125376,1163.40224,s,10,12.033753295898437,1.2033753295898437,0.009487129960434573,1.2037210693359377,1.21644482421875,1.2175835571289062,1.2184945434570311,"[1.204896484375, 1.1981719970703124, 1.2062767333984374, 1.183076904296875, 1.201373291015625, 1.2187222900390624, 1.2161917724609375, 1.202545654296875, 1.1969656982421875, 1.205532470703125]",tokens/s,52.35274352971222,kWh,3.538494603300023e-05,3.902537602249012e-06,1.6164764643227814e-05,5.5452248278477066e-05,tokens/kWh,1136112.6366530475,,s,630,12.030422121047964,0.019095908128647577,0.00046385684618631244,0.019122207641601562,0.019462639999389648,0.019647172641754147,0.02043157560348511,"[0.01975503921508789, 0.019511903762817383, 0.019384319305419923, 0.019746816635131836, 0.019639776229858397, 0.019622432708740235, 0.019566144943237305, 0.019481023788452147, 0.019346656799316405, 0.019112480163574218, 0.01908121681213379, 0.019457664489746094, 0.01991641616821289, 0.0195020809173584, 0.019349376678466798, 0.01914591979980469, 0.019336128234863283, 0.01917647933959961, 0.01926652717590332, 0.019171327590942384, 0.019754720687866212, 0.019060895919799804, 0.019500255584716797, 0.01919036865234375, 0.019196224212646485, 0.01922662353515625, 0.019244384765625, 
0.018985279083251955, 0.019593568801879884, 0.018925695419311522, 0.018965696334838866, 0.01897923278808594, 0.019020063400268555, 0.01903126335144043, 0.01894790458679199, 0.019104736328125, 0.019066879272460938, 0.01896361541748047, 0.019253183364868164, 0.019366495132446288, 0.019183135986328124, 0.01922332763671875, 0.019150655746459962, 0.01889504051208496, 0.0192544002532959, 0.019727231979370118, 0.018937376022338866, 0.01884617614746094, 0.01862819290161133, 0.018728736877441407, 0.018665952682495116, 0.018589792251586915, 0.01854879951477051, 0.01860723114013672, 0.018512351989746094, 0.01850387191772461, 0.018498815536499024, 0.018418655395507813, 0.01857302474975586, 0.01841958427429199, 0.018514335632324217, 0.018757568359375, 0.01930556869506836, 0.019641311645507812, 0.019265087127685546, 0.019216447830200194, 0.01918604850769043, 0.019208192825317383, 0.019283008575439454, 0.01913337516784668, 0.01917532730102539, 0.019170591354370117, 0.01936790466308594, 0.019227487564086914, 0.01916486358642578, 0.019651968002319335, 0.021269439697265625, 0.02076367950439453, 0.019191839218139647, 0.01935152053833008, 0.019339935302734375, 0.019294527053833006, 0.019048223495483397, 0.01946236801147461, 0.01934236717224121, 0.018968671798706056, 0.0189117431640625, 0.018751871109008788, 0.018745344161987306, 0.01869824028015137, 0.01863884735107422, 0.01860806465148926, 0.01878432083129883, 0.018865215301513673, 0.019067840576171877, 0.019227935791015626, 0.01874403190612793, 0.01868185615539551, 0.018642847061157226, 0.018619712829589845, 0.018495487213134765, 0.018571680068969726, 0.018489728927612303, 0.018593791961669923, 0.018578624725341795, 0.018590015411376955, 0.018548383712768554, 0.01847929573059082, 0.01842799949645996, 0.018569791793823242, 0.018763776779174804, 0.018472959518432617, 0.01857535934448242, 0.018479103088378905, 0.0185928955078125, 0.01947724723815918, 0.021145536422729493, 0.019025152206420898, 0.018688255310058594, 0.018952896118164062, 0.0188656005859375, 0.0187205753326416, 0.018702816009521485, 0.01871833610534668, 0.01866819190979004, 0.01905254364013672, 0.018993471145629885, 0.018925567626953126, 0.018994304656982423, 0.01973084831237793, 0.019189727783203124, 0.019255712509155275, 0.02005615997314453, 0.019328575134277343, 0.019118080139160155, 0.0192391357421875, 0.01924118423461914, 0.01929417610168457, 0.01916111946105957, 0.01922371292114258, 0.019109888076782225, 0.019276639938354493, 0.019281919479370118, 0.01942323112487793, 0.019318784713745117, 0.019587072372436523, 0.0197072639465332, 0.019186431884765626, 0.019220352172851562, 0.01888768005371094, 0.01905971145629883, 0.01904800033569336, 0.019107711791992187, 0.018986591339111326, 0.019018720626831055, 0.01884979248046875, 0.018884607315063476, 0.01897881507873535, 0.01911324882507324, 0.019155424118041994, 0.01913609504699707, 0.01903887939453125, 0.019029151916503905, 0.019077375411987306, 0.019305055618286132, 0.019180992126464843, 0.019130464553833007, 0.019085792541503905, 0.01919094467163086, 0.01904316711425781, 0.019183616638183593, 0.019088800430297852, 0.019001792907714844, 0.019079103469848632, 0.01904047966003418, 0.019314687728881837, 0.019183263778686524, 0.019072608947753908, 0.018911231994628908, 0.019249792098999022, 0.01926361656188965, 0.01922217559814453, 0.018859455108642578, 0.01900636863708496, 0.018863136291503907, 0.01909654426574707, 0.018860031127929687, 0.018786304473876952, 0.01880678367614746, 0.01884956741333008, 0.01868060874938965, 0.018543903350830077, 
0.018582239151000976, 0.01867990493774414, 0.019136415481567384, 0.01940275192260742, 0.019702848434448243, 0.01908627128601074, 0.01894620704650879, 0.018747039794921875, 0.018680063247680664, 0.018654880523681642, 0.018786144256591798, 0.01885638427734375, 0.01880473518371582, 0.02043894386291504, 0.019787872314453125, 0.018840736389160156, 0.01871343994140625, 0.018680927276611328, 0.01885686492919922, 0.018625663757324218, 0.01860051155090332, 0.01866783905029297, 0.01879987144470215, 0.018704992294311523, 0.018771167755126952, 0.01852511978149414, 0.018531391143798828, 0.018443199157714845, 0.01862838363647461, 0.01860348892211914, 0.018735776901245116, 0.018794208526611327, 0.018882783889770507, 0.01878646469116211, 0.018707584381103516, 0.018490240097045897, 0.018626560211181642, 0.018546527862548828, 0.018602144241333007, 0.018565120697021483, 0.018648672103881835, 0.01876419258117676, 0.01891913604736328, 0.01868400001525879, 0.018654783248901366, 0.018559232711791992, 0.01857164764404297, 0.019521535873413084, 0.01856716728210449, 0.01841939163208008, 0.01858336067199707, 0.018473024368286132, 0.01861881637573242, 0.018681503295898436, 0.018585952758789062, 0.01883135986328125, 0.018733055114746093, 0.01881088066101074, 0.018570432662963866, 0.018555200576782227, 0.01909939193725586, 0.018923040390014648, 0.018743776321411134, 0.01877382469177246, 0.018782400131225587, 0.01880678367614746, 0.018650943756103516, 0.01863248062133789, 0.01867407989501953, 0.018986528396606445, 0.01878819274902344, 0.018924160003662108, 0.01921023941040039, 0.019599359512329103, 0.01925302314758301, 0.01919817543029785, 0.01914575958251953, 0.019306528091430665, 0.019259456634521485, 0.019336063385009764, 0.01927292823791504, 0.019347904205322265, 0.019157344818115235, 0.019195711135864258, 0.01936790466308594, 0.019337440490722658, 0.019250431060791016, 0.01930112075805664, 0.019209440231323243, 0.019366880416870118, 0.019228416442871092, 0.019103807449340822, 0.019062240600585936, 0.01912224006652832, 0.019034528732299806, 0.01901148796081543, 0.018831520080566405, 0.018950143814086915, 0.019099647521972657, 0.01904643249511719, 0.018888671875, 0.018867647171020508, 0.01887673568725586, 0.018874624252319335, 0.018791904449462892, 0.018754079818725587, 0.01935139274597168, 0.01926473617553711, 0.01891993522644043, 0.018778560638427734, 0.018685951232910156, 0.018745344161987306, 0.018990400314331055, 0.01901638412475586, 0.019131584167480467, 0.019026655197143555, 0.019230815887451173, 0.019171327590942384, 0.019334943771362304, 0.019237375259399413, 0.019053600311279298, 0.019260095596313476, 0.019316640853881836, 0.01962393569946289, 0.019176671981811524, 0.019211040496826173, 0.020413536071777344, 0.019346336364746093, 0.019308544158935546, 0.019286016464233398, 0.01934364891052246, 0.01922617530822754, 0.019269023895263672, 0.01972096061706543, 0.019539968490600586, 0.01943142318725586, 0.019403839111328126, 0.01936275291442871, 0.01932659149169922, 0.01931500816345215, 0.01949411201477051, 0.019424095153808593, 0.019419168472290037, 0.019412960052490234, 0.019503103256225587, 0.019298303604125978, 0.019398176193237304, 0.019344959259033203, 0.01939753532409668, 0.019366912841796875, 0.019401023864746094, 0.019355520248413086, 0.01920031929016113, 0.019248735427856444, 0.01937295913696289, 0.01942527961730957, 0.01969152069091797, 0.01989151954650879, 0.01943846321105957, 0.01943459129333496, 0.019380447387695312, 0.01934713554382324, 0.019413824081420897, 0.01939455986022949, 
0.019263103485107423, 0.01910531234741211, 0.01923967933654785, 0.019330240249633788, 0.019296928405761717, 0.019403007507324217, 0.01925654411315918, 0.019369951248168947, 0.019120960235595702, 0.019273311614990234, 0.019455936431884764, 0.01939708709716797, 0.01901312065124512, 0.0189752311706543, 0.01890287971496582, 0.019005599975585936, 0.01905411148071289, 0.019038528442382813, 0.019238687515258788, 0.01919833564758301, 0.018950143814086915, 0.019142656326293944, 0.019683263778686524, 0.01921023941040039, 0.019129663467407226, 0.019296960830688478, 0.01922662353515625, 0.019361791610717775, 0.019212287902832033, 0.019216384887695313, 0.01905219268798828, 0.019212640762329102, 0.019142879486083984, 0.01928374481201172, 0.01908924865722656, 0.019139999389648436, 0.01908780860900879, 0.019175455093383788, 0.01924540710449219, 0.019227807998657226, 0.019165983200073244, 0.01947555160522461, 0.01970249557495117, 0.01952582359313965, 0.019269632339477538, 0.01989836883544922, 0.020281152725219728, 0.020054208755493165, 0.01939993667602539, 0.019491167068481446, 0.019277952194213868, 0.019337087631225585, 0.019234783172607423, 0.01929427146911621, 0.019276159286499023, 0.019240608215332033, 0.01924540710449219, 0.019305919647216795, 0.019173952102661134, 0.01919795227050781, 0.01921023941040039, 0.019726335525512697, 0.0192491512298584, 0.01924095916748047, 0.019154880523681642, 0.01930860710144043, 0.019193599700927735, 0.019226879119873048, 0.019154495239257812, 0.019157440185546874, 0.01904800033569336, 0.019269472122192384, 0.01916169548034668, 0.019169279098510742, 0.019094560623168947, 0.01914076805114746, 0.019255264282226563, 0.01954476737976074, 0.019208032608032225, 0.019302688598632812, 0.01924857521057129, 0.019151456832885744, 0.01917103958129883, 0.019269920349121093, 0.019281919479370118, 0.019560319900512695, 0.019386144638061525, 0.019285568237304686, 0.019319583892822265, 0.019294208526611328, 0.019311712265014647, 0.019235551834106444, 0.01984841537475586, 0.019577407836914064, 0.0194564151763916, 0.019412511825561522, 0.01961417579650879, 0.019358848571777342, 0.019465087890625, 0.01940665626525879, 0.019249343872070314, 0.01933075141906738, 0.019423328399658202, 0.019271167755126953, 0.01926937675476074, 0.019182207107543946, 0.019267168045043945, 0.019245824813842773, 0.019004703521728516, 0.018785247802734373, 0.01918649673461914, 0.01899158477783203, 0.019204160690307618, 0.019168863296508788, 0.01915171241760254, 0.01905561637878418, 0.018806943893432616, 0.019028799057006836, 0.019290111541748048, 0.01919152069091797, 0.019073408126831056, 0.01882102394104004, 0.0188723201751709, 0.018694143295288086, 0.018743295669555664, 0.01865020751953125, 0.018563615798950196, 0.01869657516479492, 0.01861222457885742, 0.01863270378112793, 0.018540576934814455, 0.018572320938110353, 0.01895840072631836, 0.018694751739501952, 0.018792736053466798, 0.019216384887695313, 0.01907711982727051, 0.018973695755004884, 0.018877439498901367, 0.018812736511230468, 0.01860380744934082, 0.01853686332702637, 0.018646368026733397, 0.01928668785095215, 0.019111711502075194, 0.01942710494995117, 0.019115711212158205, 0.019016416549682617, 0.019705087661743163, 0.019184383392333984, 0.019167520523071288, 0.01914441680908203, 0.019299552917480468, 0.019360095977783202, 0.019431135177612305, 0.019306943893432616, 0.01932316780090332, 0.01946214485168457, 0.019275583267211915, 0.019230335235595704, 0.019188287734985352, 0.019178592681884765, 0.01938649559020996, 0.019743520736694335, 
0.019326496124267577, 0.019007648468017578, 0.019360063552856445, 0.018976768493652343, 0.01931043243408203, 0.019271488189697265, 0.019253599166870118, 0.01891926383972168, 0.019177248001098633, 0.018906784057617188, 0.01891337585449219, 0.018657567977905274, 0.018702688217163085, 0.018647039413452148, 0.018817024230957033, 0.018952192306518553, 0.01879680061340332, 0.018747039794921875, 0.01867385673522949, 0.018937759399414063, 0.018747392654418944, 0.018572704315185547, 0.018481151580810547, 0.018579551696777344, 0.018530847549438477, 0.01867081642150879, 0.018545408248901368, 0.018364416122436524, 0.01852726364135742, 0.01862460708618164, 0.019590015411376952, 0.019064031600952148, 0.01900332832336426, 0.018776256561279295, 0.018618240356445312, 0.018631135940551758, 0.01870470428466797, 0.018636320114135744, 0.018714656829833986, 0.018811328887939453, 0.01885593605041504, 0.018984672546386718, 0.019120031356811524, 0.019159423828125, 0.019194976806640625, 0.019153600692749025, 0.01916860771179199, 0.01964031982421875, 0.01921023941040039, 0.019283519744873048, 0.019167680740356446, 0.019122175216674805, 0.018970815658569336, 0.01905955123901367, 0.01908835220336914, 0.01915660858154297, 0.019222623825073244, 0.019257631301879883, 0.018810272216796875, 0.018881120681762696, 0.02103910446166992, 0.01973219108581543, 0.019615007400512696, 0.018922496795654296, 0.018745344161987306, 0.018747392654418944, 0.01863910484313965, 0.018603200912475585, 0.018606655120849608, 0.01850707244873047, 0.018481855392456056, 0.018497535705566406, 0.018448383331298827, 0.018468767166137694, 0.018592063903808593, 0.01856003189086914, 0.01855564880371094, 0.01854204750061035, 0.019365728378295897, 0.025774784088134765, 0.02098703956604004, 0.019008352279663087, 0.019535871505737306, 0.0189069766998291, 0.01868921661376953, 0.01865171241760254, 0.018823583602905272, 0.019301952362060545, 0.01904207992553711, 0.018718624114990236, 0.018733728408813478, 0.018886751174926757, 0.018655231475830078, 0.018720767974853517, 0.01891289520263672, 0.01870031929016113, 0.01897916793823242, 0.018944000244140623, 0.01880678367614746, 0.01924015998840332, 0.019493343353271485, 0.019486047744750976, 0.018821184158325194, 0.018742176055908204, 0.01871455955505371, 0.01932908821105957, 0.01880678367614746, 0.01942755126953125, 0.019312416076660156, 0.019453376770019532]",tokens/s,52.3672397910108,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1408.589824,1946.025984,0.0,1543.503872,1510.249472,s,1,8.70253125,8.70253125,0.0,8.70253125,8.70253125,8.70253125,8.70253125,[8.70253125],,kWh,4.410949675417062e-05,4.855038932975557e-06,1.3165010532015242e-05,6.212954621916141e-05,,MB,1442.750464,2021.523456,0.0,1604.32128,1585.251328,s,10,0.42386870574951174,0.04238687057495117,0.00023077278369478652,0.0422879695892334,0.042591729736328125,0.04279312973022461,0.042954249725341793,"[0.04299452972412109, 0.042469505310058595, 0.042546974182128904, 0.04237779235839844, 0.04215350341796875, 0.04229788970947266, 0.04225001525878906, 0.042234848022460934, 0.04226559829711914, 0.04227804946899414]",tokens/s,6039.606050824735,kWh,1.2755362315139124e-06,1.4066779404590764e-07,8.425843713013372e-07,2.2587883968611574e-06,tokens/kWh,113335096.0876818,MB,1467.498496,2084.438016,0.0,1667.23584,1589.165568,s,10,11.449416503906248,1.1449416503906247,0.007212182297105797,1.1466785888671875,1.1526762939453126,1.1532949340820313,1.1537898461914062,"[1.13501904296875, 1.148018798828125, 1.140659912109375, 1.1306737060546874, 1.15391357421875, 1.1515023193359375, 1.147565185546875, 1.143733154296875, 1.152538818359375, 1.1457919921875]",tokens/s,55.024638136367926,kWh,3.3275282684740005e-05,3.669366048257714e-06,1.6527707302698777e-05,5.347235603569651e-05,tokens/kWh,1178178.869805982,,s,630,11.447266712188712,0.01817026462252178,0.0003665087769169961,0.018102463722229005,0.018550351524353026,0.018770316410064696,0.019589432277679446,"[0.017948320388793945, 0.017827295303344728, 0.01803539276123047, 0.017772735595703124, 0.01775788879394531, 0.018045440673828125, 0.018644704818725585, 0.019851360321044922, 0.018042240142822265, 0.01806399917602539, 0.01791744041442871, 0.017973760604858398, 0.018282495498657226, 0.018218624114990235, 0.0181026554107666, 0.018302175521850587, 0.018139039993286133, 0.018119552612304687, 0.017979391098022462, 0.01800396728515625, 0.017928192138671875, 0.017847583770751952, 0.017734207153320313, 0.017873056411743166, 0.017855648040771485, 0.01786147117614746, 0.0179814395904541, 0.018106367111206053, 0.017976480484008787, 0.018211679458618166, 0.018400831222534178, 0.017934783935546875, 0.017993728637695314, 0.01782086372375488, 0.017822336196899415, 0.017846464157104492, 0.018069503784179687, 0.017782560348510744, 0.01782806396484375, 0.01780735969543457, 0.01799087905883789, 0.017871648788452148, 0.018184383392333983, 0.017868608474731446, 0.01781046485900879, 0.01787183952331543, 0.017750015258789064, 0.01813430404663086, 0.019196287155151367, 0.018041183471679687, 0.017994911193847656, 0.018008480072021483, 0.017926591873168946, 
0.017821279525756836, 0.017919424057006837, 0.017809919357299805, 0.01777302360534668, 0.01778656005859375, 0.01806729507446289, 0.017892000198364257, 0.01779897689819336, 0.017795072555541993, 0.017797119140625, 0.018741312026977538, 0.018328096389770506, 0.018123903274536134, 0.018031455993652343, 0.018044031143188477, 0.018025375366210936, 0.017901567459106444, 0.017956127166748048, 0.017870784759521485, 0.01785321617126465, 0.017915903091430666, 0.01787664031982422, 0.017791328430175782, 0.01790947151184082, 0.0179400634765625, 0.017844640731811523, 0.018024736404418946, 0.018513919830322266, 0.017868799209594728, 0.017843328475952148, 0.018029439926147462, 0.017876575469970703, 0.017881023406982423, 0.01776483154296875, 0.017829887390136717, 0.017852224349975587, 0.017785024642944337, 0.01775174331665039, 0.017749759674072267, 0.017764192581176758, 0.01772208023071289, 0.01786460876464844, 0.018080959320068358, 0.017809791564941405, 0.017817920684814453, 0.017975519180297852, 0.01779916763305664, 0.018153472900390624, 0.01798080062866211, 0.01780169677734375, 0.017898719787597658, 0.017832576751708986, 0.017862592697143555, 0.0179116153717041, 0.018011743545532227, 0.018096479415893554, 0.01838489532470703, 0.018582143783569337, 0.018677120208740235, 0.018536096572875978, 0.018643552780151368, 0.018637184143066407, 0.018687936782836916, 0.019091520309448242, 0.01900275230407715, 0.018995840072631835, 0.018767711639404296, 0.01873321533203125, 0.019279104232788086, 0.021144319534301757, 0.019591167449951173, 0.018895904541015626, 0.018803680419921875, 0.019003040313720704, 0.01874668884277344, 0.01877244758605957, 0.018503679275512695, 0.018378656387329103, 0.019091392517089845, 0.018430335998535156, 0.0182807674407959, 0.018231008529663088, 0.018447967529296876, 0.018268512725830077, 0.017887136459350587, 0.017776607513427734, 0.017911968231201173, 0.017868799209594728, 0.018052799224853516, 0.01802681541442871, 0.01817190361022949, 0.018191680908203126, 0.018084096908569335, 0.017959360122680665, 0.01800601577758789, 0.01778892707824707, 0.01788857650756836, 0.017738048553466796, 0.01776883125305176, 0.017974464416503907, 0.0177139835357666, 0.01769241523742676, 0.017895679473876953, 0.01825926399230957, 0.01805500793457031, 0.018047840118408202, 0.017870624542236327, 0.017922271728515626, 0.017904991149902343, 0.017827871322631837, 0.01806399917602539, 0.017987232208251953, 0.017783136367797853, 0.017889280319213868, 0.017901567459106444, 0.017820703506469728, 0.018079872131347655, 0.01815433692932129, 0.018431999206542968, 0.018777503967285156, 0.018489952087402343, 0.018366464614868162, 0.017924095153808595, 0.018054496765136718, 0.018489376068115234, 0.017975296020507812, 0.017911712646484376, 0.01790617561340332, 0.017868671417236328, 0.017977216720581054, 0.017922527313232423, 0.018126848220825196, 0.018053119659423827, 0.018192384719848635, 0.017922048568725587, 0.01794236755371094, 0.01809609603881836, 0.017957408905029296, 0.01805072021484375, 0.01802275276184082, 0.017856096267700194, 0.01789334487915039, 0.01796895980834961, 0.017937023162841795, 0.017934335708618163, 0.017991167068481445, 0.01795337677001953, 0.01816102409362793, 0.017887775421142577, 0.017934335708618163, 0.01777663993835449, 0.017887231826782226, 0.018050975799560547, 0.017864799499511717, 0.017872896194458008, 0.01784566307067871, 0.01781340789794922, 0.01777324867248535, 0.017814592361450197, 0.017758432388305663, 0.017791744232177734, 0.017778656005859376, 0.01774985694885254, 0.01795907211303711, 
0.017758207321166994, 0.01779657554626465, 0.017723968505859375, 0.017743616104125975, 0.017743776321411133, 0.017987455368041992, 0.018174016952514648, 0.018034048080444336, 0.017949695587158202, 0.017854015350341798, 0.01797164726257324, 0.01797452735900879, 0.017934335708618163, 0.017924448013305665, 0.018143648147583007, 0.01806060791015625, 0.018057024002075196, 0.017982336044311525, 0.018053119659423827, 0.01807360076904297, 0.018257535934448243, 0.01821939277648926, 0.017958911895751953, 0.018074655532836915, 0.018042911529541017, 0.017871231079101563, 0.01776470375061035, 0.018016000747680665, 0.017870431900024415, 0.017902463912963868, 0.017829887390136717, 0.018037824630737304, 0.018279487609863282, 0.01809779167175293, 0.017903871536254883, 0.018326528549194337, 0.018077695846557617, 0.017960960388183594, 0.017786880493164063, 0.018042303085327147, 0.01867763137817383, 0.01801491165161133, 0.018095712661743164, 0.01808220863342285, 0.018030080795288086, 0.01804038429260254, 0.018090208053588866, 0.01827084732055664, 0.018324832916259765, 0.018485471725463866, 0.018502176284790037, 0.018423807144165038, 0.018735103607177735, 0.01863680076599121, 0.01855023956298828, 0.01867123222351074, 0.018528831481933593, 0.018590047836303712, 0.018685216903686522, 0.018616512298583986, 0.019005983352661134, 0.018783552169799805, 0.018711231231689454, 0.018775520324707032, 0.01958518409729004, 0.01886400032043457, 0.018618080139160158, 0.0184102725982666, 0.01851798439025879, 0.018489120483398437, 0.01857151985168457, 0.018372608184814454, 0.018245695114135742, 0.01839014434814453, 0.018223936080932618, 0.01815340805053711, 0.018259328842163088, 0.01801228713989258, 0.018051647186279298, 0.018150783538818358, 0.017825408935546874, 0.017724416732788087, 0.0179150390625, 0.01771001625061035, 0.01767398452758789, 0.01821897506713867, 0.017744064331054688, 0.017786880493164063, 0.01781760025024414, 0.017894655227661132, 0.018146047592163084, 0.018204959869384765, 0.01797270393371582, 0.017751359939575197, 0.01795167922973633, 0.020707328796386718, 0.018264064788818358, 0.01793164825439453, 0.018620512008666993, 0.01799900817871094, 0.01829360008239746, 0.018448543548583985, 0.018227039337158205, 0.018103424072265624, 0.018098560333251953, 0.018000383377075196, 0.017982559204101564, 0.018099103927612305, 0.01819443130493164, 0.01817190361022949, 0.01820444869995117, 0.01802262306213379, 0.017903871536254883, 0.01866691207885742, 0.01822857666015625, 0.018407903671264648, 0.01824412727355957, 0.018335744857788085, 0.0184520320892334, 0.018325952529907225, 0.018233343124389647, 0.018085023880004884, 0.018174816131591796, 0.01820467185974121, 0.01819443130493164, 0.018391040802001952, 0.01826790428161621, 0.01840937614440918, 0.018940256118774413, 0.018370559692382812, 0.018159616470336915, 0.018280448913574218, 0.019216384887695313, 0.01984511947631836, 0.018542591094970702, 0.01818227195739746, 0.01806118392944336, 0.01801625633239746, 0.01805660820007324, 0.018214879989624025, 0.018168352127075196, 0.017967199325561522, 0.018038400650024412, 0.018164096832275392, 0.01839030456542969, 0.01831190490722656, 0.018176000595092775, 0.017976863861083985, 0.018252416610717772, 0.018340768814086913, 0.01810223960876465, 0.018452800750732423, 0.01855135917663574, 0.018835615158081055, 0.01819968032836914, 0.018205503463745117, 0.0179814395904541, 0.018124160766601564, 0.017973312377929686, 0.018186784744262694, 0.01801747131347656, 0.018146879196166994, 0.01794268798828125, 0.017938432693481447, 
0.018325504302978517, 0.01822496032714844, 0.01864137649536133, 0.018362207412719725, 0.01828201675415039, 0.018240127563476562, 0.01847862434387207, 0.018485279083251954, 0.018476896286010742, 0.018474687576293947, 0.018148256301879884, 0.018135040283203126, 0.018114559173583983, 0.01817804718017578, 0.01834560012817383, 0.018289024353027344, 0.018461856842041015, 0.018133119583129884, 0.01816649627685547, 0.018235263824462892, 0.018290815353393556, 0.018137088775634767, 0.018165760040283203, 0.018222240447998046, 0.018201440811157227, 0.01803878402709961, 0.017950719833374023, 0.01778835105895996, 0.01816761589050293, 0.018250463485717773, 0.018356256484985352, 0.018490560531616212, 0.01811542320251465, 0.018163679122924804, 0.018314720153808594, 0.01824412727355957, 0.01842995262145996, 0.018427328109741212, 0.018111040115356445, 0.0180644474029541, 0.01808889579772949, 0.018130815505981446, 0.0183657283782959, 0.01841219139099121, 0.018347776412963868, 0.01829318428039551, 0.018192384719848635, 0.018087615966796877, 0.018153696060180663, 0.018012256622314454, 0.01804287910461426, 0.017954208374023437, 0.01808355140686035, 0.01798780822753906, 0.017935007095336915, 0.018085344314575195, 0.018186399459838867, 0.018449024200439455, 0.018388736724853517, 0.0180031681060791, 0.018538591384887695, 0.01813055992126465, 0.018016576766967773, 0.0179138240814209, 0.017831104278564453, 0.017898399353027342, 0.01780531120300293, 0.01781350326538086, 0.018237632751464845, 0.018478143692016603, 0.01820252799987793, 0.01912918472290039, 0.018173952102661133, 0.018275903701782226, 0.018169599533081053, 0.017947328567504882, 0.017979232788085938, 0.01797711944580078, 0.017876800537109376, 0.01789334487915039, 0.018094911575317382, 0.01798940849304199, 0.018058944702148437, 0.01802681541442871, 0.018124063491821288, 0.018164447784423828, 0.017901567459106444, 0.017909183502197265, 0.017906240463256836, 0.018061311721801757, 0.01818623924255371, 0.017987583160400392, 0.01802444839477539, 0.018141183853149414, 0.018341888427734376, 0.018223104476928712, 0.018206720352172853, 0.018241312026977537, 0.018116640090942382, 0.018071744918823244, 0.018305023193359374, 0.018466079711914062, 0.018277088165283204, 0.018226495742797853, 0.018133344650268553, 0.018305152893066407, 0.018178272247314452, 0.018154720306396484, 0.01814124870300293, 0.018078432083129883, 0.01807107162475586, 0.018671295166015626, 0.018321695327758788, 0.018423583984375, 0.01832364845275879, 0.018116544723510743, 0.01820732879638672, 0.01817990493774414, 0.018170047760009765, 0.018182144165039063, 0.018128896713256838, 0.01812678337097168, 0.018272319793701173, 0.0187391357421875, 0.01832841682434082, 0.01817795181274414, 0.018417760848999022, 0.018112512588500978, 0.018013408660888672, 0.018186271667480467, 0.018137855529785155, 0.018001920700073244, 0.018593376159667968, 0.018596256256103515, 0.018431999206542968, 0.018357343673706054, 0.018335744857788085, 0.018041759490966796, 0.018457855224609375, 0.018115327835083007, 0.018028831481933592, 0.018138847351074218, 0.018316511154174805, 0.018103071212768555, 0.01825174331665039, 0.018161312103271484, 0.018166112899780273, 0.018307104110717773, 0.01815510368347168, 0.018071487426757814, 0.01817852783203125, 0.018087648391723634, 0.018169183731079102, 0.01832851219177246, 0.018102272033691406, 0.01816707229614258, 0.01890787124633789, 0.01824070358276367, 0.02099078369140625, 0.01809388732910156, 0.018192575454711913, 0.01878188705444336, 0.018635072708129884, 0.018664831161499022, 
0.018244224548339842, 0.018081920623779297, 0.018204544067382814, 0.018228960037231446, 0.018197824478149414, 0.018057376861572265, 0.018120512008666993, 0.018131967544555663, 0.018067264556884767, 0.01814956855773926, 0.018171327590942383, 0.018155296325683593, 0.01842051124572754, 0.018259967803955078, 0.018331647872924805, 0.018033695220947266, 0.018245952606201172, 0.01815225601196289, 0.018147167205810548, 0.018311168670654295, 0.018132991790771484, 0.018201696395874024, 0.01820057678222656, 0.019594751358032226, 0.018059776306152343, 0.01798080062866211, 0.01792473602294922, 0.018019487380981445, 0.01804579162597656, 0.0178855037689209, 0.017843904495239257, 0.019120128631591796, 0.019312639236450196, 0.018326976776123046, 0.01862713623046875, 0.018962432861328125, 0.018361791610717774, 0.01824825668334961, 0.018104320526123048, 0.018083232879638672, 0.018060127258300782, 0.018179840087890625, 0.01807155227661133, 0.018089984893798827, 0.01804083251953125, 0.017994911193847656, 0.01830944061279297, 0.01812944030761719, 0.017989728927612306, 0.018241439819335938, 0.01820857620239258, 0.018036928176879883, 0.018163263320922853, 0.0182542724609375, 0.018163711547851562, 0.018071744918823244, 0.018136896133422852, 0.01799782371520996, 0.01811859130859375, 0.018024511337280273, 0.01801215934753418, 0.01795849609375, 0.01814067268371582, 0.01803766441345215, 0.017987455368041992, 0.01840140724182129, 0.01821696090698242, 0.017962720870971678, 0.018105791091918944, 0.01808675193786621, 0.018028543472290038, 0.01814313507080078, 0.0180296630859375, 0.01817065620422363, 0.01813088035583496, 0.017997312545776366, 0.018014015197753905, 0.017947616577148436, 0.01822492790222168, 0.018300159454345703, 0.018228191375732422, 0.018102272033691406, 0.018261920928955077, 0.01808371162414551, 0.01802672004699707]",tokens/s,55.03497173951526,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", 
line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4329.836544,6253.576192,0.0,5851.05408,5850.293248,s,1,11.33033203125,11.33033203125,0.0,11.33033203125,11.33033203125,11.33033203125,11.33033203125,[11.33033203125],,kWh,0.00010916968367081381,1.2034633693470011e-05,3.2951415249997396e-05,0.0001541557326142812,,MB,4098.90816,6368.919552,0.0,5951.717376,5922.919424,s,10,1.987487030029297,0.1987487030029297,0.00044175136681290746,0.1987540512084961,0.1992450622558594,0.19936573333740235,0.1994622702026367,"[0.19848031616210937, 0.19794140625, 0.1994864044189453, 0.19838563537597656, 0.19876669311523437, 0.19921824645996095, 0.1984017333984375, 0.1991894989013672, 0.1988756866455078, 0.19874140930175782]",tokens/s,1288.058720042195,kWh,5.849412818666829e-06,6.450827106505325e-07,3.867303093840063e-06,1.0361798623157426e-05,tokens/kWh,24706135.422075227,MB,4103.00416,6383.599616,0.0,5966.39744,5922.921984,s,10,19.912346069335936,1.9912346069335936,0.0065722237768895365,1.9903682861328125,2.0012720703125,2.002472216796875,2.003432333984375,"[1.9913990478515624, 1.9926356201171875, 1.9893375244140625, 1.98461279296875, 1.985252685546875, 1.983283203125, 1.9865452880859376, 2.00367236328125, 2.00100537109375, 1.9946021728515626]",tokens/s,31.638662657142646,kWh,5.8267843823000545e-05,6.426385932732122e-06,3.830348064275957e-05,0.00010299771039849225,tokens/kWh,611664.0822039306,,s,630,19.909214776992812,0.03160192821744889,0.00041140173709075127,0.031552735328674315,0.031929820442199705,0.03217103271484375,0.03366073356628419,"[0.03238924789428711, 0.031821279525756835, 0.03166624069213867, 0.03177536010742187, 0.03151801681518555, 0.03173446464538574, 0.0318525447845459, 0.031581375122070314, 0.03174687957763672, 0.03155558395385742, 0.031643648147583005, 0.03148185539245606, 0.031891359329223636, 0.03146928024291992, 0.031272800445556644, 0.03147964859008789, 0.03162784004211426, 0.03169647979736328, 0.03170483207702637, 0.03166902351379394, 0.03167027282714844, 0.03163443183898926, 0.03169203186035156, 0.03167616081237793, 0.03174828720092773, 0.03150419235229492, 0.03186182403564453, 0.031885408401489256, 0.03167631912231445, 0.03148422431945801, 0.03132249641418457, 
0.03136940765380859, 0.0312873592376709, 0.031202463150024413, 0.03167932891845703, 0.031528032302856446, 0.03396905517578125, 0.031459327697753905, 0.031088640213012695, 0.03169385528564453, 0.031230527877807616, 0.031316383361816406, 0.03147776031494141, 0.0312729606628418, 0.031850496292114255, 0.03173990440368652, 0.031088800430297853, 0.03132777595520019, 0.031606176376342776, 0.03132508850097656, 0.03189273643493652, 0.03139641571044922, 0.031344831466674807, 0.03138559913635254, 0.03136672019958496, 0.03137731170654297, 0.03178755187988281, 0.031494144439697266, 0.032020481109619144, 0.031489696502685544, 0.03155788803100586, 0.03140412712097168, 0.031327360153198244, 0.032331775665283204, 0.031440895080566404, 0.03147558403015137, 0.031666015625, 0.031547679901123046, 0.03160876846313477, 0.03188947105407715, 0.031511999130249026, 0.03156812858581543, 0.031740224838256836, 0.03167436790466309, 0.031428159713745116, 0.0314454402923584, 0.03162691116333008, 0.03167465591430664, 0.03163552093505859, 0.03144252777099609, 0.031672639846801756, 0.0317604808807373, 0.03172761535644531, 0.03167231941223145, 0.031589536666870116, 0.031720287322998045, 0.031641183853149416, 0.03160105514526367, 0.03143475151062012, 0.03175628852844238, 0.03219478225708008, 0.031691743850708005, 0.031593088150024415, 0.031494335174560545, 0.031455232620239255, 0.03134828758239746, 0.031334175109863284, 0.031392223358154295, 0.03145107269287109, 0.031418624877929686, 0.03133795166015625, 0.031801887512207035, 0.031194143295288086, 0.031331296920776366, 0.03141222381591797, 0.031364799499511715, 0.031381664276123045, 0.03150044822692871, 0.03154944038391113, 0.03140310478210449, 0.031435680389404294, 0.032196063995361325, 0.03144144058227539, 0.03180316734313965, 0.03160598373413086, 0.03135708808898926, 0.03373529434204101, 0.03186495971679688, 0.03147737693786621, 0.03150281524658203, 0.032161792755126956, 0.0315863037109375, 0.031512575149536134, 0.03148521614074707, 0.03142934417724609, 0.0317890567779541, 0.03261030578613281, 0.03157196807861328, 0.03152304077148437, 0.031487775802612306, 0.03155558395385742, 0.03118489646911621, 0.031336448669433595, 0.03454073715209961, 0.03149497604370117, 0.031547391891479495, 0.03166531181335449, 0.03147657585144043, 0.031326303482055666, 0.03139779281616211, 0.031168352127075194, 0.03124239921569824, 0.031272863388061525, 0.031778783798217775, 0.03129151916503906, 0.03134668731689453, 0.032179454803466796, 0.03139583969116211, 0.03121788787841797, 0.031492288589477536, 0.03156822395324707, 0.03142860794067383, 0.03141561508178711, 0.03132486343383789, 0.03134464073181152, 0.03155763244628906, 0.031596832275390625, 0.03153276824951172, 0.03206070327758789, 0.031500415802001955, 0.031396448135375975, 0.03148972892761231, 0.03174841690063476, 0.031547231674194334, 0.031514656066894534, 0.0317010555267334, 0.03161615943908692, 0.03172035217285156, 0.03171952056884766, 0.03148380851745605, 0.0320467529296875, 0.03154364776611328, 0.03157606315612793, 0.031702976226806644, 0.03170515251159668, 0.03164182472229004, 0.03150620841979981, 0.03205120086669922, 0.031505855560302734, 0.03137766456604004, 0.031192832946777344, 0.03120185661315918, 0.031291391372680666, 0.03145724868774414, 0.031743967056274414, 0.0312807674407959, 0.03123356819152832, 0.03136400032043457, 0.031219327926635742, 0.0321003532409668, 0.031422464370727536, 0.03140633583068848, 0.03138944053649902, 0.031281152725219724, 0.031483680725097656, 0.03132028770446777, 0.03125888061523437, 
0.031170303344726563, 0.031005760192871094, 0.03114182472229004, 0.0311592960357666, 0.031011968612670898, 0.03098918342590332, 0.033535999298095705, 0.031151424407958983, 0.031189088821411134, 0.03107695960998535, 0.031105024337768555, 0.031164415359497072, 0.03104902458190918, 0.031059776306152344, 0.03092755126953125, 0.03104582405090332, 0.031307039260864256, 0.03127984046936035, 0.03150620841979981, 0.032712928771972655, 0.031868255615234375, 0.031381727218627926, 0.031457408905029294, 0.03401507186889648, 0.0316168327331543, 0.03129990386962891, 0.03142076873779297, 0.03135078430175781, 0.031346080780029296, 0.031627424240112306, 0.03160051155090332, 0.03130195236206055, 0.03129484748840332, 0.03130252838134766, 0.03147999954223633, 0.03143251228332519, 0.03132134437561035, 0.031233856201171875, 0.03202489471435547, 0.031324735641479494, 0.03128531265258789, 0.031471616744995115, 0.03143391990661621, 0.034374462127685544, 0.031378656387329104, 0.03128531265258789, 0.03130441665649414, 0.0314204158782959, 0.03128070449829102, 0.031474111557006836, 0.032016063690185545, 0.03144735908508301, 0.031367136001586915, 0.031331584930419924, 0.03147855949401855, 0.03197750473022461, 0.03153977584838867, 0.031279264450073244, 0.03136307144165039, 0.03129958343505859, 0.03140403175354004, 0.03139583969116211, 0.03133369636535645, 0.0320307502746582, 0.03131868743896484, 0.03137945556640625, 0.0312729606628418, 0.03131830406188965, 0.03208367919921875, 0.03161727905273438, 0.03151785659790039, 0.031438848495483396, 0.03125699234008789, 0.03146742439270019, 0.031432191848754884, 0.03169910430908203, 0.031783552169799806, 0.0315819206237793, 0.03143503952026367, 0.03145244789123535, 0.03204947280883789, 0.03184681510925293, 0.03175628852844238, 0.03146041679382324, 0.03164873504638672, 0.03160470390319824, 0.031766048431396486, 0.03213343811035156, 0.03163881683349609, 0.0317203197479248, 0.03253430557250977, 0.03172294425964355, 0.03170528030395508, 0.03152956771850586, 0.03254886245727539, 0.032026622772216795, 0.031238143920898437, 0.031180799484252928, 0.03124239921569824, 0.031173728942871095, 0.031244800567626952, 0.03117695999145508, 0.03172540855407715, 0.031388959884643554, 0.031150976181030274, 0.031143936157226562, 0.03130092811584473, 0.031345344543457034, 0.03142652893066406, 0.03116444778442383, 0.031117311477661135, 0.03127507209777832, 0.031211456298828124, 0.03131596755981445, 0.031233600616455078, 0.03129798316955566, 0.031205152511596678, 0.031036672592163087, 0.031877695083618166, 0.031238079071044922, 0.03127545547485352, 0.03183206367492676, 0.030879743576049806, 0.030998783111572267, 0.030886816024780273, 0.0313209285736084, 0.03109008026123047, 0.031138399124145507, 0.031238336563110352, 0.03133209609985352, 0.0319117431640625, 0.03159270477294922, 0.03141632080078125, 0.03183011245727539, 0.031726848602294924, 0.03123062324523926, 0.03125862312316895, 0.03161702346801758, 0.03140812873840332, 0.03154243278503418, 0.031224672317504882, 0.03149964714050293, 0.031556224822998045, 0.032058815002441406, 0.031224384307861328, 0.03129958343505859, 0.031264448165893556, 0.031209632873535155, 0.03127107238769531, 0.031156223297119142, 0.03125657653808594, 0.031180255889892577, 0.03107689666748047, 0.03187507247924805, 0.03105177688598633, 0.03243977737426758, 0.031130144119262695, 0.03121561622619629, 0.03105776023864746, 0.03112995147705078, 0.03139129638671875, 0.03132236862182617, 0.031389696121215824, 0.0337116813659668, 0.03155193519592285, 0.03180953598022461, 
0.03150185585021972, 0.03149603271484375, 0.031505023956298825, 0.03189078330993653, 0.03163929557800293, 0.03168963241577148, 0.031703264236450195, 0.03142223930358887, 0.03160201644897461, 0.0315644474029541, 0.03175628852844238, 0.03162495994567871, 0.03153945541381836, 0.03154025650024414, 0.03149718475341797, 0.03217859268188476, 0.03183244705200195, 0.031055423736572267, 0.0311013126373291, 0.031160383224487304, 0.03100262451171875, 0.031158432006835938, 0.031243104934692383, 0.03129651260375976, 0.031342336654663086, 0.030871519088745115, 0.030935487747192382, 0.03087139129638672, 0.030980096817016602, 0.030983360290527343, 0.03101139259338379, 0.03099430465698242, 0.03127948760986328, 0.031309280395507816, 0.031748640060424806, 0.031191072463989257, 0.03176364707946777, 0.031216352462768555, 0.031342144012451174, 0.033156768798828125, 0.031357791900634764, 0.031318016052246093, 0.03122790336608887, 0.03133846473693848, 0.03190764808654785, 0.03149231910705566, 0.031406080245971676, 0.03126249694824219, 0.03192048072814942, 0.03195276832580567, 0.03171123123168945, 0.031637344360351566, 0.031969247817993166, 0.031639232635498046, 0.03182028770446777, 0.03152835273742676, 0.03162991905212403, 0.03147776031494141, 0.03158639907836914, 0.03177872085571289, 0.03158835220336914, 0.031534912109375, 0.031801536560058595, 0.03172944068908692, 0.03177084732055664, 0.03165184020996094, 0.03190169525146484, 0.031757823944091795, 0.03149081611633301, 0.03173964881896973, 0.03162467193603516, 0.031709728240966795, 0.03161494445800781, 0.03192972755432129, 0.03184854316711426, 0.03188534355163574, 0.03180803108215332, 0.03183395195007324, 0.03249929428100586, 0.03209379196166992, 0.03187167930603027, 0.03229647827148437, 0.03200454330444336, 0.03237206268310547, 0.03196537590026855, 0.03162803268432617, 0.03174928092956543, 0.03185545539855957, 0.03177267265319824, 0.03174748802185059, 0.031758943557739255, 0.03184771156311035, 0.031806175231933596, 0.03212083053588867, 0.03165369606018066, 0.03164780807495117, 0.03184448051452637, 0.03173990440368652, 0.03164159965515137, 0.03160016059875488, 0.03178339195251465, 0.03169244766235352, 0.03179692840576172, 0.03141904067993164, 0.0317174072265625, 0.031578079223632816, 0.03139529609680176, 0.03161664009094238, 0.03176950454711914, 0.031473663330078124, 0.031735807418823245, 0.03162675285339355, 0.031690816879272464, 0.0319431037902832, 0.03150028800964356, 0.03151603126525879, 0.031526912689208986, 0.0334381103515625, 0.03164172744750977, 0.031578208923339846, 0.0319180793762207, 0.03181484794616699, 0.031628095626831054, 0.031528959274291994, 0.03275571060180664, 0.031757535934448244, 0.03179513549804688, 0.031709888458251956, 0.031637535095214844, 0.03177484893798828, 0.03188531112670898, 0.03179110336303711, 0.03174393653869629, 0.03167852783203125, 0.03181977653503418, 0.03154944038391113, 0.031878656387329105, 0.03164620780944824, 0.03151180839538574, 0.03153791999816895, 0.03201228713989258, 0.03237923049926758, 0.03197830390930176, 0.03226729583740234, 0.03398617553710938, 0.03193065643310547, 0.03179680061340332, 0.03162771224975586, 0.03191168022155762, 0.032305217742919924, 0.0319073600769043, 0.03175820732116699, 0.03171350479125976, 0.03174831962585449, 0.03193276786804199, 0.03158220863342285, 0.03157811164855957, 0.031528959274291994, 0.03178700828552246, 0.031555456161499025, 0.03170688056945801, 0.03218675231933594, 0.03280831909179688, 0.03151116752624512, 0.03185417556762695, 0.03162063980102539, 0.03152288055419922, 
0.032045886993408206, 0.031825599670410154, 0.031670591354370115, 0.0320552978515625, 0.03153510475158691, 0.03162521553039551, 0.031524864196777344, 0.03157196807861328, 0.03159244728088379, 0.031653663635253904, 0.031705312728881836, 0.031660032272338864, 0.03155353546142578, 0.031641504287719724, 0.032366687774658204, 0.0318026237487793, 0.031736095428466796, 0.03171958351135254, 0.03173388862609863, 0.03171142387390137, 0.03168172836303711, 0.03153113555908203, 0.03158188819885254, 0.03149651145935058, 0.031547231674194334, 0.03165065574645996, 0.0314654712677002, 0.03137945556640625, 0.0315160961151123, 0.03151663970947265, 0.031468128204345705, 0.03150351905822754, 0.03142563247680664, 0.0312619514465332, 0.0315248966217041, 0.03157244873046875, 0.03136511993408203, 0.032202720642089844, 0.03140403175354004, 0.03168841552734375, 0.03179270362854004, 0.03165462493896484, 0.03187862396240235, 0.031425151824951175, 0.031561344146728516, 0.03165350341796875, 0.03166275215148926, 0.03142201614379883, 0.03155548858642578, 0.031618719100952146, 0.031631488800048825, 0.03160550308227539, 0.03167008018493652, 0.03170528030395508, 0.03168371200561523, 0.03160972785949707, 0.03181766319274902, 0.03178825569152832, 0.03164246368408203, 0.031664127349853514, 0.03191577529907227, 0.03189545631408691, 0.031672672271728514, 0.03187004852294922, 0.031669151306152346, 0.031811359405517575, 0.031909536361694336, 0.03190227127075195, 0.032050846099853515, 0.03173999977111817, 0.03182003211975098, 0.03196723175048828, 0.03173686408996582, 0.0318002872467041, 0.03169484710693359, 0.03182159996032715, 0.03160073661804199, 0.03155161666870117, 0.03162931251525879, 0.03154934310913086, 0.031596063613891605, 0.031533632278442386, 0.03201804733276367, 0.03143075180053711, 0.03150876808166504, 0.03134668731689453, 0.031322111129760744, 0.031344512939453124, 0.03140006446838379, 0.03141567993164063, 0.031576704025268555, 0.031719423294067385, 0.03144067192077637, 0.031475936889648434, 0.031548799514770506, 0.03160716819763183, 0.03151619148254395, 0.03164384078979492, 0.031524768829345705, 0.03138009643554687]",tokens/s,31.643638739988457,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8213.446656,11251.089408,0.0,10848.567296,10616.027648,s,1,14.78121484375,14.78121484375,0.0,14.78121484375,14.78121484375,14.78121484375,14.78121484375,[14.78121484375],,kWh,0.0002216113200000071,2.4437422025175433e-05,6.652866433399726e-05,0.0003125774063591798,,MB,4051.382272,11672.61696,0.0,11255.414784,11070.470656,s,10,3.7072559814453125,0.37072559814453127,0.0016514955874063807,0.37090721130371096,0.3719229248046875,0.37241461791992186,0.3728079724121094,"[0.36627883911132814, 0.3705299377441406, 0.37290631103515626, 0.37088775634765625, 0.37029959106445315, 0.37181365966796875, 0.3712012939453125, 0.3709266662597656, 0.37174774169921876, 
0.3706641845703125]",tokens/s,690.5376949454559,kWh,1.0849438628086383e-05,1.1964937800458096e-06,7.175890514370403e-06,1.9221822922502597e-05,tokens/kWh,13318195.731597653,MB,4055.629824,11674.714112,0.0,11257.511936,11070.473216,s,10,28.617343505859377,2.861734350585938,0.004188017939021872,2.8611508789062503,2.8643912841796872,2.868191125488281,2.871230998535156,"[2.85651708984375, 2.860181884765625, 2.859625732421875, 2.8588779296875, 2.857465087890625, 2.86347802734375, 2.862119873046875, 2.8635400390625, 2.863546875, 2.871990966796875]",tokens/s,22.01462200259811,kWh,8.376885722149708e-05,9.239831935810024e-06,5.579215986002944e-05,0.00014880084901733656,tokens/kWh,423384.6810421086,,s,630,28.614166778564442,0.045419312346927705,0.0004494754846813241,0.045390863418579105,0.04581254425048828,0.04596101360321045,0.04755982044219971,"[0.04830428695678711, 0.04540620803833008, 0.04493068695068359, 0.04485718536376953, 0.04479619216918945, 0.044742912292480466, 0.04469721603393555, 0.044810558319091795, 0.04508790588378906, 0.04485123062133789, 0.04474969482421875, 0.04497817611694336, 0.04544112014770508, 0.045104736328125, 0.04486380767822266, 0.04478275299072266, 0.04503577423095703, 0.0453392333984375, 0.04504576110839844, 0.04490358352661133, 0.045284191131591794, 0.04603084945678711, 0.04566339111328125, 0.04521609497070313, 0.045160961151123044, 0.04541219329833984, 0.04595119857788086, 0.04471388626098633, 0.04493046569824219, 0.04514879989624023, 0.045453311920166016, 0.04497817611694336, 0.045010433197021485, 0.04525270462036133, 0.04545577621459961, 0.0451641616821289, 0.04518739318847656, 0.04590188980102539, 0.04550409698486328, 0.045259166717529296, 0.04530147171020508, 0.045880897521972656, 0.04572246551513672, 0.04556592178344727, 0.04542051315307617, 0.04537897491455078, 0.04573238372802734, 0.0452935676574707, 0.04517679977416992, 0.04546694564819336, 0.04595990371704101, 0.045281280517578126, 0.045367294311523435, 0.04532015991210937, 0.04519670486450195, 0.04533110427856445, 0.04555513763427734, 0.04578732681274414, 0.045805919647216795, 0.04524863815307617, 0.04544063949584961, 0.04572390365600586, 0.045780990600585936, 0.04852131271362305, 0.04556579208374024, 0.04495171356201172, 0.04478771209716797, 0.044987838745117185, 0.04501497650146485, 0.0446673583984375, 0.04485062408447266, 0.04492790222167969, 0.045076286315917966, 0.04497315216064453, 0.04473334503173828, 0.04503327941894531, 0.044998302459716796, 0.04537343978881836, 0.04535145568847656, 0.04560067367553711, 0.045570144653320314, 0.0453570556640625, 0.04501913452148437, 0.04542009735107422, 0.04616236877441406, 0.045848575592041016, 0.045464736938476566, 0.0451572151184082, 0.045195262908935545, 0.04518307113647461, 0.04511449432373047, 0.04525279998779297, 0.045174751281738285, 0.04525878524780273, 0.044941921234130856, 0.04503756713867187, 0.04516454315185547, 0.04554326248168945, 0.04543401718139648, 0.045663230895996096, 0.04554342269897461, 0.04534886550903321, 0.04566377639770508, 0.0456135368347168, 0.045723648071289064, 0.04587216186523437, 0.04577788925170898, 0.04577487945556641, 0.04566742324829102, 0.045461727142333985, 0.04508902359008789, 0.04537705612182617, 0.04538252639770508, 0.04531216049194336, 0.04522991943359375, 0.04546559906005859, 0.04544326400756836, 0.04534457778930664, 0.04507638549804688, 0.04575187301635742, 0.04556451034545898, 0.045432769775390625, 0.04547993469238281, 0.045485729217529296, 0.0457874870300293, 0.04583395385742187, 0.04750140762329102, 0.04535385513305664, 
0.04499216079711914, 0.0449760627746582, 0.044872097015380856, 0.044875774383544925, 0.044622879028320316, 0.044981216430664064, 0.04487977600097656, 0.04479939270019531, 0.04492489624023437, 0.04531840133666992, 0.045181407928466796, 0.04492083358764649, 0.04523212814331055, 0.04553113555908203, 0.04565606307983398, 0.04558643341064453, 0.045338623046875, 0.04547788619995117, 0.04602265548706055, 0.04574425506591797, 0.045246337890625, 0.04512102508544922, 0.045476448059082034, 0.045393310546875, 0.045070846557617186, 0.04501708984375, 0.04520307159423828, 0.044990848541259766, 0.0450334701538086, 0.045039264678955075, 0.04523379135131836, 0.04524303817749024, 0.04530380630493164, 0.04539398574829102, 0.045601951599121095, 0.04558729553222656, 0.04522393417358399, 0.04531814575195312, 0.04614553451538086, 0.045906974792480466, 0.04572835159301758, 0.04569935989379883, 0.04566819381713867, 0.045559902191162106, 0.04525475311279297, 0.04526454544067383, 0.04568310546875, 0.0454774398803711, 0.0452182731628418, 0.045264289855957034, 0.04542316818237305, 0.045297664642333986, 0.04536064147949219, 0.045427200317382815, 0.04573593521118164, 0.04583155059814453, 0.045582977294921875, 0.04548172760009766, 0.04537164688110352, 0.04590387344360351, 0.04576425552368164, 0.04764640045166016, 0.045093185424804685, 0.04489731216430664, 0.04479647827148438, 0.04505641555786133, 0.044744705200195314, 0.04465011215209961, 0.04489459228515625, 0.044994110107421874, 0.04483321762084961, 0.044655616760253904, 0.04495872116088867, 0.045363166809082034, 0.045131072998046876, 0.04508086395263672, 0.045297664642333986, 0.04568431854248047, 0.04513631820678711, 0.04511276626586914, 0.0453392333984375, 0.045541374206542966, 0.04551718521118164, 0.04516659164428711, 0.04519472122192383, 0.045453857421875, 0.045230079650878906, 0.04513801574707031, 0.04502518463134766, 0.04522358322143555, 0.045451614379882814, 0.045182430267333984, 0.04502399826049805, 0.04518608093261719, 0.04546022415161133, 0.045331489562988284, 0.045122528076171876, 0.045185089111328125, 0.04811148834228516, 0.04522921752929687, 0.045362239837646486, 0.04595833587646484, 0.04567292785644531, 0.04534700775146484, 0.045360576629638674, 0.045781566619873044, 0.045571937561035156, 0.045170848846435546, 0.04516454315185547, 0.04538982391357422, 0.04519475173950195, 0.04544681549072266, 0.04566470336914062, 0.04566672134399414, 0.04543814468383789, 0.04572153472900391, 0.04568972778320313, 0.045486080169677735, 0.04532633590698242, 0.04539788818359375, 0.04543091201782227, 0.045856769561767576, 0.045852127075195315, 0.04548662567138672, 0.04678601455688477, 0.04475913619995117, 0.04471139144897461, 0.04467366409301758, 0.04503919982910156, 0.04473523330688477, 0.04476054382324219, 0.04502582550048828, 0.04538140869140625, 0.04501478576660156, 0.04484524917602539, 0.045209888458251954, 0.04528451156616211, 0.04505072021484375, 0.04505535888671875, 0.045120094299316404, 0.04513180923461914, 0.04506623840332031, 0.045151935577392575, 0.04548230361938477, 0.04576870346069336, 0.045486080169677735, 0.04511235046386719, 0.045017440795898436, 0.04498495864868164, 0.04555311965942383, 0.045158977508544924, 0.04508169555664063, 0.04502531051635742, 0.04541526412963867, 0.04523379135131836, 0.045369728088378906, 0.0452935676574707, 0.04517027282714844, 0.045470016479492184, 0.04556198501586914, 0.045292606353759764, 0.045636512756347655, 0.04529328155517578, 0.045185310363769535, 0.04567801666259766, 0.04591788864135742, 0.04563443374633789, 
0.045535232543945314, 0.045416255950927735, 0.045967552185058595, 0.045418495178222655, 0.04553468704223633, 0.04574262237548828, 0.045526302337646485, 0.04521420669555664, 0.045389503479003904, 0.045781505584716796, 0.045717281341552736, 0.04528358459472656, 0.045426334381103516, 0.04573603057861328, 0.045590782165527345, 0.04550041580200195, 0.04544716644287109, 0.04576169586181641, 0.04576752090454102, 0.04577280044555664, 0.04681523132324219, 0.045842048645019534, 0.044943550109863284, 0.0450684814453125, 0.044886016845703126, 0.04481539154052734, 0.044865535736083983, 0.04500976181030274, 0.04484505462646484, 0.045154430389404296, 0.04534272003173828, 0.0452628173828125, 0.04495724868774414, 0.045144542694091794, 0.04514806365966797, 0.04558227157592774, 0.04557020950317383, 0.04513564682006836, 0.045168094635009766, 0.04581043243408203, 0.04557209777832031, 0.04554751968383789, 0.04542649459838867, 0.04544633483886719, 0.045456382751464845, 0.04524364852905274, 0.045179393768310545, 0.04518118286132813, 0.04547772979736328, 0.045088897705078124, 0.04529359817504883, 0.04545491027832031, 0.045467552185058595, 0.04535919952392578, 0.04523628616333008, 0.04649148941040039, 0.04526953506469727, 0.04531609725952149, 0.04555788803100586, 0.04598278427124024, 0.04567942428588867, 0.04544655990600586, 0.045666431427001955, 0.04565449523925781, 0.04542259216308594, 0.0456703987121582, 0.045676544189453126, 0.04547795104980469, 0.04536313629150391, 0.04526620864868164, 0.04572438430786133, 0.045841407775878903, 0.045290496826171874, 0.0453570556640625, 0.04573084640502929, 0.04575664138793945, 0.04549299240112305, 0.04548339080810547, 0.04551129531860352, 0.04577679824829101, 0.04575651168823242, 0.045902976989746096, 0.04579008102416992, 0.04781260681152344, 0.04546355056762695, 0.04528537750244141, 0.04482457733154297, 0.04470495986938477, 0.04492780685424805, 0.04492710494995117, 0.04504358291625977, 0.04470579147338867, 0.04485887908935547, 0.04506265640258789, 0.04529151916503906, 0.04488191986083984, 0.04529558563232422, 0.04509027099609375, 0.04512825775146485, 0.045266529083251954, 0.045582752227783206, 0.04554905700683594, 0.04549052810668945, 0.045407936096191405, 0.04519164657592773, 0.045409950256347656, 0.04557398223876953, 0.04536371231079102, 0.04499456024169922, 0.04519116973876953, 0.04511334228515625, 0.045303359985351566, 0.045255104064941404, 0.045153377532958984, 0.045391902923583985, 0.045491073608398436, 0.04524236679077148, 0.04521993637084961, 0.04557609558105469, 0.04532633590698242, 0.04545273590087891, 0.04564019012451172, 0.04592851257324219, 0.045709022521972655, 0.045682945251464845, 0.045625377655029296, 0.04585647964477539, 0.04572598266601562, 0.04539507293701172, 0.045603710174560545, 0.0454447021484375, 0.04526326370239258, 0.04524358367919922, 0.045439647674560546, 0.04570272064208984, 0.045606815338134765, 0.04538643264770508, 0.04542390441894531, 0.04566425704956055, 0.045609695434570316, 0.045717601776123044, 0.04575328063964844, 0.04584908676147461, 0.04578684616088867, 0.045763328552246095, 0.04615718460083008, 0.04758367919921875, 0.045778942108154294, 0.04489011383056641, 0.04472217559814453, 0.044717281341552735, 0.04495439910888672, 0.04493856048583984, 0.04480790328979492, 0.04466787338256836, 0.04515404891967773, 0.04529587173461914, 0.045216896057128905, 0.044985214233398435, 0.045006847381591795, 0.04565401458740234, 0.04573295974731445, 0.045321247100830075, 0.04523187255859375, 0.04531577682495117, 0.04539836883544922, 0.0450601921081543, 
0.04533657455444336, 0.04578236770629883, 0.04548255920410156, 0.04542438507080078, 0.04542031860351563, 0.04520198440551758, 0.04498636627197266, 0.04498339080810547, 0.04501724624633789, 0.045404769897460937, 0.04522588729858398, 0.045254913330078125, 0.045466976165771486, 0.045478721618652344, 0.04527907180786133, 0.045901630401611326, 0.04586105728149414, 0.04568473434448242, 0.04595916748046875, 0.045604705810546875, 0.04551446533203125, 0.045625377655029296, 0.045580703735351565, 0.04565366363525391, 0.0456376953125, 0.0457562255859375, 0.045347297668457034, 0.045279232025146485, 0.04522598266601562, 0.04564287948608398, 0.045385761260986326, 0.04546441650390625, 0.0456888313293457, 0.04560486221313476, 0.04540143966674805, 0.04566697692871094, 0.04596700668334961, 0.045767009735107424, 0.046088191986083986, 0.04610166549682617, 0.04579414367675781, 0.045854305267333986, 0.04833100891113281, 0.04544515228271485, 0.045192577362060546, 0.045154945373535156, 0.0448526725769043, 0.04479238510131836, 0.044776737213134764, 0.04490518569946289, 0.04517881774902344, 0.04498233413696289, 0.04500275039672851, 0.04518262481689453, 0.04525833511352539, 0.04509894561767578, 0.045370174407958985, 0.045230079650878906, 0.04554956817626953, 0.04545536041259766, 0.045123584747314455, 0.04517174530029297, 0.045781982421875, 0.04576051330566406, 0.04542668914794922, 0.04532223892211914, 0.045428737640380856, 0.045497566223144534, 0.045380382537841796, 0.04549836730957031, 0.04526694488525391, 0.04500300979614258, 0.04496063995361328, 0.0449986572265625, 0.04541846466064453, 0.04528425598144531, 0.0453221435546875, 0.0454771842956543, 0.04589852905273437, 0.04563270568847656, 0.04526918411254883, 0.045392513275146484, 0.04586454391479492, 0.045513118743896484, 0.04559830474853516, 0.045539871215820316, 0.045594497680664064, 0.04534230422973633, 0.04560240173339844, 0.04563027191162109, 0.04541439819335937, 0.045776031494140626, 0.04516540908813477, 0.04527632141113281, 0.045624160766601564, 0.04558233642578125, 0.0452784309387207, 0.04540444946289063, 0.045703678131103515, 0.046045185089111325, 0.04572774505615235, 0.04566444778442383, 0.045758113861083985, 0.04578524780273437, 0.046268383026123044, 0.046811134338378906, 0.04522188949584961, 0.045080352783203125, 0.04491846466064453, 0.045068832397460935, 0.04489731216430664, 0.04498732757568359, 0.045348831176757816, 0.04520294570922852, 0.04496559906005859, 0.04538851165771484, 0.045674625396728515, 0.045532958984375, 0.04520982360839844, 0.04527004623413086, 0.04551676940917969, 0.045369823455810546, 0.04522185516357422, 0.04580614471435547, 0.04616191864013672, 0.04594623947143555, 0.04557888031005859, 0.045649921417236325, 0.04531228637695312, 0.04554665756225586, 0.045075103759765624, 0.04520924758911133, 0.04553855895996094, 0.045342880249023436, 0.045099327087402344, 0.04544527816772461, 0.04565030288696289, 0.045765918731689455, 0.04544790267944336, 0.04532611083984375, 0.045588577270507816, 0.04580774307250977, 0.04545721435546875, 0.045571422576904295, 0.04586172866821289, 0.04584646224975586, 0.045809726715087894, 0.04618854522705078, 0.046015777587890626, 0.04588598251342774, 0.046145408630371094, 0.045676864624023435, 0.045414241790771484, 0.045698814392089844, 0.045541408538818356, 0.045676769256591795, 0.04573404693603516, 0.04576665496826172, 0.04573523330688477, 0.045717823028564454, 0.045468032836914064, 0.045581630706787106, 0.04596192169189453, 0.04592844772338867, 0.04575027084350586, 0.045991519927978515, 0.04607567977905273, 
0.04611340713500976]",tokens/s,22.017066052468383,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 57452 has 14.69 GiB memory in use. Of the allocated memory 14.29 GiB is allocated by PyTorch, and 313.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4049.547264,4705.878016,0.0,4303.355904,4034.388992,s,1,10.5053056640625,10.5053056640625,0.0,10.5053056640625,10.5053056640625,10.5053056640625,10.5053056640625,[10.5053056640625],,kWh,9.060257018750993e-05,9.986510360185263e-06,2.905224546401186e-05,0.00012964132601170705,,MB,3917.53728,4764.598272,0.0,4347.396096,4202.566656,s,10,1.9243718109130858,0.19243718109130858,0.00040031148293792255,0.1924959487915039,0.1928400894165039,0.19286035232543947,0.1928765626525879,"[0.19269830322265624, 0.192880615234375, 0.19146934509277344, 0.19256825256347657, 0.1922827911376953, 0.192193603515625, 0.1927659149169922, 0.19242364501953124, 0.19225375366210937, 0.19283558654785157]",tokens/s,1330.3042507078287,kWh,5.646056747916907e-06,6.226544424135589e-07,3.746359834692209e-06,1.0015071025022675e-05,tokens/kWh,25561476.235204272,MB,3921.03936,4764.598272,0.0,4347.396096,4213.08416,s,10,16.147903686523435,1.6147903686523435,0.010170812412002548,1.6120728759765623,1.624737951660156,1.6318188049316407,1.6374834875488282,"[1.638899658203125, 1.6087904052734374, 1.6083995361328125, 1.6195789794921875, 1.6172752685546874, 1.6153553466796875, 1.6231644287109375, 1.60288037109375, 1.60859912109375, 1.6049605712890624]",tokens/s,39.01435209362683,kWh,4.734075092541211e-05,5.22142910041727e-06,3.1445140540706935e-05,8.400732056653631e-05,tokens/kWh,749934.6434945763,,s,630,16.145550596237186,0.02562785808926537,0.0004553077857957729,0.025504623413085938,0.026056082916259767,0.02633349266052246,0.02722982696533203,"[0.026589183807373046, 0.025815040588378906, 0.02573244857788086, 0.025702688217163087, 0.025694591522216797, 0.02572470474243164, 0.02539952087402344, 0.025491167068481445, 0.02621779251098633, 0.025868671417236328, 0.02543267250061035, 0.02533785629272461, 0.02568806457519531, 0.025462783813476563, 0.0254682559967041, 0.0257391357421875, 0.025658143997192382, 0.0263404483795166, 0.02573807907104492, 0.027848480224609375, 0.026476831436157228, 0.026202112197875976, 0.02614886474609375, 0.026212352752685547, 0.026259456634521484, 0.026279552459716797, 0.025964544296264647, 0.026038623809814452, 0.026210336685180663, 0.02632499122619629, 0.02595430374145508, 0.02584774398803711, 0.02597887992858887, 0.02582943916320801, 0.025824735641479492, 0.025866783142089844, 0.025825056076049804, 0.025999584197998048, 0.025948160171508788, 0.02591948890686035, 0.025903104782104492, 0.025968639373779297, 0.026179584503173828, 
0.02637824058532715, 0.026036191940307617, 0.02601091194152832, 0.025989343643188476, 0.026505760192871094, 0.02764912033081055, 0.025809823989868166, 0.02602969551086426, 0.02605708885192871, 0.025931776046752928, 0.026179584503173828, 0.02591744041442871, 0.02668339157104492, 0.02613043212890625, 0.02611199951171875, 0.026055999755859375, 0.025881280899047853, 0.02599635124206543, 0.02564352035522461, 0.025571775436401368, 0.026587583541870116, 0.02582486343383789, 0.02576742362976074, 0.025483776092529296, 0.025415103912353517, 0.025228256225585936, 0.025274368286132814, 0.025314847946166993, 0.02549193572998047, 0.02556723213195801, 0.025480480194091798, 0.025481952667236327, 0.025643007278442383, 0.025507776260375977, 0.025473087310791016, 0.026206207275390626, 0.025452415466308595, 0.02530726432800293, 0.025413631439208984, 0.02539708709716797, 0.025338016510009765, 0.025304224014282225, 0.025475936889648436, 0.025117727279663087, 0.026063711166381835, 0.02570863914489746, 0.02526620864868164, 0.02524278450012207, 0.025313791275024415, 0.025565088272094725, 0.025397695541381837, 0.02532691192626953, 0.025327775955200197, 0.02535593605041504, 0.025484159469604493, 0.02586128044128418, 0.02570460891723633, 0.025830080032348633, 0.025464544296264647, 0.025532384872436524, 0.025536415100097656, 0.025722431182861327, 0.025354944229125976, 0.025435359954833984, 0.025380992889404298, 0.025385791778564454, 0.025644800186157227, 0.02535215950012207, 0.025295135498046874, 0.025440256118774415, 0.025431392669677734, 0.025637535095214842, 0.02533955192565918, 0.025401695251464844, 0.02532966423034668, 0.025933439254760743, 0.02544883155822754, 0.025343360900878905, 0.02553830337524414, 0.026090368270874024, 0.026381471633911132, 0.02558652877807617, 0.025558111190795898, 0.0264769287109375, 0.025665536880493164, 0.025686016082763673, 0.025491455078125, 0.025686016082763673, 0.025700319290161134, 0.02543414306640625, 0.02531532859802246, 0.025792512893676758, 0.025527711868286132, 0.025401952743530274, 0.02535385513305664, 0.0253986873626709, 0.025498592376708984, 0.025377920150756836, 0.02534694480895996, 0.025400384902954102, 0.025410303115844725, 0.025415327072143553, 0.025379360198974608, 0.02533171272277832, 0.02549478340148926, 0.025606815338134765, 0.02598921585083008, 0.025589216232299806, 0.025600351333618165, 0.02539743995666504, 0.025501184463500977, 0.025584096908569335, 0.025355615615844727, 0.02548396873474121, 0.025433631896972658, 0.02612272071838379, 0.02529484748840332, 0.0253439998626709, 0.025233407974243165, 0.025413631439208984, 0.025268192291259765, 0.025331743240356447, 0.02530713653564453, 0.02541676712036133, 0.025379711151123047, 0.02541164779663086, 0.02560185623168945, 0.026005023956298827, 0.025600671768188477, 0.02536857604980469, 0.025378143310546875, 0.025764511108398436, 0.025444351196289062, 0.025504831314086915, 0.025561088562011718, 0.025366687774658205, 0.025547136306762697, 0.025622943878173828, 0.02550579261779785, 0.025405311584472658, 0.025349439620971678, 0.025320255279541015, 0.025366527557373047, 0.025267744064331056, 0.025335552215576172, 0.027119552612304688, 0.026318239212036132, 0.027213695526123047, 0.02544304084777832, 0.025366527557373047, 0.025384960174560548, 0.02536857604980469, 0.025767295837402344, 0.02538764762878418, 0.025568639755249024, 0.025475135803222658, 0.025337760925292968, 0.025362239837646485, 0.025514848709106447, 0.02545254325866699, 0.025581567764282227, 0.02574131202697754, 0.02588467216491699, 0.02571459197998047, 
0.02575574493408203, 0.02547711944580078, 0.025452384948730467, 0.025343391418457033, 0.02656092834472656, 0.02689468765258789, 0.025868288040161135, 0.025778175354003906, 0.025503744125366212, 0.025574687957763673, 0.025655424118041992, 0.025811552047729492, 0.026191551208496092, 0.026286399841308594, 0.025761119842529295, 0.025827999114990233, 0.025823232650756835, 0.025593856811523437, 0.025616384506225585, 0.02618124771118164, 0.02700694465637207, 0.02553891181945801, 0.025655359268188477, 0.025380863189697265, 0.025407487869262696, 0.025391103744506836, 0.02549456024169922, 0.025445056915283204, 0.025748960494995116, 0.025406272888183593, 0.025397119522094728, 0.025464767456054686, 0.02541564750671387, 0.025352415084838868, 0.025761568069458007, 0.025354015350341798, 0.026095775604248046, 0.026177248001098632, 0.02576233673095703, 0.025624607086181642, 0.025531776428222658, 0.025449087142944336, 0.025624576568603515, 0.025413631439208984, 0.025608192443847655, 0.02675071907043457, 0.02598956871032715, 0.030310400009155275, 0.025858047485351563, 0.02558729553222656, 0.025467071533203125, 0.025485536575317384, 0.025384288787841797, 0.025445024490356447, 0.025347583770751952, 0.025274879455566408, 0.025436159133911132, 0.025452287673950195, 0.02528019142150879, 0.025399871826171875, 0.02533523178100586, 0.025390975952148436, 0.025157760620117188, 0.025184831619262694, 0.025266176223754884, 0.025436159133911132, 0.025335712432861326, 0.025314592361450197, 0.025396032333374022, 0.02535628890991211, 0.025636863708496094, 0.025987071990966795, 0.025448448181152345, 0.02532966423034668, 0.02530633544921875, 0.025389856338500976, 0.025222368240356445, 0.025497568130493163, 0.025276607513427734, 0.025475711822509767, 0.025452415466308595, 0.02531283187866211, 0.025459104537963868, 0.025614271163940428, 0.025426143646240233, 0.026124223709106446, 0.02723641586303711, 0.02675916862487793, 0.025991167068481445, 0.025868064880371095, 0.025888383865356444, 0.025678272247314452, 0.02558172798156738, 0.025646976470947266, 0.02561590385437012, 0.025567680358886718, 0.025552255630493164, 0.025576223373413087, 0.02634067153930664, 0.025487680435180664, 0.025561471939086915, 0.025591615676879884, 0.0253986873626709, 0.02543619155883789, 0.025682687759399414, 0.025444351196289062, 0.026024991989135743, 0.025512928009033202, 0.02669545555114746, 0.02605683135986328, 0.02576803207397461, 0.025703935623168944, 0.02565715217590332, 0.02555673599243164, 0.025610624313354494, 0.025995391845703125, 0.02545644760131836, 0.02532969665527344, 0.025810783386230468, 0.02576665687561035, 0.025858047485351563, 0.025849472045898436, 0.02694710350036621, 0.02546774482727051, 0.025486976623535156, 0.02538470458984375, 0.02548134422302246, 0.02540390396118164, 0.025462783813476563, 0.02547302436828613, 0.025575103759765624, 0.025566816329956055, 0.025518815994262697, 0.025581567764282227, 0.025548799514770508, 0.025632768630981444, 0.02549760055541992, 0.025443872451782226, 0.025458879470825195, 0.025301279067993163, 0.02535628890991211, 0.025415552139282226, 0.025534591674804687, 0.025628671646118165, 0.02550579261779785, 0.02538447952270508, 0.025745887756347657, 0.02541494369506836, 0.02569260787963867, 0.025516319274902343, 0.025427167892456054, 0.02549839973449707, 0.025655296325683592, 0.025499616622924805, 0.025731103897094727, 0.02545254325866699, 0.025495552062988282, 0.025405439376831054, 0.025284608840942382, 0.025309183120727538, 0.026177536010742186, 0.025394271850585938, 0.025395263671875, 0.02539401626586914, 
0.02712166404724121, 0.026046464920043946, 0.026419008255004883, 0.0255797119140625, 0.025395200729370116, 0.025390592575073243, 0.025512447357177736, 0.02693529510498047, 0.0259849910736084, 0.025601600646972655, 0.025695968627929687, 0.025638784408569336, 0.025590240478515627, 0.02560451126098633, 0.025571231842041017, 0.02567532730102539, 0.025614463806152343, 0.025617984771728514, 0.025445215225219725, 0.02553446388244629, 0.025753599166870117, 0.02597996711730957, 0.027271583557128908, 0.025790496826171874, 0.025719295501708983, 0.025761632919311522, 0.025667295455932618, 0.02593404769897461, 0.02557734489440918, 0.025558687210083007, 0.026364448547363282, 0.02575974464416504, 0.02568822479248047, 0.02566092872619629, 0.025540159225463866, 0.025762752532958986, 0.025779264450073242, 0.02583033561706543, 0.02582022476196289, 0.026022655487060547, 0.02559814453125, 0.025526079177856445, 0.025702592849731445, 0.025618431091308593, 0.025655296325683592, 0.025609407424926758, 0.025941951751708985, 0.02565519905090332, 0.025805791854858397, 0.025837568283081053, 0.025736288070678712, 0.025781152725219726, 0.026162208557128905, 0.02577916717529297, 0.02568383979797363, 0.02566771125793457, 0.025587711334228515, 0.025600000381469725, 0.02570649528503418, 0.02590716743469238, 0.025894943237304686, 0.02565488052368164, 0.025766239166259766, 0.02563283157348633, 0.025415327072143553, 0.025416032791137695, 0.025694047927856446, 0.025466976165771486, 0.02555196762084961, 0.026119136810302736, 0.026467807769775392, 0.02559030342102051, 0.025497087478637694, 0.025626815795898438, 0.025464895248413087, 0.025384384155273436, 0.02529475212097168, 0.02526095962524414, 0.02529689598083496, 0.025296415328979492, 0.02548784065246582, 0.0252044792175293, 0.02523366355895996, 0.026271583557128907, 0.02565750312805176, 0.02589891242980957, 0.02577824020385742, 0.025497152328491212, 0.025314943313598633, 0.02517897605895996, 0.02509004783630371, 0.025184032440185546, 0.025233631134033204, 0.025143295288085937, 0.025298656463623045, 0.02542620849609375, 0.025290752410888673, 0.025231359481811523, 0.025247743606567383, 0.02512643241882324, 0.025202592849731444, 0.025135679244995116, 0.025169919967651368, 0.02517363166809082, 0.02518668746948242, 0.025468896865844727, 0.025412927627563475, 0.025277151107788085, 0.02528665542602539, 0.025744640350341796, 0.02557209587097168, 0.025423360824584962, 0.025362783432006836, 0.025297056198120116, 0.02524959945678711, 0.025221311569213867, 0.025364479064941405, 0.025279584884643554, 0.02537276840209961, 0.02549843215942383, 0.025480287551879883, 0.025495904922485352, 0.02551456069946289, 0.02547068786621094, 0.025452384948730467, 0.02557353591918945, 0.025667423248291014, 0.025577375411987305, 0.025647647857666017, 0.02575155258178711, 0.025995264053344725, 0.02569830322265625, 0.025649120330810547, 0.026170495986938477, 0.025709440231323242, 0.02531724739074707, 0.02529827117919922, 0.025836256027221678, 0.025534528732299805, 0.02530713653564453, 0.02530713653564453, 0.02583683204650879, 0.025485408782958983, 0.025414272308349608, 0.0257126407623291, 0.025298944473266603, 0.025657344818115234, 0.025587711334228515, 0.025384767532348633, 0.025523744583129882, 0.025445024490356447, 0.025534271240234375, 0.025542367935180665, 0.025450912475585938, 0.02544646453857422, 0.025421632766723632, 0.025479360580444334, 0.025502912521362303, 0.02562678337097168, 0.025623199462890624, 0.02546073532104492, 0.02531328010559082, 0.02591948890686035, 0.026048511505126954, 
0.02550783920288086, 0.025405439376831054, 0.02545254325866699, 0.025290752410888673, 0.02521404838562012, 0.0252589111328125, 0.025377920150756836, 0.0253940486907959, 0.025280511856079102, 0.025290431976318358, 0.025464607238769532, 0.025293312072753905, 0.025202016830444336, 0.025417919158935546, 0.025202560424804687, 0.025324159622192383, 0.025325567245483398, 0.02592972755432129, 0.025403392791748046, 0.025319391250610352, 0.02544233512878418, 0.025380672454833983, 0.025210271835327147, 0.025170719146728516, 0.025196544647216795, 0.025285728454589845, 0.025098175048828126, 0.02517091178894043, 0.025245023727416993, 0.029876895904541016, 0.026179584503173828, 0.025563135147094726, 0.026474016189575195, 0.025577951431274414, 0.025289951324462892, 0.025190528869628907, 0.025284608840942382, 0.025413791656494142, 0.02524211120605469, 0.02519785690307617, 0.02523209571838379, 0.025389055252075195, 0.025243648529052733, 0.025185279846191407, 0.02527129554748535, 0.025521184921264647, 0.025224159240722657, 0.02567987251281738, 0.025363679885864257, 0.025815263748168945, 0.025395231246948244, 0.025444896697998046, 0.025331615447998047, 0.025393247604370117, 0.02527846336364746, 0.025194496154785157, 0.025542303085327147, 0.02571708869934082, 0.02535628890991211, 0.025410720825195313, 0.025578336715698244, 0.025325567245483398, 0.025438207626342774, 0.025341951370239257, 0.02535628890991211, 0.025384960174560548, 0.025446048736572267, 0.025383264541625976, 0.025597536087036132, 0.025844127655029296, 0.025982303619384764, 0.02550441551208496, 0.025454591751098633, 0.028104703903198244, 0.02593382453918457, 0.02533990478515625, 0.025296255111694334, 0.02536307144165039, 0.025264127731323242, 0.025266176223754884, 0.02525388717651367, 0.025288703918457032, 0.025255519866943358, 0.025239423751831056, 0.025544511795043946, 0.025366687774658205, 0.025299520492553712, 0.02525526428222656, 0.025604768753051756, 0.02560380744934082, 0.02561257553100586, 0.025417375564575195, 0.025428319931030275, 0.02534604835510254, 0.02535628890991211]",tokens/s,39.02003813650216,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4300.55424,4676.517888,0.0,4273.995776,4273.234432,s,1,10.820720703125,10.820720703125,0.0,10.820720703125,10.820720703125,10.820720703125,10.820720703125,[10.820720703125],,kWh,9.512343624586113e-05,1.0485225694639873e-05,2.847835611599625e-05,0.00013408701805649726,,MB,4254.969856,4789.764096,0.0,4372.56192,4345.727488,s,10,1.9644503326416016,0.19644503326416013,0.0008045770852380953,0.1964391174316406,0.19738758087158204,0.1977894401550293,0.1981109275817871,"[0.19536070251464843, 0.1964598388671875, 0.19641839599609376, 0.19528067016601564, 0.19729827880859374, 0.19819129943847658, 0.19622557067871094, 0.1961429443359375, 0.19659730529785155, 0.19647532653808594]",tokens/s,1303.163514731147,kWh,5.765246581454211e-06,6.357933603106546e-07,3.823619616627635e-06,1.0224659558392501e-05,tokens/kWh,25037508.441038772,MB,4261.715968,4804.44416,0.0,4387.241984,4345.730048,s,10,21.512536865234374,2.1512536865234373,0.010980271495375848,2.15154931640625,2.166158349609375,2.1687353027343748,2.170796865234375,"[2.154579345703125, 2.150176025390625, 2.14037841796875, 2.130984130859375, 2.152922607421875, 2.14452099609375, 2.165585693359375, 2.171312255859375, 2.153082763671875, 2.14899462890625]",tokens/s,29.285249059497026,kWh,6.298984471563116e-05,6.947670121471453e-06,4.1460138832571774e-05,0.00011139765366967438,tokens/kWh,565541.5345354837,,s,630,21.509618190765405,0.03414225109645298,0.000738374715947229,0.0340239200592041,0.03446051788330078,0.03507052555084228,0.03745293838500978,"[0.03487631988525391, 0.03398342514038086, 0.034319999694824216, 0.03427555084228515, 0.034092639923095705, 0.03432710266113281, 0.0364315185546875, 0.03405244827270508, 0.03415961456298828, 0.03403459167480469, 0.0341828498840332, 0.03435561752319336, 0.03417497634887695, 0.035665023803710935, 0.03515071868896484, 0.03420959854125977, 0.03441648101806641, 0.034207809448242185, 0.034545120239257814, 0.03427990341186524, 
0.03414169692993164, 0.03402140808105469, 0.03403363037109375, 0.034046817779541015, 0.034085983276367186, 0.03417795181274414, 0.034045951843261715, 0.03407068634033203, 0.03413705444335938, 0.03405503845214844, 0.03409920120239258, 0.03396198272705078, 0.033965473175048826, 0.03403222274780274, 0.03368531036376953, 0.03381212615966797, 0.03388675308227539, 0.03383903884887695, 0.03396614456176758, 0.03398640060424805, 0.0339967041015625, 0.03393356704711914, 0.034045951843261715, 0.03397017669677734, 0.03394985580444336, 0.03405807876586914, 0.0342599983215332, 0.03420217514038086, 0.03416016006469726, 0.03404889678955078, 0.03403308868408203, 0.03396396636962891, 0.03404249572753906, 0.03397129440307617, 0.034270111083984374, 0.034045951843261715, 0.03402342224121094, 0.034187263488769534, 0.03410128021240234, 0.03441430282592774, 0.034533214569091794, 0.034052257537841794, 0.034251007080078125, 0.035145439147949216, 0.0340728645324707, 0.034246974945068356, 0.03398419189453125, 0.03398643112182617, 0.03414643096923828, 0.03407273483276367, 0.03426006317138672, 0.03518112182617188, 0.03406252670288086, 0.03427532958984375, 0.03394559860229492, 0.03402342224121094, 0.03401318359375, 0.033850528717041015, 0.03393990325927734, 0.033925537109375, 0.03373884963989258, 0.0337509765625, 0.033769439697265625, 0.033751041412353515, 0.03396976089477539, 0.034067935943603515, 0.03407097625732422, 0.03409766387939453, 0.033987998962402344, 0.0338089599609375, 0.034002494812011716, 0.03392515182495117, 0.03407302474975586, 0.03415193557739258, 0.03579305648803711, 0.03431238555908203, 0.034213504791259765, 0.034047615051269534, 0.03401206588745117, 0.03397449493408203, 0.03391984176635742, 0.034070911407470705, 0.03390867233276367, 0.034003585815429685, 0.033964031219482424, 0.034105342864990236, 0.03395993423461914, 0.033984481811523436, 0.033929248809814454, 0.033939712524414065, 0.03431808090209961, 0.03420163345336914, 0.03401433563232422, 0.03429462432861328, 0.034202945709228515, 0.03403193664550781, 0.034065792083740234, 0.03403696060180664, 0.03408259201049805, 0.0340398063659668, 0.03463372802734375, 0.03444323348999023, 0.03440006256103516, 0.03407455825805664, 0.034361568450927735, 0.034257217407226564, 0.03493241500854492, 0.03414287948608399, 0.03501369476318359, 0.03420630264282227, 0.034163040161132814, 0.03423846435546875, 0.03532799911499023, 0.034296897888183596, 0.033989566802978516, 0.03451599884033203, 0.03381257629394531, 0.033712577819824216, 0.03386947250366211, 0.03370051193237305, 0.03358854293823242, 0.03367168045043945, 0.03357462310791016, 0.033675647735595705, 0.03361199951171875, 0.03383075332641602, 0.03357855987548828, 0.033856094360351564, 0.03346636962890625, 0.033486591339111325, 0.033812671661376956, 0.033384513854980466, 0.0337174072265625, 0.033662910461425784, 0.03372534561157227, 0.03357820892333984, 0.033704734802246096, 0.03353129577636719, 0.033666942596435545, 0.03348758316040039, 0.03356905746459961, 0.03347222518920898, 0.03361996841430664, 0.03356585693359375, 0.03380879974365234, 0.03387046432495117, 0.034227169036865235, 0.033872737884521484, 0.03393302536010742, 0.03660201644897461, 0.03411391830444336, 0.03428326416015625, 0.0340555534362793, 0.03399948883056641, 0.03401932907104492, 0.03387142562866211, 0.03403376007080078, 0.03418313598632813, 0.033869407653808595, 0.03369244766235351, 0.03361177444458008, 0.033554431915283206, 0.033623519897460936, 0.03689731216430664, 0.0349288330078125, 0.03372768020629883, 0.03357462310791016, 
0.03372943878173828, 0.033314815521240236, 0.0341195182800293, 0.03350339126586914, 0.03356467056274414, 0.033463550567626954, 0.033430431365966795, 0.033761119842529295, 0.03355033493041992, 0.03357440185546875, 0.03339699172973633, 0.03343407821655273, 0.03343942260742187, 0.03357894515991211, 0.03378752136230469, 0.03366099166870117, 0.033627998352050784, 0.033688190460205075, 0.033781185150146484, 0.03354268646240234, 0.033847328186035155, 0.03364863967895508, 0.03364483261108398, 0.033580734252929685, 0.03374208068847656, 0.033645343780517575, 0.03352166366577149, 0.033525089263916015, 0.033767166137695315, 0.033872512817382815, 0.03385308837890625, 0.033808383941650394, 0.03431388854980469, 0.03394854354858398, 0.03383305740356445, 0.033914878845214845, 0.03394131088256836, 0.034181312561035154, 0.03407654571533203, 0.034207775115966794, 0.03424800109863281, 0.03406428909301758, 0.03398649597167969, 0.03410015869140625, 0.034028640747070314, 0.0338985595703125, 0.033954654693603516, 0.03382787322998047, 0.03435779190063477, 0.033944000244140626, 0.03417001724243164, 0.03432860946655274, 0.03402441787719727, 0.034170143127441405, 0.03379667282104492, 0.03408595275878906, 0.034019489288330075, 0.0338803825378418, 0.033933792114257816, 0.03398860931396484, 0.033750526428222655, 0.03357952117919922, 0.03358310317993164, 0.0335687370300293, 0.03365871810913086, 0.034694366455078125, 0.03406454467773438, 0.0340560302734375, 0.03388905715942383, 0.03386307144165039, 0.034033985137939454, 0.03365097427368164, 0.03415824127197266, 0.03419375991821289, 0.0344637451171875, 0.03533609771728516, 0.03618825531005859, 0.03405414581298828, 0.03415859222412109, 0.033841152191162106, 0.03422617721557617, 0.033891361236572264, 0.03382985687255859, 0.03387353515625, 0.033970783233642575, 0.03436316680908203, 0.03404390335083008, 0.034187263488769534, 0.034095104217529294, 0.03401113510131836, 0.03402547073364258, 0.033915103912353514, 0.03406118392944336, 0.03404512023925781, 0.034297569274902344, 0.033978527069091796, 0.034066272735595704, 0.037556095123291014, 0.03457772827148437, 0.034132801055908206, 0.03393484878540039, 0.03446015930175781, 0.03429580688476563, 0.03414828872680664, 0.03397024154663086, 0.033810497283935544, 0.033787841796875, 0.03381024169921875, 0.03374905776977539, 0.03384656143188477, 0.03394851303100586, 0.03383091354370117, 0.034047008514404294, 0.03424764633178711, 0.033888256072998044, 0.03385548782348633, 0.03386732864379883, 0.03387187194824219, 0.033957630157470706, 0.03389440155029297, 0.03403667068481445, 0.0340948486328125, 0.03391664123535156, 0.03405366516113281, 0.034073055267333986, 0.033935905456542965, 0.035161857604980466, 0.034356670379638674, 0.0348458251953125, 0.034182014465332033, 0.03422342300415039, 0.03388896179199219, 0.033857536315917966, 0.03403760147094727, 0.033914047241210936, 0.03394249725341797, 0.03382886505126953, 0.03393324661254883, 0.03417916870117187, 0.03429724884033203, 0.03436806488037109, 0.034236415863037106, 0.034061630249023436, 0.03407321548461914, 0.03401331329345703, 0.033828800201416015, 0.03399884796142578, 0.03403936004638672, 0.03392124938964844, 0.03390262222290039, 0.033914657592773435, 0.033999073028564454, 0.034156734466552735, 0.033949695587158206, 0.03416585540771484, 0.03387894439697266, 0.03384950256347656, 0.034018783569335936, 0.03393526458740234, 0.03407497787475586, 0.03392934417724609, 0.03397814559936523, 0.03442073440551758, 0.03426950454711914, 0.0342745590209961, 0.03399744033813477, 0.03400470352172852, 
0.03397257614135742, 0.034095230102539065, 0.033851230621337894, 0.0339865608215332, 0.03386368179321289, 0.0336814079284668, 0.03383840179443359, 0.03407737731933594, 0.033873920440673826, 0.033931262969970705, 0.03441388702392578, 0.03429225540161133, 0.03404201507568359, 0.03410739135742188, 0.03383705520629883, 0.03394540786743164, 0.03467305755615235, 0.033888031005859375, 0.033852928161621096, 0.034035873413085935, 0.033923198699951175, 0.03410316848754883, 0.033831520080566405, 0.03374198532104492, 0.03511702346801758, 0.03434880065917969, 0.03428348922729492, 0.03400912094116211, 0.03399654388427734, 0.03417113494873047, 0.034095104217529294, 0.034043487548828126, 0.0339439697265625, 0.03393859100341797, 0.033979232788085935, 0.034162654876708984, 0.03389404678344726, 0.03379811096191406, 0.033855552673339846, 0.03448611068725586, 0.03619686508178711, 0.03394307327270508, 0.03393580627441406, 0.034266464233398436, 0.03413910293579102, 0.034251873016357424, 0.034046142578125, 0.03401916885375977, 0.034014015197753905, 0.03451881790161133, 0.03822979354858398, 0.03407807922363281, 0.034032543182373046, 0.0343513298034668, 0.036208351135253905, 0.03474825668334961, 0.03434672164916992, 0.03425651168823242, 0.034122398376464844, 0.03409929656982422, 0.03410140609741211, 0.034125152587890624, 0.034699134826660157, 0.03428195190429688, 0.035408096313476564, 0.0342196159362793, 0.03407823944091797, 0.034143104553222656, 0.03423027038574219, 0.035213279724121097, 0.034275360107421875, 0.03409305572509766, 0.0345272331237793, 0.034219104766845705, 0.034204574584960935, 0.03427065658569336, 0.03413590240478516, 0.033876064300537106, 0.03395792007446289, 0.03371203231811523, 0.033816318511962894, 0.034380062103271485, 0.03445769500732422, 0.034172542572021486, 0.034780094146728516, 0.03592192077636719, 0.034081951141357425, 0.03476345443725586, 0.03446527862548828, 0.037200382232666016, 0.035366912841796876, 0.03457228851318359, 0.03469036865234375, 0.03464262390136719, 0.03430179214477539, 0.03434921646118164, 0.03424848175048828, 0.03430012893676758, 0.034285728454589846, 0.03416048049926758, 0.03419516754150391, 0.034371646881103515, 0.03440150451660156, 0.03437180709838867, 0.0343026237487793, 0.03424630355834961, 0.03487091064453125, 0.03426780700683594, 0.03425484848022461, 0.034353057861328126, 0.03419327926635742, 0.03437311935424805, 0.03431209564208984, 0.03419852828979492, 0.03419126510620117, 0.03419295883178711, 0.03429420852661133, 0.03411167907714844, 0.03419283294677734, 0.036659679412841796, 0.03710060882568359, 0.034122688293457035, 0.03404751968383789, 0.03402595138549805, 0.034078720092773435, 0.03386163330078125, 0.034239967346191405, 0.03393795013427734, 0.03401871871948242, 0.03421654510498047, 0.03406134414672852, 0.033969120025634764, 0.03407238388061523, 0.03398060989379883, 0.033982463836669925, 0.0346951675415039, 0.034652225494384764, 0.03447129440307617, 0.03444089508056641, 0.034360126495361326, 0.03408102416992188, 0.03429328155517578, 0.03429814529418945, 0.03420979309082031, 0.034399295806884764, 0.034517951965332035, 0.0343287353515625, 0.03441804885864258, 0.034224609375, 0.03599155044555664, 0.03444518280029297, 0.03367782211303711, 0.03362223815917969, 0.03392921447753906, 0.03391897583007813, 0.03387980651855469, 0.03383526229858398, 0.03383500671386719, 0.033775615692138675, 0.03378496170043945, 0.03377859115600586, 0.03363808059692383, 0.03368076705932617, 0.0336778564453125, 0.03387968063354492, 0.033942176818847654, 0.03398204803466797, 
0.03397683334350586, 0.034045951843261715, 0.034326175689697265, 0.034132320404052736, 0.03412377548217774, 0.034084190368652345, 0.034413215637207034, 0.039298561096191405, 0.034259456634521485, 0.03511916732788086, 0.0439007682800293, 0.034154014587402345, 0.034025089263916015, 0.033769920349121095, 0.033659393310546876, 0.03346185684204102, 0.03341353607177734, 0.033390560150146485, 0.033368095397949216, 0.033271808624267575, 0.03352278518676758, 0.03342019271850586, 0.033296382904052735, 0.0335432014465332, 0.03379846572875977, 0.03361449432373047, 0.03341721725463867, 0.033380352020263675, 0.03344591903686524, 0.03359126281738281, 0.033467967987060546, 0.033683902740478514, 0.03559628677368164, 0.034039680480957034, 0.03431568145751953, 0.03403142547607422, 0.03395062255859375, 0.0384532470703125, 0.03382588958740235, 0.03370073699951172, 0.033763393402099606, 0.03372233581542969, 0.03397046279907227, 0.03383238220214844, 0.03396819305419922, 0.033968353271484376, 0.03462332916259766, 0.033976318359375, 0.03397564697265625, 0.03399884796142578, 0.03453728103637695, 0.034204574584960935, 0.033984447479248045, 0.03402288055419922, 0.03396252822875977, 0.034288928985595706, 0.03414614486694336, 0.03402998352050781, 0.03386140823364258, 0.03394796752929687, 0.03411391830444336, 0.03401728057861328, 0.0385167350769043, 0.03390985488891601, 0.03392761611938477, 0.03374335861206055, 0.03381142425537109, 0.03387670516967774, 0.03383868789672852, 0.03408761596679687, 0.03384524917602539, 0.03387801742553711, 0.033964031219482424, 0.034301952362060545, 0.03401523208618164, 0.03426508712768555, 0.03428694534301758, 0.03411011123657227, 0.03413148880004883, 0.034046432495117185, 0.03419744110107422, 0.034006526947021484, 0.03394198226928711, 0.03400495910644531, 0.033955615997314455, 0.03400534439086914, 0.03390873718261719, 0.03385343933105469, 0.033882110595703126, 0.03378364944458008, 0.03378169631958008, 0.03398883056640625, 0.03816463851928711, 0.033879905700683596, 0.03381248092651367, 0.033683006286621096, 0.033581504821777346, 0.03349689483642578, 0.03365631866455078, 0.03361248016357422, 0.0336445426940918, 0.033815616607666014, 0.03382921600341797, 0.033960670471191404, 0.03389139175415039, 0.03389836883544922, 0.03399916839599609, 0.03422809600830078, 0.03400518417358398]",tokens/s,29.289222821745618,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: 
Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 4 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 28.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 190475 has 14.74 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 80.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1349, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1142, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 868, in forward hidden_states = self.mlp(hidden_states) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 781, in forward final_hidden_states.index_add_(0, top_x, current_hidden_states.to(hidden_states.dtype)) RuntimeError: CUDA error: invalid configuration argument CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1584.623616,1596.915712,0.0,1201.668096,1189.151232,s,1,8.3438662109375,8.3438662109375,0.0,8.3438662109375,8.3438662109375,8.3438662109375,8.3438662109375,[8.3438662109375],,kWh,3.5593928066667024e-05,3.918479009886834e-06,1.0983897675998638e-05,5.04963047525525e-05,,MB,1721.929728,1794.048,0.0,1384.12032,1351.367168,s,10,0.41199757003784176,0.04119975700378418,0.00014483756846233586,0.041238176345825196,0.04129707107543945,0.04141133575439453,0.04150274749755859,"[0.04152560043334961, 0.041245952606201175, 0.04123302459716797, 0.04099168014526367, 0.041271678924560545, 0.04107120132446289, 0.04107059097290039, 0.04125964736938476, 0.041243328094482425, 0.04108486557006836]",tokens/s,6213.628880784091,kWh,1.24244321833335e-06,1.370178162340395e-07,8.233245357276679e-07,2.2027855702950574e-06,tokens/kWh,116216486.7303491,MB,1731.1744,1835.99104,0.0,1426.06336,1407.548416,s,10,10.576904418945313,1.0576904418945312,0.005352611792629986,1.0581462402343749,1.062176708984375,1.0641183227539062,1.0656716137695312,"[1.0660599365234376, 1.0612806396484376, 1.057637939453125, 1.04605126953125, 1.058654541015625, 1.055569580078125, 1.0571251220703124, 1.0617452392578124, 1.051779052734375, 
1.0610010986328124]",tokens/s,59.5637414356838,kWh,3.0963543785417386e-05,3.414185807765365e-06,1.5384993986472118e-05,4.976272357965487e-05,tokens/kWh,1266007.8763405364,,s,630,10.574133043289184,0.016784338163951085,0.00033400692043555826,0.016730128288269044,0.017058774185180665,0.01727484941482544,0.01835901706695557,"[0.017328544616699217, 0.017398239135742188, 0.019746816635131836, 0.017338048934936522, 0.017328447341918945, 0.017270784378051757, 0.017137664794921875, 0.017006591796875, 0.017165567398071287, 0.017231744766235353, 0.017054559707641602, 0.018394336700439454, 0.01705196762084961, 0.016991968154907226, 0.016888799667358397, 0.01684432029724121, 0.016972192764282225, 0.016801664352416992, 0.016859136581420898, 0.016817760467529298, 0.01684115219116211, 0.01670550346374512, 0.016723968505859374, 0.01664169692993164, 0.016574560165405275, 0.016689407348632813, 0.016590368270874022, 0.016595424652099608, 0.016736255645751954, 0.016760576248168946, 0.016652671813964844, 0.01663167953491211, 0.01663801574707031, 0.01651456069946289, 0.016541280746459962, 0.016528480529785155, 0.016549055099487304, 0.016645952224731444, 0.016554752349853517, 0.01653555107116699, 0.016547840118408205, 0.016520736694335937, 0.016498687744140626, 0.01650326347351074, 0.016541696548461913, 0.01648227119445801, 0.016515104293823243, 0.016457696914672852, 0.016565439224243163, 0.018501760482788086, 0.01876652717590332, 0.016746623992919922, 0.016576416015625, 0.016617151260375978, 0.016764863967895508, 0.016619903564453125, 0.01661302375793457, 0.016699743270874024, 0.017123327255249024, 0.017155519485473635, 0.016878143310546875, 0.01692006492614746, 0.0168985595703125, 0.016728384017944336, 0.016622976303100587, 0.01651161575317383, 0.01637990379333496, 0.016433151245117187, 0.01636672019958496, 0.01643814468383789, 0.01662156867980957, 0.016702720642089844, 0.01674515151977539, 0.01691372871398926, 0.0167063045501709, 0.016736480712890626, 0.01679952049255371, 0.016730112075805666, 0.01668016052246094, 0.016747295379638674, 0.016592256546020506, 0.016581087112426757, 0.01652956771850586, 0.01678473663330078, 0.01681475257873535, 0.016930816650390625, 0.016777215957641603, 0.017083423614501953, 0.01678428840637207, 0.016756383895874024, 0.01665884780883789, 0.016875520706176757, 0.01662156867980957, 0.016691200256347655, 0.016614463806152342, 0.016645055770874023, 0.017143808364868163, 0.01717647933959961, 0.01663542366027832, 0.016659008026123048, 0.016740447998046876, 0.016750495910644533, 0.01670924758911133, 0.0169168643951416, 0.016764928817749023, 0.016730112075805666, 0.016912128448486326, 0.01686070442199707, 0.01667555236816406, 0.01684819221496582, 0.01668908882141113, 0.016730880737304686, 0.01666633605957031, 0.016752927780151368, 0.016631200790405275, 0.01797590446472168, 0.016836063385009765, 0.016726560592651367, 0.019449567794799803, 0.018272544860839842, 0.016942399978637696, 0.017070783615112304, 0.01716223907470703, 0.017334112167358397, 0.016857536315917968, 0.016805599212646485, 0.017108991622924806, 0.016888896942138673, 0.016927679061889647, 0.016903615951538085, 0.01703993606567383, 0.017079296112060546, 0.016873855590820313, 0.016760799407958986, 0.016554079055786132, 0.016622047424316406, 0.01680803108215332, 0.016699392318725585, 0.01674822425842285, 0.016685375213623045, 0.016904447555541994, 0.016625408172607423, 0.016751808166503908, 0.016812864303588866, 0.016680448532104493, 0.016653888702392577, 0.016587455749511718, 0.016648448944091798, 0.01679769515991211, 
0.016662080764770507, 0.016693695068359375, 0.0166297607421875, 0.01663385581970215, 0.016570367813110352, 0.016699392318725585, 0.016598304748535155, 0.016769376754760743, 0.016673152923583984, 0.016644096374511717, 0.016539392471313478, 0.016615680694580078, 0.01653555107116699, 0.017320032119750976, 0.01705743980407715, 0.016765024185180662, 0.016692480087280272, 0.016739519119262695, 0.016801504135131835, 0.016760671615600586, 0.0166810245513916, 0.016773216247558592, 0.016748544692993163, 0.017016576766967773, 0.017344768524169923, 0.016951295852661134, 0.016928768157958983, 0.016832063674926758, 0.01681171226501465, 0.016934783935546874, 0.016954496383666993, 0.01690188789367676, 0.017088064193725584, 0.01713190460205078, 0.01683990478515625, 0.016741216659545897, 0.01664204788208008, 0.016516319274902345, 0.016431903839111327, 0.016578527450561525, 0.016645023345947266, 0.016582847595214844, 0.016561983108520507, 0.01684889602661133, 0.016512672424316407, 0.016559616088867187, 0.016452287673950194, 0.01645756721496582, 0.01655958366394043, 0.016626527786254883, 0.016451583862304688, 0.016530815124511718, 0.016783872604370118, 0.016490623474121092, 0.016556032180786134, 0.01645952033996582, 0.016435136795043947, 0.016539520263671875, 0.016550336837768555, 0.016581888198852538, 0.016547935485839844, 0.016520095825195313, 0.01647385597229004, 0.016523103713989257, 0.016455743789672853, 0.016530784606933593, 0.01644825553894043, 0.016472063064575194, 0.016457727432250976, 0.01657241630554199, 0.01646905517578125, 0.01728595161437988, 0.016627840042114258, 0.016590560913085937, 0.016470176696777344, 0.016766176223754883, 0.0165948486328125, 0.016559104919433593, 0.016447488784790038, 0.016590719223022462, 0.016558271408081054, 0.01665017509460449, 0.016562175750732423, 0.016570367813110352, 0.016554079055786132, 0.01651702308654785, 0.0164454402923584, 0.016453632354736326, 0.016493791580200194, 0.01653430366516113, 0.016523263931274415, 0.016559232711791994, 0.0165382080078125, 0.016419103622436523, 0.016434656143188477, 0.016575008392333983, 0.01659903907775879, 0.016850944519042968, 0.01692576026916504, 0.016927391052246093, 0.01716409683227539, 0.017289695739746095, 0.017037120819091797, 0.017079456329345703, 0.016754400253295897, 0.016631200790405275, 0.01657494354248047, 0.016573856353759766, 0.01667363166809082, 0.0166759033203125, 0.016702272415161132, 0.01671548843383789, 0.016795936584472655, 0.016744447708129884, 0.01671891212463379, 0.01672492790222168, 0.01665433692932129, 0.016670719146728515, 0.016861183166503906, 0.016855039596557618, 0.01683660888671875, 0.01676288032531738, 0.016762432098388673, 0.01680019187927246, 0.01683251190185547, 0.01688800048828125, 0.016942047119140626, 0.01675734329223633, 0.017033472061157226, 0.016856639862060548, 0.016818239212036134, 0.016914815902709962, 0.017098623275756834, 0.016900224685668944, 0.01684480094909668, 0.01678745651245117, 0.016787391662597656, 0.016863296508789063, 0.01679155158996582, 0.01699839973449707, 0.016766752243041992, 0.01661302375793457, 0.016728639602661133, 0.016898048400878905, 0.016931039810180664, 0.01682377624511719, 0.017117504119873048, 0.016855039596557618, 0.016854944229125975, 0.016803936004638673, 0.016902143478393555, 0.016943103790283204, 0.01705369567871094, 0.016817983627319337, 0.016791872024536133, 0.016781183242797853, 0.016744447708129884, 0.016693248748779296, 0.01676892852783203, 0.01670751953125, 0.01673846435546875, 0.01676697540283203, 0.016703487396240235, 0.016596607208251953, 
0.016658176422119142, 0.01666227149963379, 0.01702230453491211, 0.0168799991607666, 0.01670172882080078, 0.01679555130004883, 0.016953407287597658, 0.016898080825805663, 0.016985952377319338, 0.016903392791748045, 0.016772031784057617, 0.016693248748779296, 0.01660927963256836, 0.0165928955078125, 0.016532512664794923, 0.016659423828125, 0.01661747169494629, 0.016900096893310547, 0.016664575576782227, 0.016639488220214844, 0.016574464797973632, 0.016961887359619142, 0.016873632431030273, 0.016631807327270508, 0.016727584838867188, 0.016798240661621094, 0.016813568115234375, 0.01678585624694824, 0.01679897689819336, 0.01671855926513672, 0.016750272750854493, 0.01678927993774414, 0.01684867286682129, 0.01686387252807617, 0.016807903289794923, 0.016863040924072266, 0.016690784454345704, 0.016626495361328125, 0.016631391525268553, 0.01653798484802246, 0.016498687744140626, 0.016488447189331054, 0.01646329689025879, 0.0164913272857666, 0.016444320678710937, 0.016633760452270507, 0.016689407348632813, 0.01655171203613281, 0.016547935485839844, 0.01662211227416992, 0.016566560745239257, 0.016674623489379883, 0.016979232788085937, 0.017308576583862305, 0.016637727737426757, 0.01656787109375, 0.016578943252563476, 0.016628000259399416, 0.016543807983398436, 0.016602880477905275, 0.01651321601867676, 0.016502784729003905, 0.016477888107299804, 0.01911020851135254, 0.017278175354003906, 0.016667776107788086, 0.016669536590576173, 0.01653334426879883, 0.01648044776916504, 0.016506879806518555, 0.016483999252319335, 0.0165382080078125, 0.016617216110229493, 0.016639999389648438, 0.01660723114013672, 0.01659903907775879, 0.016506879806518555, 0.01652115249633789, 0.01674025535583496, 0.01664620780944824, 0.01660323143005371, 0.01649446487426758, 0.016496768951416017, 0.01658233642578125, 0.016546112060546875, 0.01665023994445801, 0.016553216934204102, 0.016547584533691408, 0.016550943374633788, 0.01783724784851074, 0.017103616714477538, 0.0185262393951416, 0.016793600082397463, 0.016610912322998047, 0.016648607254028322, 0.01658060836791992, 0.016700576782226563, 0.016797664642333985, 0.016843648910522462, 0.01681203269958496, 0.016699392318725585, 0.016742399215698242, 0.016920576095581053, 0.01680384063720703, 0.01670966339111328, 0.016764543533325196, 0.016926752090454102, 0.017097024917602538, 0.017333343505859376, 0.016874399185180664, 0.01700864028930664, 0.016750175476074217, 0.01687183952331543, 0.016852447509765625, 0.01687606430053711, 0.01675596809387207, 0.01668118476867676, 0.016637760162353514, 0.016626399993896486, 0.016785408020019533, 0.017003807067871093, 0.01672643280029297, 0.01680732727050781, 0.016868255615234376, 0.016965375900268555, 0.016799936294555663, 0.016928831100463868, 0.01697996711730957, 0.016926752090454102, 0.016794143676757814, 0.01716806411743164, 0.017211103439331056, 0.017242303848266603, 0.01677676773071289, 0.01679657554626465, 0.01689948844909668, 0.01687196731567383, 0.016846847534179688, 0.016836063385009765, 0.016726335525512694, 0.016909664154052734, 0.016792512893676757, 0.016822208404541017, 0.016723775863647462, 0.016558271408081054, 0.016999679565429686, 0.01686809539794922, 0.01678745651245117, 0.01687481689453125, 0.016824287414550783, 0.01672435188293457, 0.01664601516723633, 0.016613439559936525, 0.016730144500732423, 0.01661929512023926, 0.016646751403808592, 0.016504480361938478, 0.01646134376525879, 0.01653446388244629, 0.01653708839416504, 0.016662912368774416, 0.016721920013427736, 0.016674783706665038, 0.016619455337524413, 
0.016899391174316405, 0.0174021110534668, 0.016900640487670898, 0.017296672821044922, 0.017191648483276367, 0.01679769515991211, 0.016891904830932617, 0.01687731170654297, 0.016957696914672853, 0.016941055297851563, 0.016852991104125976, 0.016649375915527342, 0.016585151672363282, 0.016784000396728515, 0.016728992462158202, 0.0168723201751709, 0.017110015869140623, 0.017101472854614257, 0.016959840774536133, 0.01683456039428711, 0.016924671173095703, 0.017020256042480468, 0.01702355194091797, 0.016892000198364256, 0.016748544692993163, 0.01686028861999512, 0.017437568664550783, 0.01700454330444336, 0.016893888473510744, 0.01685264015197754, 0.01695568084716797, 0.01704560089111328, 0.016857120513916017, 0.016803647994995115, 0.016771263122558593, 0.016724319458007814, 0.01665603256225586, 0.016633216857910155, 0.016680864334106444, 0.017133440017700195, 0.016792352676391602, 0.016750656127929687, 0.01661747169494629, 0.016672767639160157, 0.01676643180847168, 0.016740896224975585, 0.016767999649047852, 0.016731136322021483, 0.017012224197387696, 0.01703558349609375, 0.01695120048522949, 0.017004352569580078, 0.017011167526245118, 0.01684480094909668, 0.01672208023071289, 0.016693088531494142, 0.016574304580688478, 0.01651113510131836, 0.016521472930908204, 0.016649984359741212, 0.016590368270874022, 0.01681046485900879, 0.01650908851623535, 0.016848735809326172, 0.016594944000244142, 0.016547840118408205, 0.016518592834472656, 0.0165545597076416, 0.016637535095214845, 0.016559871673583984, 0.017420448303222657, 0.016605247497558595, 0.016597087860107423, 0.016499040603637695, 0.01647542381286621, 0.01646870422363281, 0.016441343307495117, 0.016597024917602538, 0.01649660873413086, 0.016488544464111327, 0.016578079223632813, 0.016577024459838868, 0.01661939239501953, 0.01639833641052246, 0.0163918399810791, 0.016468320846557617, 0.01642905616760254, 0.016451040267944337, 0.016513792037963868, 0.016477983474731447, 0.016530048370361327, 0.016579999923706054, 0.01673916816711426, 0.016836639404296874, 0.016770687103271485, 0.016836896896362304, 0.016791072845458985, 0.016822816848754883, 0.01683977508544922, 0.01680476760864258, 0.016692672729492188, 0.016716352462768556, 0.016789535522460937, 0.016601055145263673, 0.01658470344543457, 0.016422592163085937, 0.016568639755249023, 0.01692451286315918, 0.016436672210693358, 0.017430944442749022, 0.0164150390625, 0.016357152938842774, 0.016462047576904296, 0.0166748161315918, 0.0167587833404541, 0.01715814399719238, 0.01707827186584473, 0.017126848220825195, 0.017044031143188476, 0.017405120849609376, 0.017578784942626952, 0.01694108772277832, 0.01699839973449707, 0.016914560317993165, 0.016905567169189454, 0.016779808044433592, 0.016989343643188475, 0.017310560226440428, 0.016952352523803713, 0.016819168090820312, 0.016731487274169923, 0.016722591400146484, 0.016719104766845704, 0.016609727859497072, 0.01663724708557129, 0.016616191864013672, 0.01661516761779785, 0.01656268882751465, 0.01658880043029785, 0.016766656875610353, 0.016654560089111328, 0.016663904190063476, 0.016601856231689454, 0.016633087158203125, 0.016589183807373047, 0.01680175971984863, 0.018142656326293947, 0.018082239151000976, 0.01688425636291504, 0.01669113540649414, 0.016951072692871095, 0.016978336334228517, 0.016882816314697267]",tokens/s,59.57935250302398,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1583.558656,1596.915712,0.0,1201.668096,1189.151232,s,1,8.3514072265625,8.3514072265625,0.0,8.3514072265625,8.3514072265625,8.3514072265625,8.3514072265625,[8.3514072265625],,kWh,3.551230208750174e-05,3.909955984242548e-06,1.0968342108001738e-05,5.039060017974603e-05,,MB,1587.183616,1794.048,0.0,1384.12032,1351.367168,s,10,0.41280178833007813,0.041280178833007816,0.0002324199835202943,0.04122892761230469,0.04140379981994628,0.04166441097259521,0.041872899894714356,"[0.04192502212524414, 0.04125302505493164, 0.04117756652832031, 0.04118697738647461, 0.04127635192871094, 0.041204830169677735, 0.0410423698425293, 0.041086944580078125, 0.04130281448364258, 0.04134588623046875]",tokens/s,6201.523521387975,kWh,1.2442479582092324e-06,1.372182518467848e-07,8.22242974578723e-07,2.20370918463474e-06,tokens/kWh,116167778.30076134,MB,1592.111104,1835.99104,0.0,1426.06336,1407.548416,s,10,10.592636230468752,1.0592636230468753,0.004145041345814589,1.0582247924804689,1.0626305419921875,1.0657195678710938,1.0681907885742188,"[1.0590074462890624, 1.05448046875, 1.061748046875, 1.06880859375, 1.0547740478515626, 1.057442138671875, 1.061944091796875, 1.0614659423828126, 1.057037109375, 1.0559283447265626]",tokens/s,59.47527945761627,kWh,3.0436203563040093e-05,3.3566527589631704e-06,1.536185330602108e-05,4.915470962802435e-05,tokens/kWh,1281667.6260881033,,s,630,10.589003307342528,0.016807941757686556,0.00032476302278186965,0.016749680519104006,0.016978409004211426,0.017119202995300294,0.017825050716400153,"[0.017150848388671876, 0.017287168502807617, 0.016746496200561522, 0.016875520706176757, 0.01684217643737793, 0.016746112823486328, 0.01671673583984375, 0.01660313606262207, 0.016697216033935546, 0.01676710319519043, 0.016852991104125976, 0.01664963150024414, 0.016642656326293945, 0.016740352630615234, 0.01662761688232422, 0.01687126350402832, 0.016965856552124025, 0.016768224716186525, 0.0168437442779541, 0.01672707176208496, 0.01672025680541992, 0.016713823318481445, 0.01672540855407715, 0.01683888053894043, 0.01675132751464844, 0.016748544692993163, 0.016778783798217775, 0.016869855880737306, 0.016906240463256835, 0.016884895324707033, 0.016795967102050783, 0.016860864639282228, 0.01676153564453125, 0.016793760299682617, 0.01678745651245117, 0.016789344787597655, 0.017066144943237306, 0.017964927673339844, 0.016765056610107423, 0.01677644729614258, 0.01664896011352539, 0.016655359268188476, 0.0167490234375, 0.016868896484375, 0.016741439819335936, 0.016914304733276368, 0.01686124801635742, 0.017123327255249024, 0.016726015090942382, 0.016764928817749023, 0.016697343826293946, 0.016793535232543944, 0.01669536018371582, 0.016729663848876954, 0.016681407928466795, 0.016717119216918944, 0.016652095794677736, 0.016714624404907227, 0.01670528030395508, 0.01675699234008789, 0.01669468879699707, 
0.016810527801513674, 0.016659616470336914, 0.016749183654785157, 0.016668607711791992, 0.01669945526123047, 0.016674144744873047, 0.016882335662841797, 0.016611072540283205, 0.016762752532958985, 0.016673152923583984, 0.016633024215698244, 0.016607072830200194, 0.016788448333740234, 0.01660054397583008, 0.016798112869262697, 0.016689279556274413, 0.016690431594848634, 0.0166878719329834, 0.016842752456665038, 0.016728063583374024, 0.01683660888671875, 0.016707231521606445, 0.016718175888061522, 0.0168787841796875, 0.01697260856628418, 0.016783327102661134, 0.01677110481262207, 0.016748544692993163, 0.0167956485748291, 0.016639999389648438, 0.01669526481628418, 0.01668239974975586, 0.016749055862426757, 0.01680499267578125, 0.017029375076293946, 0.017089279174804687, 0.01679270362854004, 0.016719968795776367, 0.01685772705078125, 0.016692703247070312, 0.016610208511352538, 0.016727712631225584, 0.0165449275970459, 0.01688675117492676, 0.016852319717407225, 0.016724544525146483, 0.016691295623779297, 0.0166748161315918, 0.016713727951049806, 0.01661952018737793, 0.016750591278076172, 0.01667036819458008, 0.016617183685302735, 0.016656511306762694, 0.016629535675048827, 0.01650524711608887, 0.01678995132446289, 0.016786367416381835, 0.016861568450927733, 0.016788127899169922, 0.016697248458862304, 0.016739519119262695, 0.016595775604248048, 0.016594911575317384, 0.016773151397705077, 0.01666454315185547, 0.016793407440185548, 0.01705923271179199, 0.016935712814331056, 0.01679769515991211, 0.01662512016296387, 0.01689244842529297, 0.016794784545898438, 0.016609119415283202, 0.016607967376708985, 0.016785696029663087, 0.016838623046875, 0.016666591644287108, 0.01674799919128418, 0.01674710464477539, 0.016951295852661134, 0.017088512420654296, 0.01672563171386719, 0.016713216781616212, 0.01674025535583496, 0.016724960327148437, 0.01664780807495117, 0.01675712013244629, 0.016729663848876954, 0.01662816047668457, 0.016697343826293946, 0.016961536407470702, 0.016728063583374024, 0.016752639770507814, 0.01668057632446289, 0.016719968795776367, 0.016828447341918944, 0.01676860809326172, 0.016793695449829102, 0.016886335372924804, 0.016971967697143556, 0.017194911956787108, 0.01717033576965332, 0.016977088928222656, 0.01704140853881836, 0.0169268798828125, 0.016884479522705078, 0.016854944229125975, 0.016809120178222656, 0.016867647171020506, 0.016760671615600586, 0.01700524711608887, 0.017864704132080078, 0.017680383682250975, 0.017104032516479493, 0.017005407333374023, 0.01711916732788086, 0.01694918441772461, 0.01677939224243164, 0.016744447708129884, 0.01671507263183594, 0.01667910385131836, 0.01684940719604492, 0.016678911209106445, 0.01672985649108887, 0.01665795135498047, 0.016636640548706054, 0.016772224426269532, 0.016833536148071288, 0.016675840377807616, 0.016683008193969725, 0.016719871520996094, 0.01668819236755371, 0.016677824020385743, 0.016683008193969725, 0.016769023895263673, 0.016721920013427736, 0.016963584899902344, 0.017119232177734374, 0.016730112075805666, 0.01660416030883789, 0.0168089599609375, 0.016657888412475588, 0.016544288635253906, 0.0166014404296875, 0.016544511795043945, 0.01658358383178711, 0.016699199676513673, 0.01747577667236328, 0.020911552429199218, 0.021582239151000975, 0.01703651237487793, 0.016823232650756834, 0.016908031463623047, 0.016689407348632813, 0.016858272552490235, 0.016670751571655273, 0.016933439254760742, 0.016679168701171875, 0.016742399215698242, 0.016693248748779296, 0.01678745651245117, 0.016702592849731444, 0.0171276798248291, 
0.0171812801361084, 0.016777023315429688, 0.0167174072265625, 0.016748512268066406, 0.016695968627929686, 0.016877407073974608, 0.016871583938598632, 0.01666864013671875, 0.01665363121032715, 0.016890592575073242, 0.016731584548950195, 0.01673263931274414, 0.016752735137939453, 0.016708992004394532, 0.01670412826538086, 0.016852991104125976, 0.016746496200561522, 0.01667024040222168, 0.016656448364257812, 0.016756160736083985, 0.016673215866088865, 0.016828128814697266, 0.01674671936035156, 0.01690809631347656, 0.016742687225341796, 0.0191779842376709, 0.01718003273010254, 0.01734294319152832, 0.016876575469970703, 0.01684124755859375, 0.016887231826782225, 0.016709856033325195, 0.01688991928100586, 0.01694371223449707, 0.016648191452026367, 0.016611263275146483, 0.016643295288085936, 0.016622432708740233, 0.01663795280456543, 0.01666662406921387, 0.01683865547180176, 0.016602655410766602, 0.016743104934692384, 0.016647680282592774, 0.016760671615600586, 0.016683584213256837, 0.01685286331176758, 0.016611551284790037, 0.016763776779174806, 0.016698432922363282, 0.01693270492553711, 0.016570367813110352, 0.016774560928344725, 0.016681568145751953, 0.01665843200683594, 0.016583744049072265, 0.016813119888305663, 0.01696460723876953, 0.016740447998046876, 0.01677187156677246, 0.016670719146728515, 0.016655359268188476, 0.01660211181640625, 0.016645824432373047, 0.01678163146972656, 0.016603328704833983, 0.01672502326965332, 0.016577184677124022, 0.01676915168762207, 0.016704704284667967, 0.01694921684265137, 0.016802656173706056, 0.016879039764404295, 0.016646080017089844, 0.016617311477661132, 0.016681695938110353, 0.016738367080688477, 0.01660678482055664, 0.016839040756225585, 0.01662099266052246, 0.01686300849914551, 0.01670639991760254, 0.01682841682434082, 0.016660480499267577, 0.016683008193969725, 0.016717824935913086, 0.01679120063781738, 0.016662879943847655, 0.01677107238769531, 0.016732160568237304, 0.01710095977783203, 0.017058431625366213, 0.016781312942504883, 0.016774431228637695, 0.01671446418762207, 0.01663702392578125, 0.016760992050170898, 0.016763647079467772, 0.017102848052978514, 0.016842752456665038, 0.01677836799621582, 0.0168143367767334, 0.01681878471374512, 0.016817535400390625, 0.016865951538085937, 0.01686499214172363, 0.016711519241333007, 0.017183168411254883, 0.016670719146728515, 0.01690323257446289, 0.016747264862060546, 0.016810176849365234, 0.01676803207397461, 0.016712671279907228, 0.01697996711730957, 0.016957536697387695, 0.016762176513671876, 0.01680182456970215, 0.016696895599365234, 0.016747520446777343, 0.016631807327270508, 0.016710975646972655, 0.01673664093017578, 0.016744768142700196, 0.016693248748779296, 0.016648191452026367, 0.016622751235961915, 0.01667568016052246, 0.016692703247070312, 0.016910879135131836, 0.01663795280456543, 0.016627328872680664, 0.016654720306396486, 0.01676892852783203, 0.016601184844970703, 0.016717824935913086, 0.01657241630554199, 0.016764928817749023, 0.016726015090942382, 0.01682636833190918, 0.01667430305480957, 0.01674006462097168, 0.01675254440307617, 0.01664899253845215, 0.016706911087036133, 0.01673664093017578, 0.016615680694580078, 0.01696780776977539, 0.016760639190673828, 0.01669548797607422, 0.016828479766845702, 0.017229759216308593, 0.016897247314453124, 0.01677587127685547, 0.016596960067749023, 0.016689344406127928, 0.01670515251159668, 0.016693695068359375, 0.016680831909179687, 0.01669340705871582, 0.01664614486694336, 0.016777215957641603, 0.01682841682434082, 0.016846847534179688, 
0.016632863998413086, 0.016759456634521483, 0.016813535690307618, 0.016672767639160157, 0.016622432708740233, 0.01671548843383789, 0.01761065673828125, 0.016715263366699217, 0.016663103103637694, 0.0168656005859375, 0.0167521915435791, 0.016884159088134766, 0.016635967254638673, 0.01665836715698242, 0.016793600082397463, 0.016844287872314453, 0.01672038459777832, 0.016852991104125976, 0.016737823486328126, 0.01661952018737793, 0.01662828826904297, 0.016746463775634764, 0.01668835258483887, 0.017320415496826173, 0.016793855667114256, 0.016687103271484375, 0.01721958351135254, 0.01683443260192871, 0.0166843204498291, 0.016620384216308594, 0.01677516746520996, 0.016807903289794923, 0.016643392562866212, 0.016706079483032227, 0.016982175827026366, 0.016803871154785155, 0.016752639770507814, 0.016772928237915038, 0.016718015670776368, 0.0168571834564209, 0.017033119201660157, 0.017881088256835938, 0.018191904067993165, 0.0169783992767334, 0.01695692825317383, 0.016949535369873047, 0.016873695373535155, 0.016829504013061523, 0.016686016082763672, 0.01683033561706543, 0.017127552032470704, 0.017394943237304686, 0.017176448822021486, 0.017006528854370116, 0.01684294319152832, 0.01690006446838379, 0.01672400093078613, 0.0169998722076416, 0.016795423507690428, 0.016676927566528322, 0.016777952194213866, 0.016774784088134764, 0.01687923240661621, 0.016631776809692383, 0.017727968215942382, 0.01667308807373047, 0.01681203269958496, 0.017625087738037108, 0.01705366325378418, 0.016821823120117187, 0.016922592163085937, 0.016652351379394532, 0.01666502380371094, 0.016693248748779296, 0.016700864791870117, 0.016702016830444335, 0.01684889602661133, 0.017160192489624023, 0.01699190330505371, 0.016968032836914063, 0.017003711700439454, 0.016975744247436524, 0.017044416427612306, 0.017059743881225584, 0.016730207443237305, 0.01676233673095703, 0.016636447906494142, 0.016961088180541994, 0.016913888931274414, 0.016587232589721678, 0.016717727661132813, 0.016880224227905274, 0.01683456039428711, 0.01679974365234375, 0.016664575576782227, 0.016773311614990235, 0.01680899238586426, 0.016783456802368164, 0.016667327880859374, 0.016664575576782227, 0.01677926445007324, 0.016685056686401366, 0.016662687301635743, 0.016770591735839845, 0.01677343940734863, 0.016828191757202147, 0.016908607482910155, 0.017033119201660157, 0.016707584381103514, 0.016715776443481444, 0.016714048385620118, 0.01665158462524414, 0.01660713577270508, 0.016664768218994142, 0.017252191543579102, 0.01720966339111328, 0.016949247360229493, 0.01682841682434082, 0.016717824935913086, 0.01666819190979004, 0.016654367446899413, 0.016667072296142577, 0.016772159576416014, 0.01681056022644043, 0.016847232818603515, 0.016743488311767578, 0.016679040908813475, 0.017023807525634767, 0.01680588722229004, 0.016726015090942382, 0.01677107238769531, 0.016639135360717774, 0.016784223556518554, 0.016730112075805666, 0.01674425506591797, 0.016766176223754883, 0.01685321617126465, 0.01687424087524414, 0.01676288032531738, 0.016732160568237304, 0.0167956485748291, 0.01671513557434082, 0.016804479598999025, 0.016713247299194336, 0.01679408073425293, 0.016701440811157226, 0.016728063583374024, 0.016709632873535156, 0.01677107238769531, 0.01660723114013672, 0.016742399215698242, 0.016676864624023437, 0.016877567291259766, 0.016859136581420898, 0.016898048400878905, 0.016685056686401366, 0.01665433692932129, 0.01675382423400879, 0.016724191665649413, 0.01686140823364258, 0.016789920806884767, 0.016644096374511717, 0.016770143508911133, 0.016892831802368165, 
0.01674985694885254, 0.017081056594848633, 0.01682431983947754, 0.01664614486694336, 0.016633216857910155, 0.01665292739868164, 0.01663385581970215, 0.016573728561401366, 0.01682896041870117, 0.01662585639953613, 0.0169716796875, 0.016633567810058595, 0.016820512771606445, 0.01674665641784668, 0.016906240463256835, 0.016701440811157226, 0.01684000015258789, 0.016886463165283205, 0.01701478385925293, 0.016760831832885743, 0.016752639770507814, 0.016764928817749023, 0.017043455123901367, 0.016883712768554687, 0.01684480094909668, 0.016752511978149413, 0.01679372787475586, 0.01666646385192871, 0.016623008728027345, 0.01678976058959961, 0.016808448791503908, 0.01679769515991211, 0.016678911209106445, 0.01667635154724121, 0.0170644474029541, 0.016901567459106447, 0.01697849655151367, 0.016856992721557617, 0.01664771270751953, 0.016756767272949218, 0.01664259147644043, 0.016928096771240235, 0.01677788734436035, 0.01668611145019531, 0.016563167572021486, 0.016688671112060547, 0.016616992950439453, 0.01663702392578125, 0.01674950408935547, 0.016690080642700195, 0.016643552780151366, 0.01669174385070801, 0.01666662406921387, 0.01667433547973633, 0.016650880813598633, 0.01671561622619629, 0.016606239318847655, 0.016683584213256837, 0.016916223526000976, 0.01692972755432129, 0.016776512145996094, 0.01671824073791504, 0.01665433692932129, 0.016695295333862305, 0.016760831832885743, 0.016801599502563477, 0.016659744262695314, 0.016705535888671876, 0.01676585578918457, 0.016900096893310547, 0.016668031692504883, 0.016681215286254884, 0.016628095626831055, 0.01664204788208008, 0.01660259246826172, 0.016660192489624023, 0.016614208221435545]",tokens/s,59.49568450537277,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, 
in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 20887 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,6765.789184,7762.542592,0.0,7367.294976,7351.94368,s,1,12.5058095703125,12.5058095703125,0.0,12.5058095703125,12.5058095703125,12.5058095703125,12.5058095703125,[12.5058095703125],,kWh,0.00015767173855417126,1.738523505041432e-05,5.054559599200134e-05,0.00022560256959658692,,MB,1635.319808,8404.271104,0.0,7994.343424,7863.794176,s,10,2.9980195617675776,0.29980195617675787,0.0004853329511504965,0.2998737182617187,0.3003250793457031,0.3003877288818359,0.3004378485107422,"[0.2996583557128906, 0.30008908081054686, 0.29907073974609377, 0.29927716064453125, 0.29923199462890626, 0.30045037841796873, 0.30015191650390627, 0.30027667236328126, 0.3003111572265625, 0.2995021057128906]",tokens/s,853.8970301083259,kWh,8.76525808872551e-06,9.666528461919464e-07,5.797406598705881e-06,1.5529317533623337e-05,tokens/kWh,16484948.514042618,MB,1641.824256,8676.900864,0.0,8266.973184,8120.408064,s,10,24.660447021484373,2.4660447021484373,0.0022546624102290007,2.4662266845703122,2.4684953125,2.4685719970703124,2.4686333447265625,"[2.465872314453125, 2.465596923828125, 2.4603427734375, 2.46468408203125, 2.467619873046875, 2.46687158203125, 2.468648681640625, 2.46575146484375, 2.4665810546875, 2.468478271484375]",tokens/s,25.546982155316936,kWh,7.217162492544173e-05,7.960560736861764e-06,4.812663654049395e-05,0.00012825882220279745,tokens/kWh,491194.2813601317,,s,630,24.654574352264408,0.03913424500359429,0.0003454978796092168,0.0391464958190918,0.03957495574951172,0.03966784496307373,0.03983591388702393,"[0.03908633422851562, 0.03847494506835938, 0.03833103942871094, 0.03829171371459961, 0.038526878356933594, 0.03899097442626953, 0.03889823913574219, 0.03885702514648438, 0.038631393432617185, 0.03855363082885742, 0.03917004776000976, 0.03863142395019531, 0.03844710540771484, 0.0385986557006836, 0.03871539306640625, 0.038866943359375, 0.03882393646240234, 0.038727680206298826, 0.03872972869873047, 0.03907379150390625, 0.03919772720336914, 0.03914236831665039, 0.03912483215332031, 0.03905142211914062, 0.03930521774291992, 0.039206912994384766, 0.039188255310058595, 0.03881596755981445, 0.038830078125, 0.0387665901184082, 0.039073055267333984, 0.03901513671875, 0.03908943939208984, 0.03895574569702148, 0.039374526977539064, 0.03924166488647461, 0.03908415985107422, 0.03921529769897461, 0.03938924789428711, 0.03960627365112305, 0.03947315216064453, 0.03955055999755859, 0.039526622772216795, 0.03944451141357422, 0.03933552169799805, 0.03915568161010742, 0.03917168045043945, 0.03952742385864258, 0.039373855590820316, 0.03941020965576172, 0.039637439727783205, 0.03946495819091797, 0.039247966766357424, 0.03930908966064453, 
0.03944979095458984, 0.03929980850219727, 0.03959215927124023, 0.03966918563842774, 0.03949539184570312, 0.039905982971191405, 0.039701793670654295, 0.03977865600585938, 0.03989763259887695, 0.03919830322265625, 0.03895702362060547, 0.038859455108642575, 0.03898137664794922, 0.03891225433349609, 0.03890291213989258, 0.038277568817138674, 0.03859632110595703, 0.03843468856811524, 0.03857705688476563, 0.03897951889038086, 0.03896054458618164, 0.038754913330078126, 0.03876249694824219, 0.03929644775390625, 0.039166526794433595, 0.03902668762207031, 0.038771743774414065, 0.03885564804077148, 0.038776832580566405, 0.038809600830078124, 0.038819839477539066, 0.03923747253417969, 0.039166175842285156, 0.03884806442260742, 0.03942643356323242, 0.03916156768798828, 0.03917238235473633, 0.03909836959838867, 0.03903039932250976, 0.039541118621826174, 0.03944160079956055, 0.03918316650390625, 0.03896319961547851, 0.0388218879699707, 0.03886899185180664, 0.03897100830078125, 0.039123329162597656, 0.039251518249511716, 0.039039615631103516, 0.03924332809448242, 0.039198974609375, 0.03910860824584961, 0.03946086502075195, 0.03938230514526367, 0.039402206420898436, 0.0392540168762207, 0.03923763275146484, 0.03910806274414062, 0.03962246322631836, 0.039778144836425784, 0.03965017700195313, 0.03947724914550781, 0.03935635375976562, 0.03935785675048828, 0.039365280151367185, 0.03947724914550781, 0.039370750427246096, 0.03933184051513672, 0.0393072624206543, 0.03922739028930664, 0.03951113510131836, 0.039443359375, 0.03890985488891602, 0.03852912139892578, 0.038545406341552735, 0.03855974578857422, 0.0385654411315918, 0.038521278381347654, 0.0384224967956543, 0.03846352005004883, 0.03845657730102539, 0.03877964782714844, 0.03866419219970703, 0.0385629768371582, 0.03856083297729492, 0.039030017852783205, 0.038930976867675784, 0.03887891387939453, 0.038844734191894534, 0.03880755233764648, 0.03870515060424805, 0.038578174591064454, 0.03853107070922852, 0.0388955192565918, 0.03902268981933594, 0.038834175109863284, 0.03908748626708984, 0.03920355224609375, 0.03910851287841797, 0.038946495056152344, 0.038963264465332034, 0.0388139533996582, 0.03936793518066406, 0.039242496490478514, 0.03906755065917969, 0.03894895935058594, 0.03896934509277344, 0.03911270523071289, 0.03892784118652344, 0.03896368026733398, 0.03908169555664062, 0.03919625473022461, 0.03929065704345703, 0.039297855377197266, 0.03925609588623047, 0.039250049591064456, 0.039257953643798825, 0.03962860870361328, 0.03947555160522461, 0.03938508987426758, 0.039413761138916016, 0.039452671051025394, 0.0394947509765625, 0.03954166412353516, 0.03939123153686523, 0.03936880111694336, 0.039245246887207035, 0.039252574920654294, 0.039474880218505856, 0.03960639953613281, 0.039446590423583984, 0.03951424026489258, 0.0395359992980957, 0.03946547317504883, 0.03937212753295898, 0.03901440048217773, 0.038747135162353515, 0.038492225646972654, 0.03894524765014649, 0.039061920166015625, 0.03886678314208984, 0.03855791854858399, 0.0385167350769043, 0.038553600311279294, 0.03877068710327149, 0.03877273559570313, 0.03873382568359375, 0.038836223602294925, 0.03881951904296875, 0.03884054565429688, 0.038757823944091795, 0.03915433502197266, 0.03899135971069336, 0.038992385864257816, 0.03890716934204102, 0.03875094223022461, 0.03882393646240234, 0.03887513732910156, 0.03874540710449219, 0.038957439422607425, 0.03901472091674805, 0.03894831848144531, 0.03887760162353516, 0.03935801696777344, 0.03922371292114258, 0.03917536163330078, 0.039203422546386715, 
0.03935475158691406, 0.03919257736206055, 0.03926012802124024, 0.039423103332519534, 0.039265182495117186, 0.03904307174682617, 0.03892019271850586, 0.03905036926269531, 0.039120960235595706, 0.039050048828125, 0.03911385726928711, 0.03934067153930664, 0.039272705078125, 0.039284320831298826, 0.0395145263671875, 0.03934400177001953, 0.03943027114868164, 0.039460159301757815, 0.03928543853759765, 0.03971859359741211, 0.03963935852050781, 0.039413761138916016, 0.03942102432250977, 0.039424671173095706, 0.039551231384277345, 0.039624385833740235, 0.039548961639404294, 0.03943164825439453, 0.03959276962280273, 0.0395142707824707, 0.03946684646606445, 0.03901033782958984, 0.03861577606201172, 0.03848732757568359, 0.038486560821533206, 0.038798656463623044, 0.03878579330444336, 0.03866195297241211, 0.0386761589050293, 0.038765182495117186, 0.03880956649780273, 0.03876457595825195, 0.03870684814453125, 0.03892870330810547, 0.038895294189453124, 0.03881350326538086, 0.03877737426757812, 0.038959102630615236, 0.038958942413330075, 0.03873603057861328, 0.03901792144775391, 0.03902521514892578, 0.0393994255065918, 0.039016448974609375, 0.039124385833740234, 0.03902912139892578, 0.0390043830871582, 0.038735870361328126, 0.038856254577636716, 0.03909062576293945, 0.03914956665039063, 0.03905945587158203, 0.0391734733581543, 0.03916422271728515, 0.039063583374023436, 0.039512382507324216, 0.03952640151977539, 0.03931545639038086, 0.0393359375, 0.03922518539428711, 0.03938524627685547, 0.03928617477416992, 0.03904982376098633, 0.03920896148681641, 0.03936275100708008, 0.03937849426269531, 0.03938451385498047, 0.039405536651611325, 0.039410526275634766, 0.03946022415161133, 0.03937139129638672, 0.039684097290039064, 0.03947520065307617, 0.03959807968139648, 0.03953664016723633, 0.03949158477783203, 0.03956252670288086, 0.03959676742553711, 0.03955279922485352, 0.03957753753662109, 0.0394666862487793, 0.03974614334106445, 0.03972463989257812, 0.0400654411315918, 0.03949135971069336, 0.03877068710327149, 0.03853952026367188, 0.03837952041625976, 0.03858790588378906, 0.03865983963012695, 0.03862579345703125, 0.038604225158691406, 0.038472511291503905, 0.03874816131591797, 0.03878448104858399, 0.03890230560302734, 0.03886489486694336, 0.03895296096801758, 0.03865727996826172, 0.03863808059692383, 0.038965503692626954, 0.03907583999633789, 0.038873153686523436, 0.03879740905761719, 0.03873484802246094, 0.039008960723876954, 0.03924595260620117, 0.03937283325195313, 0.039231487274169925, 0.03914342498779297, 0.0390041618347168, 0.03895500946044922, 0.0393809928894043, 0.03894076919555664, 0.03896105575561523, 0.03904512023925781, 0.03896115112304688, 0.03902668762207031, 0.03917824172973633, 0.039153568267822264, 0.03915935897827148, 0.0393240966796875, 0.039346271514892575, 0.039327743530273435, 0.039362560272216796, 0.039589088439941404, 0.039658432006835935, 0.03944755172729492, 0.03941462326049805, 0.039399265289306644, 0.039349536895751956, 0.03926713562011719, 0.03971263885498047, 0.03972713470458984, 0.039562751770019534, 0.039737888336181644, 0.0395470085144043, 0.039346176147460936, 0.03926425552368164, 0.03931545639038086, 0.039329792022705076, 0.03933184051513672, 0.03946086502075195, 0.039550975799560545, 0.039637054443359375, 0.0394936637878418, 0.03943734359741211, 0.038967262268066405, 0.03844409561157226, 0.038675422668457034, 0.038801406860351564, 0.03873750305175781, 0.03901603317260742, 0.03892102432250977, 0.03878908920288086, 0.03875433731079102, 0.038596607208251955, 0.038604286193847655, 
0.038631519317626956, 0.03864617538452148, 0.0388485107421875, 0.038917537689208984, 0.038741695404052735, 0.03872604751586914, 0.039135295867919924, 0.039258560180664065, 0.03897923278808594, 0.039035232543945315, 0.03896319961547851, 0.03898303985595703, 0.038824577331542966, 0.03889561462402344, 0.038828033447265625, 0.0389939193725586, 0.03902054214477539, 0.03918950271606445, 0.03943936157226562, 0.03926835250854492, 0.03915161514282227, 0.039210945129394534, 0.03958585739135742, 0.03938508987426758, 0.03918438339233398, 0.03915375900268555, 0.03917404937744141, 0.03946086502075195, 0.039241695404052736, 0.03900617599487305, 0.039110721588134764, 0.03909836959838867, 0.0390978889465332, 0.03927702331542969, 0.039354366302490236, 0.03925299072265625, 0.03979270553588867, 0.03973388671875, 0.03953190231323242, 0.039494590759277345, 0.03949552154541015, 0.03936819076538086, 0.03980579376220703, 0.039804737091064454, 0.039653377532958986, 0.03972259140014649, 0.03957564926147461, 0.03978604888916016, 0.03972787094116211, 0.03974092864990234, 0.03990784072875977, 0.0398287353515625, 0.038836223602294925, 0.03855769729614258, 0.03857920074462891, 0.03857926559448242, 0.03845929718017578, 0.03849628829956055, 0.038991870880126955, 0.03892633438110352, 0.038866943359375, 0.03908169555664062, 0.03920515060424805, 0.039008255004882815, 0.038729217529296874, 0.03857049560546875, 0.03867766571044922, 0.03879100799560547, 0.038832897186279296, 0.038840576171875, 0.03879100799560547, 0.03859881591796875, 0.03896115112304688, 0.038948863983154294, 0.038868896484375, 0.03919267272949219, 0.03923763275146484, 0.03917004776000976, 0.038967201232910156, 0.039038143157958984, 0.03898780822753906, 0.03888217544555664, 0.03883318328857422, 0.03905436706542969, 0.03907372665405273, 0.03896310424804687, 0.03932579040527344, 0.0394420166015625, 0.03948118209838867, 0.03928326416015625, 0.039316608428955076, 0.03961494445800781, 0.039597793579101564, 0.039527103424072264, 0.03927449417114258, 0.03925596618652344, 0.039209056854248046, 0.03916377639770508, 0.03912307357788086, 0.039180030822753904, 0.03932928085327148, 0.03935308837890625, 0.03930252838134766, 0.03923212814331055, 0.039378944396972655, 0.03937279891967774, 0.03932947158813477, 0.039522624969482424, 0.03953251266479492, 0.039507999420166015, 0.03958736038208008, 0.03966620635986328, 0.039851966857910155, 0.03983769607543945, 0.03962265777587891, 0.038776832580566405, 0.03861276626586914, 0.03865827178955078, 0.0387209587097168, 0.038591041564941406, 0.03849324798583984, 0.03844944000244141, 0.03865628814697265, 0.038711681365966796, 0.038587455749511716, 0.03879199981689453, 0.038801345825195316, 0.03873196792602539, 0.03877273559570313, 0.039106559753417966, 0.03906150436401367, 0.039016448974609375, 0.03899347305297852, 0.039239776611328124, 0.03908339309692383, 0.0388271369934082, 0.03894051361083985, 0.03888329696655273, 0.03881129455566406, 0.039161407470703125, 0.03909846496582031, 0.03899260711669922, 0.038997791290283204, 0.038964542388916015, 0.03905014419555664, 0.03933996963500976, 0.03930527877807617, 0.03916377639770508, 0.03909235382080078, 0.03899369430541992, 0.03901871871948242, 0.03913900756835938, 0.039113025665283206, 0.03903398513793945, 0.03906233596801758, 0.039465023040771485, 0.03954483032226563, 0.03953987121582031, 0.0396317138671875, 0.03934793472290039, 0.039366943359375, 0.03929817581176758, 0.039433216094970705, 0.039363872528076174, 0.03964364624023437, 0.03957487869262695, 0.03942211151123047, 
0.039420127868652344, 0.03935171127319336, 0.03979702377319336, 0.03962265777587891, 0.0396192626953125, 0.03957350540161133, 0.03949702453613281, 0.03953529739379883, 0.03953664016723633, 0.039593982696533206, 0.03954867172241211, 0.03917619323730469, 0.03869696044921875, 0.03838771057128906, 0.03904307174682617, 0.03878911972045898, 0.038899711608886715, 0.03872870254516601, 0.03873689651489258, 0.038735870361328126, 0.03878815841674805, 0.03876927947998047, 0.03884268951416016, 0.03889299011230469, 0.03895552062988281, 0.03900831985473633, 0.03900131225585937, 0.03912783813476563, 0.03897139358520508, 0.038860801696777345, 0.038793182373046874, 0.038846752166748044, 0.039161441802978515, 0.03918044662475586, 0.03889766311645508, 0.038876480102539065, 0.038769344329833984, 0.03886297607421875, 0.03880467224121094, 0.03898643112182617, 0.0389956169128418, 0.039036991119384766, 0.038803775787353514, 0.03935990524291992, 0.03929964828491211, 0.03949977493286133, 0.03919647979736328, 0.039184574127197266, 0.03951395034790039, 0.039591712951660155, 0.03950758361816406, 0.039524513244628905, 0.03971535873413086, 0.03946092987060547, 0.03930918502807617, 0.03919257736206055, 0.03938111877441406, 0.03942399978637695, 0.039376609802246096, 0.039385215759277344, 0.039333343505859375, 0.03920966339111328, 0.03952844619750977, 0.039495681762695314, 0.039472862243652346, 0.03946246337890625, 0.03936966323852539, 0.03958147048950195, 0.03954617691040039, 0.03978092956542969, 0.03971903991699219, 0.03956531143188476, 0.039831550598144534, 0.03990323257446289]",tokens/s,25.55306739425163,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,8219.754496,11243.814912,0.0,10848.567296,10616.027648,s,1,14.2756416015625,14.2756416015625,0.0,14.2756416015625,14.2756416015625,14.2756416015625,14.2756416015625,[14.2756416015625],,kWh,0.00021442627039165626,2.3645609914202468e-05,6.747199842199736e-05,0.0003055438787278561,,MB,4062.838784,11665.342464,0.0,11255.414784,11070.470656,s,10,3.731231964111328,0.3731231964111328,0.0011575441375760197,0.3732413330078125,0.37406481323242186,0.3748029296875,0.37539342285156246,"[0.37119436645507814, 0.37189498901367185, 0.3733327331542969, 0.37314993286132814, 0.3737827453613281, 0.3720987243652344, 0.3734735412597656, 0.3728630981445312, 0.3739007873535156, 0.3755410461425781]",tokens/s,686.1004688594102,kWh,1.091028810925901e-05,1.2032047072948179e-06,7.232546938296628e-06,1.9346039754850457e-05,tokens/kWh,13232682.411696969,MB,4067.155968,11667.439616,0.0,11257.511936,11070.473216,s,10,28.777669921875,2.8777669921875004,0.004039830674810794,2.8787237548828126,2.8814861328125,2.8832177978515627,2.8846031298828128,"[2.87059619140625, 2.87385693359375, 2.873241455078125, 2.87652734375, 2.8801279296875, 2.878455810546875, 2.881101318359375, 2.87982177734375, 2.87899169921875, 
2.884949462890625]",tokens/s,21.89197394056956,kWh,8.428800495532387e-05,9.29711071260248e-06,5.61396148705021e-05,0.0001497247305384285,tokens/kWh,420772.17152733734,,s,630,28.774752719879153,0.04567421066647485,0.0004152863056896368,0.04566843032836914,0.045996459197998045,0.04609201831817627,0.04765810729980469,"[0.0477325439453125, 0.04525689697265625, 0.04514815902709961, 0.045197311401367186, 0.04511334228515625, 0.04492697525024414, 0.04501619338989258, 0.04558732986450195, 0.04525875091552734, 0.04495974349975586, 0.045055999755859374, 0.04554499053955078, 0.04532681655883789, 0.04513587188720703, 0.04541219329833984, 0.045509822845458986, 0.045292510986328124, 0.04523622512817383, 0.045270687103271486, 0.04557830429077148, 0.045416736602783205, 0.0454901123046875, 0.045709217071533206, 0.04562335968017578, 0.04540985488891602, 0.04540060806274414, 0.04520959854125976, 0.04558848190307617, 0.04547375869750977, 0.04528047943115234, 0.04534764862060547, 0.04553062438964844, 0.045482494354248046, 0.04548825454711914, 0.04541977691650391, 0.04564035034179687, 0.04565923309326172, 0.045625438690185545, 0.045778976440429685, 0.045738529205322266, 0.045666526794433594, 0.045951038360595706, 0.045868545532226565, 0.045916606903076175, 0.045668319702148436, 0.04589324951171875, 0.04563190460205078, 0.04556729507446289, 0.04560351943969727, 0.045568000793457034, 0.04591820907592774, 0.04558028793334961, 0.04563763046264648, 0.04591408157348633, 0.04581584167480469, 0.0456190071105957, 0.04587948989868164, 0.04596332931518555, 0.04575839996337891, 0.04563558578491211, 0.04558848190307617, 0.04590796661376953, 0.04584175872802734, 0.0480522575378418, 0.045537246704101565, 0.044889888763427734, 0.04483913421630859, 0.0452006721496582, 0.04505059051513672, 0.04500889587402344, 0.04507209777832031, 0.04545264053344727, 0.04529446411132813, 0.045168704986572265, 0.04543283081054687, 0.04544688034057617, 0.04538531112670898, 0.04545753479003906, 0.04558905410766602, 0.04584774398803711, 0.04581232070922851, 0.045314273834228515, 0.045292766571044925, 0.04594972610473633, 0.04574316787719727, 0.04578345489501953, 0.045566497802734376, 0.045501983642578125, 0.045287296295166014, 0.0453125114440918, 0.04537558364868164, 0.045635551452636716, 0.04546563339233398, 0.04535500717163086, 0.04526079940795898, 0.045520896911621096, 0.0455530891418457, 0.04543094253540039, 0.04569494247436524, 0.045801025390625, 0.04575651168823242, 0.04574492645263672, 0.04566220855712891, 0.045889022827148435, 0.046002689361572265, 0.04598191833496094, 0.04584220886230469, 0.04592156982421875, 0.045515487670898434, 0.04579315185546875, 0.04564534378051758, 0.045736000061035155, 0.04557244873046875, 0.04574982452392578, 0.04575689697265625, 0.04566851043701172, 0.045471969604492186, 0.04548992156982422, 0.04567862319946289, 0.04580352020263672, 0.0458526725769043, 0.046241119384765626, 0.045920639038085936, 0.045777183532714844, 0.04561414337158203, 0.04609868621826172, 0.04836604690551758, 0.04594255828857422, 0.04524486541748047, 0.04480924987792969, 0.04500783920288086, 0.045197311401367186, 0.04501913452148437, 0.045072383880615234, 0.04503504180908203, 0.045009471893310546, 0.04537334442138672, 0.04514815902709961, 0.045139198303222654, 0.04550457763671875, 0.04567315292358398, 0.04530944061279297, 0.04547430419921875, 0.04551472091674805, 0.04539779281616211, 0.04534435272216797, 0.04559939193725586, 0.04592025756835937, 0.04605747222900391, 0.0456703987121582, 0.04545238494873047, 0.0455115852355957, 
0.045313919067382816, 0.04506841659545899, 0.04528537750244141, 0.045445121765136716, 0.04532428741455078, 0.04533033752441406, 0.04536323165893555, 0.04554153442382813, 0.04559862518310547, 0.04551475143432617, 0.04588899230957031, 0.04593699264526367, 0.04588972854614258, 0.045453311920166016, 0.045742080688476565, 0.0461475830078125, 0.04592435073852539, 0.046134590148925785, 0.04595167922973633, 0.045864959716796876, 0.045638912200927736, 0.04581657409667969, 0.04562124633789062, 0.04546355056762695, 0.04545705413818359, 0.045580608367919925, 0.04576204681396484, 0.0456110725402832, 0.04556604766845703, 0.04591811370849609, 0.04564015960693359, 0.04564275360107422, 0.0459048957824707, 0.04627251052856445, 0.04580108642578125, 0.04570150375366211, 0.04603084945678711, 0.04742211151123047, 0.045461505889892576, 0.04511334228515625, 0.04483686447143555, 0.044886016845703126, 0.045156097412109374, 0.04538803100585938, 0.045218910217285156, 0.04511222457885742, 0.04541439819335937, 0.045700992584228516, 0.04534489440917969, 0.04515020751953125, 0.04522335815429687, 0.04556240081787109, 0.04565200042724609, 0.045754432678222656, 0.04584236907958984, 0.045699073791503904, 0.0457105598449707, 0.04594150543212891, 0.045760223388671875, 0.045559585571289064, 0.045343265533447266, 0.04520345687866211, 0.045379585266113284, 0.04530995178222656, 0.04530995178222656, 0.045442497253417966, 0.04568300628662109, 0.04565769577026367, 0.045684993743896486, 0.04582646560668945, 0.04578003311157226, 0.04563040161132813, 0.04567244720458984, 0.04595817565917969, 0.04600931167602539, 0.04583187103271484, 0.045682079315185545, 0.04593936157226562, 0.04602249526977539, 0.04592262268066406, 0.045826145172119144, 0.04565606307983398, 0.04550764846801758, 0.04552201461791992, 0.045547359466552736, 0.04579328155517578, 0.045690879821777344, 0.045547489166259766, 0.04581788635253906, 0.04597715377807617, 0.04570111846923828, 0.04568313598632812, 0.04591180801391601, 0.04599123382568359, 0.0457081298828125, 0.04592854309082031, 0.04598332977294922, 0.04605788803100586, 0.046047233581542966, 0.046137344360351565, 0.04845094299316406, 0.04585327911376953, 0.04522208023071289, 0.04488816070556641, 0.04491872024536133, 0.0451541748046875, 0.04512294387817383, 0.04530044937133789, 0.04519289779663086, 0.04542828750610352, 0.045628223419189456, 0.04560844802856445, 0.045443550109863284, 0.045625343322753906, 0.04559667205810547, 0.045446750640869144, 0.04576707077026367, 0.04563353729248047, 0.04559667205810547, 0.04542252731323242, 0.045674560546875, 0.045932350158691404, 0.04574227142333984, 0.04533657455444336, 0.04531520080566406, 0.04557699203491211, 0.045537376403808595, 0.0453570556640625, 0.045547359466552736, 0.04550876617431641, 0.045784351348876956, 0.04563017654418945, 0.045774848937988284, 0.04591129684448242, 0.04579983901977539, 0.045840736389160155, 0.04594883346557617, 0.04575609588623047, 0.045605281829833984, 0.04577254486083984, 0.045964672088623044, 0.04593907165527344, 0.04591606521606445, 0.04585276794433594, 0.045752254486083985, 0.04567510223388672, 0.04575129699707031, 0.04576764678955078, 0.04569494247436524, 0.04578003311157226, 0.0459186897277832, 0.04595267105102539, 0.0457957763671875, 0.04606556701660156, 0.04571184158325195, 0.0458045768737793, 0.04575065612792969, 0.04590655899047852, 0.04616387176513672, 0.04593638229370117, 0.0457743034362793, 0.04608918380737305, 0.046206878662109374, 0.04830179214477539, 0.04582428741455078, 0.045090816497802735, 0.045006847381591795, 
0.045225311279296875, 0.04516476821899414, 0.04521315383911133, 0.04525564956665039, 0.045561569213867184, 0.04551046371459961, 0.045335006713867185, 0.04528486251831055, 0.04526515197753906, 0.04534636688232422, 0.04546630477905273, 0.04525056076049805, 0.046088191986083986, 0.04558848190307617, 0.04563132858276367, 0.04533036804199219, 0.04567062377929688, 0.04603875350952148, 0.04568681716918945, 0.04587545776367188, 0.04551891326904297, 0.04535433578491211, 0.04540060806274414, 0.04529568099975586, 0.04561641693115234, 0.04542867279052734, 0.04541708755493164, 0.04569664001464844, 0.045711296081542965, 0.0457344970703125, 0.04571750259399414, 0.045614974975585934, 0.04582320022583008, 0.04569295883178711, 0.04561337661743164, 0.04579385757446289, 0.04595465469360351, 0.045942943572998045, 0.04606351852416992, 0.045950431823730466, 0.04605759811401367, 0.04566502380371094, 0.045767807006835935, 0.045638526916503906, 0.04577648162841797, 0.04562505722045898, 0.04581356811523438, 0.045908863067626954, 0.0458342399597168, 0.04563497543334961, 0.04588140869140625, 0.04582620620727539, 0.04574860763549805, 0.045851905822753905, 0.04605763244628906, 0.04595315170288086, 0.04599827194213867, 0.04583251190185547, 0.04596246337890625, 0.04879795074462891, 0.04600428771972656, 0.04512550354003906, 0.04509254455566406, 0.04501542282104492, 0.04497593688964844, 0.04499110412597656, 0.045049823760986325, 0.04576668930053711, 0.04559798431396484, 0.04549296188354492, 0.04565737533569336, 0.04549043273925781, 0.04529558563232422, 0.04520505523681641, 0.04546636962890625, 0.04568252944946289, 0.04557612609863281, 0.04537184143066406, 0.04535228729248047, 0.04593231964111328, 0.045920639038085936, 0.04594736099243164, 0.04581292724609375, 0.045537120819091795, 0.0454128303527832, 0.04554598236083984, 0.045440799713134764, 0.0455, 0.04542652893066406, 0.04557852935791016, 0.04563772964477539, 0.04575068664550781, 0.04573788833618164, 0.045623294830322264, 0.045782432556152344, 0.045800128936767576, 0.04552054214477539, 0.046077919006347653, 0.045857311248779294, 0.04587299346923828, 0.04571088027954102, 0.04618492889404297, 0.04592355346679688, 0.04586511993408203, 0.045763198852539065, 0.04573183822631836, 0.04594217681884766, 0.04583046340942383, 0.04587747192382813, 0.045684799194335934, 0.04566835021972656, 0.04595097732543945, 0.046134815216064454, 0.04593916702270508, 0.04589158248901367, 0.04589363098144531, 0.04592639923095703, 0.04571446228027344, 0.04584684753417969, 0.046262943267822265, 0.046165729522705076, 0.04615536117553711, 0.047457313537597655, 0.04550124740600586, 0.0452402229309082, 0.045407806396484375, 0.04519379043579102, 0.045115520477294925, 0.045281024932861326, 0.04544307327270508, 0.04547343826293945, 0.04537200164794922, 0.04547993469238281, 0.04560688018798828, 0.04551068878173828, 0.04521984100341797, 0.04549836730957031, 0.045708576202392576, 0.04555388641357422, 0.04559920120239258, 0.04552092742919922, 0.045676544189453126, 0.04592355346679688, 0.045533824920654296, 0.04554975891113281, 0.04575379180908203, 0.04573443222045898, 0.04558607864379883, 0.045609310150146486, 0.04559836959838867, 0.04544944000244141, 0.045647457122802736, 0.04573443222045898, 0.045764606475830076, 0.0457891845703125, 0.04564787292480469, 0.04560076904296875, 0.04581990432739258, 0.04554342269897461, 0.045913150787353516, 0.045790145874023434, 0.045856769561767576, 0.04604524612426758, 0.04593388748168945, 0.045723552703857424, 0.04562931060791016, 0.04605177688598633, 0.0460681266784668, 
0.04580966567993164, 0.045551616668701174, 0.0457523193359375, 0.045864959716796876, 0.04571136093139649, 0.04576169586181641, 0.04593340682983398, 0.04579452896118164, 0.0457053108215332, 0.04580767822265625, 0.046029441833496096, 0.04604927825927734, 0.045795360565185544, 0.045995903015136716, 0.04600636672973633, 0.04589977645874024, 0.045939743041992186, 0.04760575866699219, 0.045616191864013673, 0.04523619079589844, 0.0449705924987793, 0.04478188705444336, 0.045047840118408206, 0.0452525749206543, 0.04536463928222656, 0.0452960319519043, 0.04542819213867187, 0.04552169418334961, 0.04553932952880859, 0.045192798614501956, 0.045467456817626956, 0.04576537704467774, 0.04562723159790039, 0.04550857543945312, 0.04561318588256836, 0.04573174285888672, 0.04584447860717773, 0.045682689666748044, 0.046044960021972656, 0.04591843032836914, 0.04558438491821289, 0.045363201141357425, 0.04560486221313476, 0.04559667205810547, 0.04516454315185547, 0.04539311981201172, 0.045478687286376954, 0.04557136154174805, 0.04557440185546875, 0.045644256591796876, 0.04552908706665039, 0.045897727966308595, 0.04566220855712891, 0.04569091033935547, 0.046047199249267576, 0.0457891845703125, 0.04555571365356445, 0.045780990600585936, 0.045946750640869144, 0.04598723220825195, 0.04613808059692383, 0.04597132873535156, 0.045967071533203126, 0.04598748779296875, 0.0458493423461914, 0.045676544189453126, 0.04586700820922852, 0.04562124633789062, 0.04579663848876953, 0.04565884780883789, 0.04564582443237305, 0.04571955108642578, 0.045972606658935544, 0.04611065673828125, 0.04594537734985352, 0.04597983932495117, 0.04610054397583008, 0.045961376190185546, 0.0457154541015625, 0.046063617706298826, 0.04767948913574219, 0.045876480102539065, 0.04532275390625, 0.04503308868408203, 0.04509302520751953, 0.04543503952026367, 0.04541676712036133, 0.04523782348632813, 0.04544732666015625, 0.04568707275390625, 0.04567244720458984, 0.04557619094848633, 0.04550451278686524, 0.04534864044189453, 0.045813407897949215, 0.045732414245605466, 0.046094337463378904, 0.045757854461669925, 0.04581232070922851, 0.045674495697021485, 0.046034015655517575, 0.04604169464111328, 0.04577312088012695, 0.045631103515625, 0.04540454483032227, 0.04566835021972656, 0.045588096618652346, 0.04561753463745117, 0.04551459121704102, 0.045805824279785155, 0.04539587020874023, 0.04555939102172851, 0.04574044799804688, 0.04571263885498047, 0.04553305435180664, 0.045728641510009764, 0.04589494323730469, 0.045927040100097655, 0.04574627304077149, 0.0461475830078125, 0.04589683151245117, 0.04599625778198242, 0.045949600219726563, 0.04602880096435547, 0.04594233703613281, 0.045841121673583986, 0.045851905822753905, 0.04606000137329101, 0.04585827255249023, 0.045607295989990235, 0.045905406951904294, 0.045970081329345706, 0.04572979354858398, 0.04597350311279297, 0.04614924621582031, 0.04588787078857422, 0.04601651382446289, 0.04595302581787109, 0.04627046585083008, 0.04598524856567383, 0.045792896270751955, 0.04625065612792969, 0.04609868621826172]",tokens/s,21.894193362251276,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 
8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights 
self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1174.67136,987.62752,0.0,585.105408,557.135872,s,1,8.2523447265625,8.2523447265625,0.0,8.2523447265625,8.2523447265625,8.2523447265625,8.2523447265625,[8.2523447265625],,kWh,3.130329075413556e-05,3.443075080391679e-06,9.593063229992871e-06,4.433942906452011e-05,,MB,1378.361344,1147.011072,0.0,729.808896,689.092096,s,10,0.306799295425415,0.0306799295425415,0.00010370549462965012,0.03065217590332031,0.030763820838928223,0.03085085401535034,0.030920480556488036,"[0.03093788719177246, 0.030726112365722657, 0.03074448013305664, 0.030634975433349608, 0.030669376373291014, 0.030633888244628905, 0.03070332717895508, 0.030561920166015624, 0.030570240020751954, 0.030617088317871095]",tokens/s,8344.217337429816,kWh,9.202501397144128e-07,1.0139468772765945e-07,6.084657208742021e-07,1.6301105483162743e-06,tokens/kWh,157044563.79625294,MB,1392.427008,1293.811712,0.0,876.609536,689.094656,s,10,11.192409057617187,1.1192409057617188,0.01453047875821345,1.1253532104492188,1.1320630981445312,1.1336450744628908,1.1349106555175783,"[1.1306866455078124, 1.1317115478515625, 1.1069072265625, 1.113021728515625, 1.1290291748046875, 1.13522705078125, 1.1081610107421875, 1.0867691650390625, 1.12921826171875, 
1.12167724609375]",tokens/s,56.28815000924601,kWh,3.192749729278362e-05,3.5211714313319616e-06,1.349860112032713e-05,4.894726984444272e-05,tokens/kWh,1287099.3663225279,,s,630,11.185633096694957,0.017754973169357056,0.0005818242943146861,0.01781752014160156,0.01822146224975586,0.018380788803100585,0.019865641307830816,"[0.01763475227355957, 0.01802272033691406, 0.01806982421875, 0.017919776916503906, 0.018428064346313475, 0.018221183776855467, 0.018685888290405274, 0.018077728271484374, 0.018050975799560547, 0.018569023132324218, 0.018360511779785156, 0.017802303314208984, 0.018111423492431642, 0.018001279830932616, 0.01799228858947754, 0.01798579216003418, 0.018042720794677735, 0.01802566337585449, 0.018163839340209962, 0.01974540710449219, 0.018952096939086914, 0.01795439910888672, 0.018022911071777344, 0.017915872573852538, 0.0177357120513916, 0.018069631576538087, 0.018242591857910155, 0.017797983169555665, 0.017956256866455078, 0.018122400283813477, 0.017634239196777344, 0.01773481559753418, 0.017677152633666992, 0.017674079895019533, 0.017715423583984376, 0.017551296234130858, 0.017688575744628905, 0.01762099266052246, 0.017632511138916014, 0.01818435287475586, 0.01793289566040039, 0.018116607666015624, 0.0178606071472168, 0.018001888275146486, 0.0178621768951416, 0.017766624450683593, 0.018351615905761717, 0.01806211280822754, 0.018634752273559572, 0.01760416030883789, 0.017165887832641603, 0.017645439147949218, 0.017363840103149415, 0.017291200637817382, 0.017434240341186524, 0.01734307289123535, 0.017747039794921874, 0.017920543670654297, 0.01812508773803711, 0.017323200225830077, 0.017207199096679688, 0.017625568389892578, 0.017836063385009766, 0.017344671249389647, 0.017551359176635743, 0.017663360595703125, 0.017519231796264648, 0.017221792221069336, 0.01754627227783203, 0.017704767227172853, 0.017912607192993164, 0.01897056007385254, 0.01834832000732422, 0.018448160171508788, 0.01783216094970703, 0.017864416122436524, 0.018126367568969726, 0.018017023086547852, 0.01797532844543457, 0.01784185600280762, 0.018079967498779298, 0.017868223190307616, 0.01778665542602539, 0.017995872497558595, 0.017913663864135742, 0.017847232818603516, 0.017721343994140625, 0.01765171241760254, 0.017825056076049804, 0.017609439849853515, 0.018478464126586915, 0.01832819175720215, 0.01796291160583496, 0.017937631607055665, 0.01817091178894043, 0.018073024749755858, 0.018341535568237304, 0.01785318374633789, 0.018257951736450194, 0.018224672317504884, 0.018168256759643554, 0.01796643257141113, 0.0180163516998291, 0.017816127777099608, 0.017815616607666014, 0.017948480606079103, 0.018216224670410158, 0.017830751419067384, 0.017958015441894532, 0.017828544616699218, 0.01791168022155762, 0.018274623870849608, 0.01803228759765625, 0.017832511901855468, 0.018308895111083984, 0.017729536056518554, 0.01787494468688965, 0.017798463821411134, 0.017838783264160156, 0.01790127944946289, 0.018017568588256837, 0.018000032424926756, 0.018223968505859375, 0.017983488082885742, 0.017920000076293945, 0.017932287216186525, 0.017551359176635743, 0.018030624389648437, 0.018012319564819336, 0.018066272735595704, 0.018046144485473634, 0.0179866886138916, 0.01782032012939453, 0.018753536224365236, 0.017915327072143553, 0.018225248336791993, 0.017899007797241212, 0.01813715171813965, 0.018279327392578124, 0.018013376235961914, 0.018249568939208986, 0.0179005126953125, 0.01783193588256836, 0.017743871688842772, 0.017721343994140625, 0.017911808013916015, 0.017810592651367186, 0.017625951766967775, 0.01799078369140625, 
0.017752960205078126, 0.017723392486572266, 0.01768671989440918, 0.017700672149658203, 0.017556671142578126, 0.01750259208679199, 0.017756479263305664, 0.017718496322631835, 0.017722272872924806, 0.017500160217285156, 0.01732806396484375, 0.017250368118286133, 0.017166015625, 0.01729078483581543, 0.017103647232055662, 0.017117183685302736, 0.01720319938659668, 0.017527008056640626, 0.01740505599975586, 0.017268863677978516, 0.01733072090148926, 0.01700249671936035, 0.016863231658935548, 0.016795616149902343, 0.01680524826049805, 0.01702774429321289, 0.017277088165283203, 0.017728448867797852, 0.01741916847229004, 0.017293312072753905, 0.017375232696533204, 0.0172359676361084, 0.017195167541503905, 0.017022464752197267, 0.01716223907470703, 0.016949600219726562, 0.01707539176940918, 0.01727097511291504, 0.017458879470825195, 0.017275840759277343, 0.01733990478515625, 0.017240575790405274, 0.017020448684692383, 0.017160640716552735, 0.01712451171875, 0.017144704818725588, 0.017129472732543945, 0.016830272674560547, 0.016734399795532227, 0.016855039596557618, 0.017035263061523438, 0.01701478385925293, 0.01711628723144531, 0.016916799545288085, 0.016943679809570313, 0.017117183685302736, 0.017217536926269532, 0.01722368049621582, 0.017100799560546876, 0.0172728328704834, 0.017061248779296875, 0.017283199310302734, 0.017846784591674804, 0.017768447875976562, 0.017675359725952147, 0.018182655334472657, 0.017727359771728516, 0.017756704330444337, 0.017811456680297853, 0.017813695907592773, 0.017898591995239257, 0.018055904388427736, 0.017999872207641602, 0.017856672286987306, 0.01792393684387207, 0.017885120391845703, 0.017846336364746095, 0.018028543472290038, 0.017734687805175783, 0.017765472412109375, 0.017848192214965822, 0.017721343994140625, 0.017810943603515626, 0.018020288467407226, 0.01782022476196289, 0.018003679275512694, 0.018061504364013672, 0.017956159591674806, 0.018064159393310547, 0.018283552169799804, 0.018133535385131835, 0.01824345588684082, 0.01807766342163086, 0.01783184051513672, 0.017877695083618163, 0.01836031913757324, 0.018595552444458006, 0.01812681579589844, 0.018011552810668945, 0.017867679595947265, 0.017926143646240233, 0.018271392822265625, 0.017875392913818358, 0.01762873649597168, 0.017974016189575195, 0.018119487762451172, 0.017980287551879883, 0.018017919540405273, 0.01857369613647461, 0.018092031478881835, 0.017877248764038085, 0.017862464904785155, 0.017722368240356445, 0.018002656936645507, 0.018125024795532227, 0.01762099266052246, 0.017735872268676758, 0.017700672149658203, 0.017977504730224608, 0.017735231399536134, 0.017607200622558595, 0.017808448791503905, 0.017941471099853515, 0.01816752052307129, 0.01787494468688965, 0.01795471954345703, 0.018065792083740234, 0.017847295761108398, 0.017949087142944336, 0.017817920684814453, 0.017618623733520508, 0.017733055114746092, 0.01764828872680664, 0.019097824096679688, 0.017807104110717772, 0.017950239181518553, 0.017805728912353515, 0.017713472366333007, 0.017893375396728514, 0.017674240112304687, 0.017717248916625978, 0.0179072322845459, 0.01791801643371582, 0.018069311141967772, 0.018006080627441405, 0.01790028762817383, 0.017722688674926757, 0.01777712059020996, 0.018105472564697266, 0.017947423934936525, 0.017917535781860353, 0.01791987228393555, 0.01794931221008301, 0.018106016159057617, 0.01794867134094238, 0.018028192520141602, 0.017920703887939454, 0.017798688888549803, 0.018143711090087892, 0.01777459144592285, 0.01785651206970215, 0.017710304260253905, 0.01805801582336426, 0.01778998374938965, 
0.01781648063659668, 0.01778518486022949, 0.017597312927246093, 0.017979263305664062, 0.01797324752807617, 0.01843609619140625, 0.01798963165283203, 0.017872896194458008, 0.017614431381225586, 0.017901248931884765, 0.017691360473632813, 0.017889280319213868, 0.01774140739440918, 0.01773369598388672, 0.01781711959838867, 0.018025184631347658, 0.01845043182373047, 0.01865648078918457, 0.018168064117431642, 0.018044704437255858, 0.018735647201538086, 0.018024415969848634, 0.017774944305419923, 0.017909759521484374, 0.01771014404296875, 0.01816671943664551, 0.01767628860473633, 0.017799327850341797, 0.017744863510131836, 0.017703296661376954, 0.017661823272705077, 0.017576032638549805, 0.017711648941040038, 0.01816985511779785, 0.017913856506347657, 0.018268159866333008, 0.018191648483276368, 0.018258655548095703, 0.01803011131286621, 0.01803215980529785, 0.01857017517089844, 0.017890560150146485, 0.01761952018737793, 0.01786422348022461, 0.01810089683532715, 0.02047590446472168, 0.01898700714111328, 0.01834988784790039, 0.017938720703125, 0.01781711959838867, 0.0178732795715332, 0.01785856056213379, 0.018000032424926756, 0.01788083267211914, 0.01805961608886719, 0.017784576416015625, 0.017917407989501952, 0.0176246395111084, 0.017625343322753905, 0.01749814414978027, 0.017676992416381834, 0.018233343124389647, 0.018042911529541017, 0.01803183937072754, 0.018069311141967772, 0.01784012794494629, 0.017698272705078125, 0.01774985694885254, 0.01787392044067383, 0.017834112167358397, 0.017901439666748047, 0.018382848739624022, 0.01790496063232422, 0.018104671478271484, 0.01804435157775879, 0.018080671310424803, 0.017952768325805665, 0.017880895614624023, 0.018176191329956053, 0.018257919311523436, 0.01812588882446289, 0.018030559539794922, 0.017824256896972656, 0.017973056793212892, 0.018075712203979494, 0.018559583663940428, 0.0182108154296875, 0.01826201629638672, 0.01818796730041504, 0.018241855621337892, 0.018054880142211915, 0.017851936340332032, 0.018182720184326172, 0.01775436782836914, 0.017686336517333985, 0.01766793632507324, 0.017573408126831055, 0.01769664001464844, 0.01789427185058594, 0.01765577507019043, 0.017522272109985353, 0.01737059211730957, 0.017644512176513673, 0.017739776611328126, 0.01781670379638672, 0.017963903427124023, 0.01741823959350586, 0.01739529609680176, 0.017224096298217775, 0.0171539192199707, 0.0171060791015625, 0.01725129508972168, 0.016957056045532225, 0.016818048477172852, 0.016845312118530274, 0.01700819206237793, 0.01678585624694824, 0.016920576095581053, 0.01671334457397461, 0.01668953514099121, 0.016718080520629883, 0.01666431999206543, 0.016833759307861327, 0.01678006362915039, 0.016805471420288084, 0.016757152557373048, 0.01674563217163086, 0.016866144180297853, 0.01678335952758789, 0.016745855331420898, 0.016759424209594725, 0.01684889602661133, 0.016855039596557618, 0.016999679565429686, 0.016726783752441406, 0.016598848342895507, 0.01665558433532715, 0.01681711959838867, 0.016778303146362306, 0.01693996810913086, 0.01682851219177246, 0.017162143707275392, 0.017059808731079103, 0.016801055908203126, 0.016853727340698243, 0.017159999847412108, 0.016910112380981446, 0.016908063888549804, 0.016818431854248046, 0.016740032196044922, 0.017196895599365235, 0.017055776596069334, 0.016710176467895507, 0.01677270317077637, 0.017195743560791017, 0.017094783782958985, 0.016975648880004884, 0.01680188751220703, 0.016731199264526368, 0.016796607971191407, 0.016715776443481444, 0.016869375228881836, 0.01695913505554199, 0.016933216094970702, 0.016977920532226562, 
0.01703878402709961, 0.016882240295410158, 0.017036575317382813, 0.01798406410217285, 0.01774812889099121, 0.01774799919128418, 0.017792064666748045, 0.017722272872924806, 0.017927328109741212, 0.01759721565246582, 0.017626688003540038, 0.017875295639038086, 0.017716928482055663, 0.018110784530639648, 0.017989503860473634, 0.017884607315063476, 0.017832447052001953, 0.017953311920166016, 0.018231103897094727, 0.018073024749755858, 0.017826175689697264, 0.018167999267578124, 0.017893375396728514, 0.01802444839477539, 0.017612800598144532, 0.017321983337402345, 0.017566272735595703, 0.017795072555541993, 0.01743996810913086, 0.017478431701660156, 0.017682432174682617, 0.01764579200744629, 0.017620031356811523, 0.017563871383666992, 0.017426944732666014, 0.017946624755859376, 0.017888256072998047, 0.017550111770629883, 0.017548992156982423, 0.017604255676269533, 0.017290111541748046, 0.017047744750976562, 0.0172030086517334, 0.01714995193481445, 0.017395967483520507, 0.01706163215637207, 0.016860799789428713, 0.017074079513549806, 0.017054176330566405, 0.016879615783691407, 0.016869407653808594, 0.01683203125, 0.016914207458496092, 0.01716316795349121, 0.017325632095336913, 0.0171495361328125, 0.017536672592163086, 0.023212032318115236, 0.020968032836914063, 0.018092159271240235, 0.018114784240722655, 0.017922048568725587, 0.018226272583007814, 0.017918880462646485, 0.018176000595092775, 0.017915903091430666, 0.01796227264404297, 0.01800441551208496, 0.018083616256713866, 0.017918367385864258, 0.017981632232666016, 0.017901792526245117, 0.018062496185302736, 0.018119071960449217, 0.017843488693237305, 0.017945440292358398, 0.019914751052856446, 0.020051263809204103, 0.019023744583129883, 0.018064191818237305, 0.018264064788818358, 0.018110464096069336, 0.018089279174804688, 0.018064128875732423, 0.018354240417480468, 0.018245376586914063, 0.018116735458374025, 0.018081792831420897, 0.018155263900756835, 0.018001216888427735, 0.018158271789550783, 0.018378271102905273, 0.01818876838684082, 0.018102367401123046, 0.017875072479248046, 0.018485023498535157, 0.018132991790771484, 0.017893375396728514, 0.01807360076904297, 0.01776367950439453, 0.01762601661682129, 0.017794815063476563, 0.017983488082885742, 0.017657855987548828, 0.017581151962280273, 0.01754409599304199, 0.017475423812866212, 0.017277088165283203, 0.01722064018249512, 0.017398048400878906, 0.017016735076904297, 0.017521440505981447, 0.018008064270019532, 0.01728857612609863, 0.01724188804626465, 0.01751919937133789, 0.01817011260986328, 0.020242431640625, 0.021547008514404296, 0.018880512237548826, 0.01797881507873535, 0.017639232635498048, 0.01752128028869629, 0.017641311645507814, 0.017412384033203124, 0.01743667221069336, 0.01802774429321289, 0.018684703826904295, 0.017682432174682617, 0.017539072036743163, 0.01784832000732422, 0.017527839660644532, 0.017707551956176758, 0.0176378231048584, 0.017352544784545898, 0.017325504302978516, 0.01781190490722656, 0.017447200775146485, 0.017131071090698242, 0.01761734390258789, 0.018116607666015624, 0.01803878402709961, 0.017833984375, 0.01794819259643555, 0.01739321517944336, 0.01716521644592285, 0.01696767997741699, 0.016951295852661134, 0.017115135192871094, 0.017461055755615233, 0.017403999328613282, 0.01754307174682617]",tokens/s,56.32224788296946,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 58.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 128596 has 14.69 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 193.68 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,7184.900096,7954.366464,0.0,7551.844352,7485.12768,s,1,13.268470703125,13.268470703125,0.0,13.268470703125,13.268470703125,13.268470703125,13.268470703125,[13.268470703125],,kWh,0.0001682408088375079,1.8550930528815067e-05,5.178420809399953e-05,0.0002385759474603225,,MB,2830.180352,8243.77344,0.0,7826.571264,7735.225344,s,10,3.4457615661621093,0.34457615661621094,0.00034090135650515895,0.3445185089111328,0.34505919799804685,0.3451055877685547,0.34514269958496097,"[0.3446754150390625, 0.3451519775390625, 0.3443394470214844, 0.34504888916015625, 0.3445556945800781, 0.34475445556640627, 0.34443524169921874, 0.3439016418457031, 0.3444813232421875, 0.34441748046875]",tokens/s,742.941712839211,kWh,1.0088455262786643e-05,1.112564950890802e-06,6.710809966344523e-06,1.7911830180021968e-05,tokens/kWh,14292230.186814222,MB,2838.233088,8285.71648,0.0,7868.514304,7758.594048,s,10,25.666829589843747,2.5666829589843747,0.0028038430734798623,2.567400146484375,2.5698125732421873,2.5703017944335937,2.5706931713867185,"[2.562714599609375, 2.562442138671875, 2.570791015625, 2.568533935546875, 2.568759033203125, 2.56419873046875, 2.564885986328125, 2.569703857421875, 2.567998291015625, 2.566802001953125]",tokens/s,24.545298740335593,kWh,7.509903269971135e-05,8.283511464412427e-06,4.983937416265607e-05,0.00013322191832677984,tokens/kWh,472895.15712772874,,s,630,25.663301181793226,0.04073539870125907,0.0002970329025518393,0.04069223976135254,0.04114452590942382,0.041274570083618164,0.04156105407714844,"[0.04107657623291015, 0.04032972717285156, 0.040495296478271485, 0.04039203262329102, 0.040501918792724606, 0.04043929672241211, 0.04036198425292969, 0.040282623291015625, 0.040267295837402343, 0.040685279846191406, 0.040688385009765626, 0.04051276779174805, 0.04054876708984375, 0.0404667854309082, 0.04076339340209961, 0.04046847915649414, 0.04043062210083008, 0.040539104461669924, 0.04061183929443359, 0.04060774230957031, 0.04055350494384766, 0.04039779281616211, 0.040586849212646485, 0.040494590759277346, 0.040538623809814454, 0.04067776107788086, 0.040486942291259764, 0.04061183929443359, 0.04063436889648438, 0.04057212829589844, 0.04063907241821289, 0.04084550476074219, 0.04072652816772461, 0.040705440521240234, 0.040632320404052735, 0.04070051193237305, 0.04083852767944336, 0.04090739059448242, 0.04093948745727539, 0.040615966796875, 0.0405871696472168, 0.04055459213256836, 0.04076134490966797, 0.04084531021118164, 0.04073385620117188, 0.04072739028930664, 0.04065484619140625, 0.040548351287841795, 0.040760639190673825, 0.04092793655395508, 0.04077500915527344, 0.04069823837280274, 0.041021503448486325, 
0.04071139144897461, 0.040779937744140626, 0.040754016876220704, 0.04081868743896484, 0.040814464569091796, 0.04075699234008789, 0.04079635238647461, 0.041337024688720705, 0.04125696182250976, 0.04119311904907227, 0.04117913436889648, 0.040529918670654294, 0.040392704010009765, 0.04060569763183594, 0.04039680099487305, 0.04059340667724609, 0.04049862289428711, 0.04065542221069336, 0.040714241027832034, 0.04041523361206055, 0.04037836837768555, 0.04048896026611328, 0.04052899169921875, 0.04052880096435547, 0.040896289825439455, 0.040954078674316406, 0.040716289520263675, 0.040503009796142575, 0.040425376892089845, 0.04038899230957031, 0.04025548934936524, 0.04069375991821289, 0.04060969543457031, 0.04047267150878906, 0.040777313232421876, 0.040753150939941404, 0.04056908798217773, 0.04080451202392578, 0.04072150421142578, 0.040618911743164066, 0.040755199432373046, 0.040937473297119144, 0.040623775482177736, 0.04066134262084961, 0.040665088653564455, 0.04053811264038086, 0.04077977752685547, 0.04103577423095703, 0.04115407943725586, 0.04072259140014649, 0.040492416381835934, 0.040555137634277344, 0.04058143997192383, 0.04062566375732422, 0.04057740783691406, 0.04057715225219727, 0.04057452774047852, 0.04059145736694336, 0.040851806640625, 0.04074291229248047, 0.040683521270751956, 0.040690719604492186, 0.04093356704711914, 0.04067583847045898, 0.04060393524169922, 0.04068457412719727, 0.04079411315917969, 0.04095801544189453, 0.04091340637207031, 0.0406544303894043, 0.04065977478027344, 0.040748191833496095, 0.04098748779296875, 0.04129769515991211, 0.04064483261108399, 0.04094579315185547, 0.04092617416381836, 0.04073283386230469, 0.040730495452880856, 0.0405425910949707, 0.04053023910522461, 0.040958145141601565, 0.04084326553344726, 0.040683521270751956, 0.0404804801940918, 0.040585407257080076, 0.04090252685546875, 0.040968414306640624, 0.040836864471435544, 0.04061004638671875, 0.0405252799987793, 0.04072092819213867, 0.040613887786865234, 0.04062822341918945, 0.04047872161865235, 0.040419166564941406, 0.040673023223876954, 0.04052624130249023, 0.040512641906738284, 0.04077452850341797, 0.04106572723388672, 0.040958721160888674, 0.040697856903076174, 0.04077328109741211, 0.040730976104736326, 0.041109504699707033, 0.04103916931152344, 0.04069036865234375, 0.040716289520263675, 0.04075078582763672, 0.04103942489624023, 0.04111833572387695, 0.040771713256835936, 0.040560352325439454, 0.0406080322265625, 0.040564735412597655, 0.040591358184814456, 0.040736766815185545, 0.04096409606933594, 0.04072848129272461, 0.04058531188964844, 0.04061520004272461, 0.04073497772216797, 0.040742977142333985, 0.04084368133544922, 0.04085504150390625, 0.04078847885131836, 0.040705249786376956, 0.0409477424621582, 0.04096435165405273, 0.04096051025390625, 0.040890369415283206, 0.04106444931030274, 0.041524959564208985, 0.04152348709106445, 0.04138393783569336, 0.0411715202331543, 0.04045795059204101, 0.04064694213867188, 0.040632320404052735, 0.04046575927734375, 0.040521793365478516, 0.040723041534423826, 0.04069491195678711, 0.040575870513916014, 0.040525310516357424, 0.04060416030883789, 0.04061183929443359, 0.04055449676513672, 0.04048076629638672, 0.04104937744140625, 0.04107132720947266, 0.04082828903198242, 0.04042127990722656, 0.04040777587890625, 0.04046233749389649, 0.0407259521484375, 0.040327392578125, 0.04054665756225586, 0.0407347183227539, 0.04055807876586914, 0.040485088348388674, 0.04060960006713867, 0.0405522575378418, 0.04060432052612305, 0.04080844879150391, 0.04081459045410156, 
0.040783584594726564, 0.040984352111816405, 0.040694271087646484, 0.040648094177246095, 0.04052576065063476, 0.04070467376708985, 0.04110335922241211, 0.04116617584228516, 0.04093199920654297, 0.04058502578735351, 0.04066118240356445, 0.040576736450195314, 0.0406162223815918, 0.04072857666015625, 0.0405852165222168, 0.04052761459350586, 0.04061824035644531, 0.04064460754394531, 0.04072857666015625, 0.04068700790405273, 0.04078652954101562, 0.04089395141601562, 0.04088825607299805, 0.04089225769042969, 0.04084739303588867, 0.04110943984985352, 0.04126182556152344, 0.041275390625, 0.041061664581298826, 0.04127612686157227, 0.04181379318237305, 0.041885921478271484, 0.04116582489013672, 0.0405068473815918, 0.04060211181640625, 0.040443649291992186, 0.04080624008178711, 0.04056108856201172, 0.040680736541748044, 0.04038931274414063, 0.040316959381103516, 0.04058915328979492, 0.04044777679443359, 0.04058483123779297, 0.04048700714111328, 0.04094838333129883, 0.04111360168457031, 0.040949440002441405, 0.04069817733764648, 0.04074063873291016, 0.04048099136352539, 0.04029644775390625, 0.04046233749389649, 0.040357887268066404, 0.04055244827270508, 0.04041507339477539, 0.04047683334350586, 0.04041680145263672, 0.040481151580810545, 0.040441951751708984, 0.04067932891845703, 0.040714241027832034, 0.04118127822875976, 0.040613887786865234, 0.040546302795410154, 0.04056582260131836, 0.040549312591552734, 0.040609760284423826, 0.04063235092163086, 0.04067443084716797, 0.04091379165649414, 0.040904705047607424, 0.04090457534790039, 0.040971649169921874, 0.040783935546875, 0.04089926528930664, 0.04091817474365234, 0.04074975967407227, 0.04109904098510742, 0.04113391876220703, 0.04113462448120117, 0.041471168518066405, 0.04153631973266601, 0.041080768585205076, 0.04105017471313477, 0.04090675354003906, 0.04079411315917969, 0.04077772903442383, 0.04072854232788086, 0.04106607818603516, 0.04107228851318359, 0.04092598342895508, 0.04104553604125977, 0.041216480255126954, 0.04114636611938476, 0.04107171249389648, 0.040184062957763673, 0.04038870239257813, 0.040405502319335936, 0.040214462280273436, 0.04035395050048828, 0.04024934387207031, 0.04024115371704102, 0.0403037109375, 0.04026460647583008, 0.04036329650878906, 0.04045897674560547, 0.04052550506591797, 0.04061337661743164, 0.04081951904296875, 0.040591358184814456, 0.04043571090698242, 0.04050249481201172, 0.04037302398681641, 0.040323070526123043, 0.04024524688720703, 0.04036608123779297, 0.04065894317626953, 0.04043487930297852, 0.040347583770751955, 0.040426303863525394, 0.040734783172607425, 0.04076134490966797, 0.04055593490600586, 0.040624736785888675, 0.04053145599365234, 0.04046211242675781, 0.04050739288330078, 0.040573280334472654, 0.04082099151611328, 0.0407327995300293, 0.04068950271606445, 0.04070006561279297, 0.040685470581054685, 0.04071433639526367, 0.04074067306518555, 0.04075539016723633, 0.04063436889648438, 0.04050851058959961, 0.040747936248779294, 0.04081430435180664, 0.040849601745605466, 0.040839263916015625, 0.04085145568847656, 0.04113721466064453, 0.04115347290039063, 0.04115171051025391, 0.04151078414916992, 0.041513088226318356, 0.041194271087646485, 0.04109011077880859, 0.04099296188354492, 0.040966079711914065, 0.041161537170410156, 0.04138940811157227, 0.04115286254882813, 0.04118259048461914, 0.041212833404541016, 0.04109107208251953, 0.04033315277099609, 0.04034537506103516, 0.04032886505126953, 0.04036460876464844, 0.040505504608154295, 0.040473663330078125, 0.040385440826416014, 0.0402391357421875, 
0.040441505432128905, 0.04051593780517578, 0.04043775939941406, 0.040443294525146486, 0.040575584411621096, 0.04077689743041992, 0.040559425354003906, 0.04047385787963867, 0.04035391998291016, 0.040294143676757814, 0.04035033416748047, 0.040392959594726566, 0.04046819305419922, 0.040495391845703124, 0.040457408905029295, 0.040561054229736326, 0.04044022369384766, 0.040512641906738284, 0.04086249542236328, 0.04071023941040039, 0.0405032958984375, 0.04048255920410156, 0.04057523345947266, 0.04062783813476562, 0.04078540802001953, 0.04063935852050781, 0.04073846435546875, 0.040790367126464847, 0.040846881866455076, 0.04062198257446289, 0.04062988662719726, 0.040756160736083985, 0.04085929489135742, 0.04079977416992187, 0.04059014511108398, 0.040622081756591794, 0.04048076629638672, 0.04075286483764649, 0.040710430145263675, 0.04065894317626953, 0.04099459075927735, 0.04127356719970703, 0.041153919219970705, 0.04153152084350586, 0.04149264144897461, 0.04156246566772461, 0.04129792022705078, 0.04104998397827148, 0.04104000091552734, 0.041299488067626955, 0.04110793685913086, 0.04085148620605469, 0.041012641906738284, 0.04118790435791016, 0.04112937545776367, 0.04075987243652344, 0.04084534454345703, 0.04056860733032226, 0.040452320098876955, 0.04035567855834961, 0.04041680145263672, 0.040554431915283205, 0.04040774536132812, 0.04056409454345703, 0.04066268920898437, 0.04049126434326172, 0.040460479736328124, 0.040628768920898437, 0.040670783996582034, 0.040567230224609375, 0.040490657806396484, 0.04039510345458985, 0.04048486328125, 0.040376190185546876, 0.04047065734863281, 0.04033059310913086, 0.0404159049987793, 0.04034560012817383, 0.040529918670654294, 0.040533409118652344, 0.04073654556274414, 0.040708286285400394, 0.040653438568115235, 0.0407347183227539, 0.04062822341918945, 0.04062582397460938, 0.04071417617797852, 0.040831390380859374, 0.04077363204956055, 0.04060569763183594, 0.04067532730102539, 0.04068966293334961, 0.04075872039794922, 0.04096672058105469, 0.04075439834594727, 0.040989471435546876, 0.040822784423828126, 0.040738815307617186, 0.040837120056152344, 0.04072963333129883, 0.04083196640014648, 0.04104191970825195, 0.04108492660522461, 0.04097964859008789, 0.040938304901123046, 0.0408985595703125, 0.04149862289428711, 0.041363327026367185, 0.041181312561035154, 0.04112588882446289, 0.04127107238769531, 0.04118524932861328, 0.04109689712524414, 0.041557598114013675, 0.041610206604003906, 0.041635841369628904, 0.041181182861328124, 0.04114432144165039, 0.040323070526123043, 0.04035356903076172, 0.04032944107055664, 0.04037171173095703, 0.040500736236572264, 0.04040192031860351, 0.04043276977539063, 0.04044275283813477, 0.04066006469726562, 0.04060457611083984, 0.040513534545898434, 0.04068684768676758, 0.040814815521240236, 0.04104246520996094, 0.040486270904541016, 0.040335712432861326, 0.04041961669921875, 0.0403394546508789, 0.040325119018554685, 0.04033331298828125, 0.040419296264648436, 0.04052348709106445, 0.040408607482910155, 0.04039350509643555, 0.040509441375732425, 0.04048896026611328, 0.04046201705932617, 0.040476417541503905, 0.04053449630737305, 0.04047267150878906, 0.040718143463134765, 0.04083657455444336, 0.040822784423828126, 0.040714977264404296, 0.04071014404296875, 0.040959999084472655, 0.04112793731689453, 0.0408078727722168, 0.040839744567871095, 0.04070809555053711, 0.04075724792480469, 0.04076332855224609, 0.040742591857910154, 0.04067571258544922, 0.0407973747253418, 0.04083795166015625, 0.041111553192138675, 0.04101324844360352, 
0.04110131072998047, 0.04113398361206055, 0.04093142318725586, 0.041024608612060545, 0.040780128479003905, 0.04124115371704101, 0.04126825714111328, 0.041065441131591794, 0.0409989128112793, 0.041619457244873044, 0.041531265258789064, 0.041375934600830076, 0.04161049652099609, 0.04147884750366211, 0.04110326385498047, 0.0402529296875, 0.04035561752319336, 0.04022560119628906, 0.04014211273193359, 0.04030854415893555, 0.04046265411376953, 0.0404219856262207, 0.04045401763916016, 0.040532096862792966, 0.04071219253540039, 0.040484256744384765, 0.04076985549926758, 0.04088041687011719, 0.04099174499511719, 0.04078899383544922, 0.0405032958984375, 0.04040428924560547, 0.04043366241455078, 0.04031526565551758, 0.04029267120361328, 0.04042486572265625, 0.040358497619628904, 0.04043161773681641, 0.04052377700805664, 0.040443649291992186, 0.04044416046142578, 0.04039475250244141, 0.04038243103027344, 0.04053772735595703, 0.04057948684692383, 0.04058931350708008, 0.04056063842773437, 0.04055449676513672, 0.040744510650634766, 0.040626625061035156, 0.04081843185424805, 0.041140480041503905, 0.040941120147705075, 0.04068329620361328, 0.04074972915649414, 0.040767425537109374, 0.04082080078125, 0.04104806518554688, 0.04106156921386719, 0.041089439392089845, 0.041342464447021485, 0.041350048065185545, 0.04119091033935547, 0.04098099136352539, 0.04146995162963867, 0.041227390289306644, 0.04105920028686524, 0.04095795059204101, 0.041021408081054686, 0.04079190444946289, 0.04091104125976563, 0.04090060806274414, 0.041014366149902344, 0.0411016960144043, 0.04126774215698242, 0.04111360168457031, 0.0411932487487793]",tokens/s,24.548673435939428,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4975.235072,7338.917888,0.0,6943.670272,6539.1744,s,1,11.454900390625,11.454900390625,0.0,11.454900390625,11.454900390625,11.454900390625,11.454900390625,[11.454900390625],,kWh,0.00013009809213333862,1.4343599980473967e-05,4.150808876199341e-05,0.000185949780875806,,MB,5064.54016,7634.61632,0.0,7224.68864,6917.39904,s,10,1.997946273803711,0.1997946273803711,0.00045300925581402235,0.19987035369873046,0.20022368927001952,0.20027427444458007,0.20031474258422852,"[0.19856562805175781, 0.1997248992919922, 0.19992588806152345, 0.20032485961914062, 0.19982698059082032, 0.19996701049804688, 0.19991372680664063, 0.19982464599609376, 0.19966018676757813, 0.20021244812011718]",tokens/s,1281.3157358462124,kWh,5.8498406213332145e-06,6.45020581985509e-07,3.872714209280092e-06,1.0367575412598817e-05,tokens/kWh,24692369.219606094,MB,5068.857344,7636.713472,0.0,7226.785792,6917.4016,s,10,18.880694335937502,1.8880694335937502,0.010499768530445765,1.8859650268554686,1.8976787353515625,1.9055944946289063,1.9119271020507813,"[1.91351025390625, 1.895919677734375, 1.8854495849609374, 1.89361572265625, 1.88930224609375, 1.88648046875, 1.8767947998046874, 1.880902099609375, 1.876064697265625, 
1.88265478515625]",tokens/s,33.36741693873294,kWh,5.5153869485750264e-05,6.083324457757696e-06,3.659748483351961e-05,9.783467877702756e-05,tokens/kWh,643943.4440581304,,s,630,18.878338212966945,0.0299656162110586,0.0006856709627015122,0.02980006408691406,0.030400528717041016,0.030849382019042967,0.03327409896850587,"[0.030861824035644532, 0.030451583862304687, 0.03032486343383789, 0.03016703987121582, 0.029781984329223632, 0.030013471603393554, 0.03058278465270996, 0.03886899185180664, 0.031029247283935548, 0.030678079605102538, 0.030370752334594728, 0.030089216232299806, 0.030066015243530274, 0.034293663024902346, 0.03078390312194824, 0.030098880767822266, 0.03010243225097656, 0.029872127532958984, 0.029851839065551757, 0.029764896392822267, 0.029610496520996094, 0.029790239334106447, 0.02987775993347168, 0.03008799934387207, 0.030015167236328126, 0.02981888008117676, 0.029759328842163087, 0.03001692771911621, 0.029969120025634767, 0.03016435241699219, 0.029794975280761717, 0.02984979248046875, 0.030721023559570314, 0.02986191940307617, 0.03232438278198242, 0.03017081642150879, 0.030112064361572266, 0.029941728591918945, 0.030117919921875, 0.03122208023071289, 0.030279359817504882, 0.03154051208496094, 0.030308448791503906, 0.029794944763183593, 0.02997657585144043, 0.02969379234313965, 0.029880479812622072, 0.033753089904785157, 0.02998271942138672, 0.029650367736816407, 0.029880895614624023, 0.029817951202392577, 0.02981340789794922, 0.029634880065917968, 0.029757343292236327, 0.029998464584350584, 0.029837055206298826, 0.02992630386352539, 0.029756511688232422, 0.02974604797363281, 0.029570335388183593, 0.029770656585693358, 0.029640544891357423, 0.03107459259033203, 0.030399520874023436, 0.03091760063171387, 0.02980988883972168, 0.029723392486572266, 0.029596704483032227, 0.02965116882324219, 0.029739999771118165, 0.03291091156005859, 0.03019139289855957, 0.029810239791870117, 0.029913631439208985, 0.03311040115356445, 0.02971238327026367, 0.029869279861450194, 0.02955913543701172, 0.029892416000366212, 0.02954876708984375, 0.029661600112915038, 0.029618175506591796, 0.02976950454711914, 0.029865663528442384, 0.029700639724731446, 0.029871103286743163, 0.02974208068847656, 0.029655040740966795, 0.029841407775878907, 0.030093311309814453, 0.02967884826660156, 0.029558847427368164, 0.03000595283508301, 0.030541215896606445, 0.029909280776977538, 0.02993548774719238, 0.030134719848632814, 0.030530912399291992, 0.030296735763549805, 0.02987615966796875, 0.029996448516845704, 0.029931968688964843, 0.02979862403869629, 0.030011199951171876, 0.029849599838256836, 0.029860031127929686, 0.029715679168701173, 0.029702943801879884, 0.033931262969970705, 0.02998886489868164, 0.030223487854003906, 0.029889408111572265, 0.029767679214477538, 0.02973695945739746, 0.029521024703979493, 0.029892608642578124, 0.029834112167358397, 0.029870080947875976, 0.029844863891601563, 0.030227071762084962, 0.030070783615112305, 0.029855743408203125, 0.029708032608032228, 0.030226015090942384, 0.03050294494628906, 0.031162271499633788, 0.030193920135498046, 0.03017545509338379, 0.030168703079223633, 0.029969184875488282, 0.029743104934692382, 0.02998476791381836, 0.029804447174072265, 0.029849695205688476, 0.029768735885620116, 0.029815776824951172, 0.0343633918762207, 0.03083417510986328, 0.02981235122680664, 0.02983526420593262, 0.02956540870666504, 0.029684127807617186, 0.029696128845214845, 0.02968947219848633, 0.030079231262207032, 0.029884416580200194, 0.029878271102905272, 0.029851648330688478, 
0.02975334358215332, 0.029784063339233398, 0.029572511672973634, 0.029631071090698242, 0.02960588836669922, 0.02954444885253906, 0.029542400360107423, 0.029792255401611328, 0.029613344192504883, 0.03027836799621582, 0.03022233581542969, 0.02967897605895996, 0.029706880569458007, 0.029658271789550782, 0.029598175048828126, 0.029616256713867188, 0.029827327728271485, 0.029617887496948242, 0.029535808563232423, 0.029548799514770508, 0.02951603126525879, 0.02972694396972656, 0.03334096145629883, 0.02995350456237793, 0.029604768753051756, 0.030033184051513673, 0.029591808319091795, 0.02947539138793945, 0.029466495513916016, 0.0295644474029541, 0.029655008316040038, 0.029743520736694336, 0.029673696517944336, 0.029756799697875976, 0.02965328025817871, 0.029485408782958984, 0.02955264091491699, 0.029792255401611328, 0.029865407943725587, 0.029841983795166015, 0.03007619285583496, 0.02976963233947754, 0.029856607437133788, 0.02969094467163086, 0.02988729667663574, 0.029671552658081055, 0.029609983444213867, 0.02981888008117676, 0.031327903747558596, 0.030644575119018556, 0.034277023315429686, 0.030075231552124024, 0.02991923141479492, 0.029566976547241212, 0.02943791961669922, 0.029585439682006835, 0.029493247985839844, 0.029629919052124025, 0.029825504302978516, 0.029897056579589843, 0.029812448501586913, 0.029853599548339844, 0.02964080047607422, 0.029569055557250975, 0.029906143188476564, 0.03101568031311035, 0.029775871276855468, 0.0297574405670166, 0.029839359283447265, 0.029627391815185547, 0.03092572784423828, 0.03095350456237793, 0.03012118339538574, 0.029922143936157226, 0.030963903427124025, 0.030211904525756835, 0.029953088760375977, 0.029922239303588866, 0.030031360626220704, 0.029956672668457033, 0.0299085750579834, 0.02974959945678711, 0.029689855575561523, 0.02974515151977539, 0.03235430526733398, 0.030428192138671876, 0.029799392700195312, 0.030703296661376955, 0.029643072128295898, 0.02978803253173828, 0.02970035171508789, 0.02979414367675781, 0.029613920211791992, 0.02976582336425781, 0.029859872817993165, 0.029738975524902345, 0.029631872177124024, 0.029931264877319334, 0.029992000579833984, 0.030194879531860352, 0.029938304901123047, 0.02973695945739746, 0.029853696823120116, 0.030394367218017578, 0.02985318374633789, 0.029823007583618163, 0.02994223976135254, 0.029724672317504884, 0.029511648178100584, 0.029576288223266602, 0.029627328872680665, 0.02959974479675293, 0.029822240829467772, 0.02993020820617676, 0.029664800643920897, 0.02965305519104004, 0.029622400283813476, 0.029737247467041015, 0.029503488540649415, 0.029542400360107423, 0.029636480331420897, 0.029525856018066406, 0.029800735473632812, 0.029732223510742187, 0.03025391960144043, 0.029535135269165038, 0.029772672653198242, 0.02998886489868164, 0.03010083198547363, 0.03015747261047363, 0.031243392944335938, 0.03040959930419922, 0.030328832626342773, 0.029898752212524415, 0.03006163215637207, 0.02974611282348633, 0.030076927185058593, 0.030013439178466796, 0.029829120635986327, 0.029829120635986327, 0.030121503829956056, 0.029913536071777345, 0.029750656127929688, 0.029593759536743165, 0.029693887710571288, 0.0297825927734375, 0.029882368087768556, 0.030492671966552733, 0.030234016418457032, 0.030144672393798828, 0.030091712951660157, 0.02996214485168457, 0.029911136627197264, 0.0302336311340332, 0.030188127517700194, 0.02986969566345215, 0.02985241508483887, 0.030424224853515626, 0.030243679046630858, 0.029916799545288086, 0.02989299201965332, 0.030373695373535157, 0.030562496185302733, 0.03095369529724121, 
0.030701568603515625, 0.03076483154296875, 0.03082035255432129, 0.030614912033081056, 0.030304672241210938, 0.03002934455871582, 0.029854368209838868, 0.029539648056030272, 0.02958515167236328, 0.029700639724731446, 0.02954662322998047, 0.02987654495239258, 0.029702144622802733, 0.02959987258911133, 0.029671295166015625, 0.0299683837890625, 0.02959929656982422, 0.029654527664184572, 0.0296964168548584, 0.031095327377319334, 0.030271263122558595, 0.031910112380981445, 0.030261247634887696, 0.030036127090454102, 0.029941247940063476, 0.030393888473510742, 0.03092134475708008, 0.030120128631591796, 0.030057567596435547, 0.03017184066772461, 0.029761760711669923, 0.030002208709716798, 0.029715423583984376, 0.02975948715209961, 0.029734432220458986, 0.02963484764099121, 0.029747360229492186, 0.030574623107910155, 0.029693952560424806, 0.029918943405151367, 0.029480960845947264, 0.0297161922454834, 0.029831743240356444, 0.029468896865844727, 0.030084543228149414, 0.029981311798095704, 0.029849023818969728, 0.029740928649902344, 0.02959401512145996, 0.02961577606201172, 0.029704160690307617, 0.029751487731933594, 0.0298272647857666, 0.029915136337280275, 0.02960383987426758, 0.029661216735839845, 0.02962633514404297, 0.02981216049194336, 0.029889087677001953, 0.030676448822021484, 0.03002627182006836, 0.029738304138183593, 0.02975174331665039, 0.02970035171508789, 0.02970992088317871, 0.03064028739929199, 0.03038719940185547, 0.02999135971069336, 0.029710655212402345, 0.02980784034729004, 0.029792224884033203, 0.029666208267211915, 0.02965648078918457, 0.029997440338134767, 0.029734752655029298, 0.029701791763305663, 0.029807231903076173, 0.029554176330566406, 0.029600351333618165, 0.02975334358215332, 0.029700096130371095, 0.02975276756286621, 0.029678144454956056, 0.029642751693725586, 0.029742271423339843, 0.029735744476318358, 0.030286880493164064, 0.0298505916595459, 0.029574560165405273, 0.029616767883300782, 0.03035887908935547, 0.029605791091918944, 0.029938016891479492, 0.029828575134277342, 0.029815647125244142, 0.029597759246826172, 0.029798559188842774, 0.02957910346984863, 0.029480991363525392, 0.029652927398681642, 0.029746912002563478, 0.029675519943237305, 0.0295053768157959, 0.029608160018920898, 0.02957855987548828, 0.02954745674133301, 0.029741056442260744, 0.030066688537597655, 0.02995315170288086, 0.02996940803527832, 0.02978803253173828, 0.029679616928100585, 0.0296092472076416, 0.029754079818725587, 0.029857791900634766, 0.02958131217956543, 0.02971772766113281, 0.029737695693969727, 0.030076608657836915, 0.030384511947631837, 0.030074880599975585, 0.02997657585144043, 0.029751424789428712, 0.029624191284179688, 0.02958892822265625, 0.029524511337280273, 0.029663263320922853, 0.029749216079711913, 0.030654464721679688, 0.03000934410095215, 0.029889888763427734, 0.029778079986572267, 0.02967398452758789, 0.029519840240478514, 0.029576223373413087, 0.029617151260375976, 0.029853696823120116, 0.029738208770751954, 0.029621023178100586, 0.02956819152832031, 0.02973369598388672, 0.029938976287841798, 0.029799135208129882, 0.029818336486816407, 0.030083616256713866, 0.02994175910949707, 0.03007855987548828, 0.030493087768554687, 0.029888511657714844, 0.029784063339233398, 0.03298099136352539, 0.029698047637939453, 0.029648895263671874, 0.029816831588745117, 0.029941791534423827, 0.029954015731811525, 0.029857215881347657, 0.029931615829467774, 0.029879840850830078, 0.029763872146606446, 0.02976425552368164, 0.029837312698364257, 0.029773759841918945, 0.02972438430786133, 
0.02974086380004883, 0.02962486457824707, 0.029710336685180663, 0.029865983963012696, 0.02974742317199707, 0.029746976852416993, 0.029722623825073242, 0.029942943572998048, 0.02983203125, 0.029676799774169923, 0.02984832000732422, 0.029792224884033203, 0.029890592575073243, 0.030058496475219725, 0.029691328048706056, 0.029704896926879883, 0.02982080078125, 0.02979840087890625, 0.02980659294128418, 0.03000934410095215, 0.029621696472167967, 0.029739391326904296, 0.029536191940307616, 0.029554943084716796, 0.029486879348754883, 0.029442272186279296, 0.02963043212890625, 0.030341119766235353, 0.03016294479370117, 0.029848672866821288, 0.029780031204223633, 0.029522207260131834, 0.02957574462890625, 0.02959916877746582, 0.029643295288085937, 0.029574687957763673, 0.02968832015991211, 0.02971161651611328, 0.02953696060180664, 0.029515775680541992, 0.02965510368347168, 0.029624319076538085, 0.02954966354370117, 0.031160448074340822, 0.03062044715881348, 0.030048255920410157, 0.029834592819213867, 0.029903039932250977, 0.02966691207885742, 0.029655040740966795, 0.029526464462280272, 0.02956742477416992, 0.02964905548095703, 0.02981203269958496, 0.029766176223754885, 0.03027334403991699, 0.029617471694946287, 0.029678623199462892, 0.02988630485534668, 0.02966934394836426, 0.029538335800170897, 0.02970419120788574, 0.02961305618286133, 0.029640832901000978, 0.02957401657104492, 0.029467744827270506, 0.02979631996154785, 0.029722976684570312, 0.02969046401977539, 0.029695968627929687, 0.029855775833129882, 0.029998783111572266, 0.029475103378295897, 0.029655071258544923, 0.02959974479675293, 0.029599231719970705, 0.030714656829833983, 0.02981990432739258, 0.030044160842895507, 0.02970083236694336, 0.02979151916503906, 0.029798208236694337, 0.029705087661743165, 0.029632543563842772, 0.02976358413696289, 0.029740543365478517, 0.02964694404602051, 0.02965135955810547, 0.029724672317504884, 0.029788000106811523, 0.030671072006225587, 0.03011849594116211, 0.029857215881347657, 0.029796287536621092, 0.029710975646972657, 0.03000934410095215, 0.029698047637939453, 0.02979430389404297, 0.02978348731994629, 0.029637184143066406, 0.029691904067993165, 0.03036275291442871, 0.030026239395141603, 0.031056255340576173, 0.030431232452392577, 0.030060543060302734, 0.029851648330688478, 0.030015487670898438, 0.030284799575805665, 0.02995439910888672, 0.030118335723876954, 0.03005836868286133, 0.029866336822509765, 0.030104831695556642, 0.029897823333740234, 0.029734560012817383, 0.0296910400390625, 0.029644800186157227, 0.029594240188598634, 0.029616352081298827, 0.02940108871459961, 0.029642784118652343, 0.02956812858581543, 0.029436159133911132, 0.02969798469543457, 0.02962499237060547, 0.02938265609741211, 0.02945996856689453, 0.029624959945678712, 0.02989459228515625, 0.02980963134765625, 0.029989856719970703, 0.029902912139892577, 0.029699583053588868, 0.029589056015014648, 0.029803199768066405, 0.03133443260192871, 0.030503072738647462, 0.030381664276123047, 0.030456192016601564, 0.030111040115356445, 0.029788095474243163, 0.02958620834350586, 0.029655168533325196, 0.029529024124145507, 0.02984441566467285, 0.02974515151977539, 0.029675647735595702, 0.029603712081909178, 0.029603071212768554, 0.02955673599243164, 0.029688192367553712, 0.02970252799987793]",tokens/s,33.371581380360766,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,2783.858688,3346.92352,0.0,2944.401408,2910.225408,s,1,9.7664384765625,9.7664384765625,0.0,9.7664384765625,9.7664384765625,9.7664384765625,9.7664384765625,[9.7664384765625],,kWh,6.977685974585105e-05,7.689644632860384e-06,2.1199461403983988e-05,9.866596578269543e-05,,MB,1689.870336,3565.027328,0.0,3147.825152,3105.082368,s,12,1.0903720016479492,0.09086433347066243,0.00019881790316077566,0.09078158569335937,0.09111210250854491,0.0911842185974121,0.09125451950073242,"[0.0912720947265625, 0.09076310729980469, 0.09072191619873046, 0.09062111663818359, 0.09073980712890625, 0.09083596801757812, 0.09079987335205078, 0.09076329803466797, 0.09111231994628906, 0.09099104309082032, 0.09064131164550782, 0.09111014556884765]",tokens/s,2817.387089320974,kWh,2.6828901846330633e-06,2.9587165029178815e-07,1.770179805541323e-06,4.748941640466174e-06,tokens/kWh,53906747.941183396,MB,1694.048256,3565.027328,0.0,3147.825152,3105.084928,s,12,9.656188171386717,0.8046823476155597,0.0016080668596248065,0.8044126586914062,0.8070184814453125,0.8075011108398437,0.8078710717773437,"[0.802819580078125, 0.8052971801757812, 0.8030277099609375, 0.8025515747070312, 0.8071227416992187, 0.804286376953125, 0.8060801391601562, 0.804558837890625, 0.804079345703125, 0.8038621826171874, 0.8045389404296875, 0.8079635620117187]",tokens/s,78.29176343520153,kWh,2.3640258539323624e-05,2.6071222245772633e-06,1.5821246304625173e-05,4.206862706852607e-05,tokens/kWh,1497553.03155909,,s,756,9.65366825580598,0.0127694024547698,0.00015959258538485914,0.012747263908386229,0.012864495754241943,0.012920632123947144,0.013445756673812868,"[0.013230048179626464, 0.012831168174743652, 0.012797951698303223, 0.012795136451721192, 0.012776288032531738, 0.012721280097961425, 0.0127042236328125, 0.012690848350524902, 0.012643232345581054, 0.012666879653930664, 0.01268230438232422, 0.012635231971740723, 0.01267081642150879, 0.012770655632019044, 0.01287440013885498, 0.012693504333496093, 0.01265056037902832, 0.012631999969482421, 0.012630016326904296, 0.012623871803283691, 0.012627967834472656, 0.01267948818206787, 0.012664511680603027, 0.012668928146362305, 0.012635616302490235, 0.012623935699462891, 0.012640735626220704, 0.012638175964355469, 0.012605631828308105, 0.012627840042114258, 0.012636128425598145, 0.012734335899353027, 0.012746879577636719, 0.012789759635925293, 0.012815839767456054, 0.012704192161560058, 0.012721664428710937, 0.012736319541931152, 0.012761856079101562, 0.012738656044006348, 0.01271718406677246, 0.012757823944091798, 0.012736800193786622, 0.012725152015686036, 0.012741151809692383, 0.012710176467895507, 0.012705792427062988, 0.013788895606994629, 0.012810527801513672, 0.012670944213867188, 0.012625151634216309, 0.012698399543762206, 0.012701760292053223, 0.012729408264160157, 0.012714879989624023, 
0.012679167747497559, 0.012691455841064453, 0.01269478416442871, 0.01267369556427002, 0.01273635196685791, 0.0128307523727417, 0.013138143539428711, 0.01281439971923828, 0.01336905574798584, 0.012895968437194824, 0.012827520370483398, 0.012768704414367676, 0.012707615852355957, 0.012696031570434571, 0.012658816337585449, 0.012682751655578613, 0.012656448364257812, 0.012662688255310058, 0.012646528244018554, 0.012683808326721191, 0.012722175598144531, 0.012750207901000976, 0.012718720436096191, 0.012715935707092285, 0.012667263984680176, 0.012648159980773926, 0.012636096000671386, 0.012662240028381348, 0.01265724754333496, 0.012652544021606446, 0.012679167747497559, 0.012660736083984376, 0.012697919845581055, 0.012664128303527832, 0.012630399703979492, 0.012670975685119629, 0.012703743934631348, 0.012732000350952148, 0.012718496322631835, 0.0127325439453125, 0.01269654369354248, 0.012695615768432617, 0.012675071716308594, 0.012772159576416016, 0.01279196834564209, 0.012748671531677245, 0.012777471542358398, 0.012778592109680176, 0.012782496452331543, 0.012789536476135253, 0.012728128433227539, 0.012725919723510741, 0.012725119590759277, 0.01273744010925293, 0.012749119758605957, 0.012736319541931152, 0.012708255767822266, 0.012743103981018066, 0.012752896308898925, 0.012961152076721192, 0.014684288024902344, 0.013163007736206055, 0.012886015892028809, 0.012805503845214844, 0.012736767768859862, 0.012759103775024414, 0.012781888008117677, 0.012779071807861328, 0.012758655548095703, 0.012795968055725098, 0.012874624252319337, 0.013162464141845702, 0.012763199806213378, 0.012724224090576173, 0.01265664005279541, 0.01264844799041748, 0.01265766429901123, 0.012655391693115234, 0.012656031608581543, 0.012683456420898437, 0.012669119834899902, 0.012648896217346191, 0.012659903526306152, 0.01267081642150879, 0.012686304092407227, 0.012703519821166992, 0.012656864166259765, 0.01267302417755127, 0.01271388816833496, 0.01273193645477295, 0.01274732780456543, 0.012709152221679688, 0.012769503593444824, 0.012728511810302734, 0.012798272132873535, 0.012726271629333496, 0.012719679832458496, 0.012700096130371093, 0.012703743934631348, 0.012678720474243165, 0.01269315242767334, 0.012701727867126466, 0.012695327758789062, 0.012710880279541016, 0.012696800231933593, 0.01274720001220703, 0.012677663803100587, 0.012783231735229493, 0.012789664268493652, 0.012760479927062989, 0.012753791809082031, 0.012761216163635253, 0.012801152229309082, 0.012761247634887696, 0.012719743728637696, 0.012714976310729981, 0.0130099515914917, 0.012801247596740723, 0.012748031616210938, 0.012738143920898438, 0.012740703582763671, 0.01276137638092041, 0.012800543785095216, 0.012830719947814942, 0.012861536026000977, 0.01275279998779297, 0.012744704246520995, 0.012783616065979005, 0.012852448463439942, 0.01292790412902832, 0.012762463569641114, 0.012736895561218261, 0.012734623908996582, 0.012763232231140136, 0.013064064025878907, 0.012677280426025391, 0.012643232345581054, 0.01267807960510254, 0.012691360473632812, 0.01261580753326416, 0.012586912155151368, 0.012601408004760742, 0.012605440139770508, 0.012603391647338867, 0.012617695808410644, 0.012673055648803711, 0.012715264320373535, 0.012786432266235352, 0.012630016326904296, 0.012619775772094726, 0.012625920295715331, 0.012679200172424316, 0.012699616432189942, 0.012717599868774415, 0.012703392028808594, 0.012671775817871093, 0.012683103561401367, 0.012709823608398438, 0.012695808410644532, 0.012668191909790039, 0.012698335647583008, 0.012717663764953613, 
0.012732831954956055, 0.012777471542358398, 0.012791328430175782, 0.012722528457641602, 0.01272652816772461, 0.012705151557922362, 0.012708352088928223, 0.012734111785888671, 0.012709440231323242, 0.012981120109558106, 0.012729696273803711, 0.012728032112121581, 0.012740575790405273, 0.012790656089782715, 0.012828031539916991, 0.012749279975891113, 0.01276483154296875, 0.01284761619567871, 0.012836000442504884, 0.012872544288635254, 0.012828448295593261, 0.012832320213317872, 0.012810912132263184, 0.012767231941223145, 0.01274880027770996, 0.01275494384765625, 0.012749855995178222, 0.012754176139831543, 0.012803903579711914, 0.012777376174926757, 0.012770976066589355, 0.012828224182128907, 0.012798239707946777, 0.01278979206085205, 0.012783231735229493, 0.01321395206451416, 0.012904191970825195, 0.012986368179321289, 0.012843008041381837, 0.01272982406616211, 0.012664480209350586, 0.012626943588256835, 0.012617312431335449, 0.012639840126037598, 0.012632608413696289, 0.012617952346801757, 0.012633055686950683, 0.012630016326904296, 0.012641247749328613, 0.012735808372497558, 0.012705696105957032, 0.01265078353881836, 0.012621439933776856, 0.012630111694335937, 0.012620800018310547, 0.012623744010925293, 0.012705696105957032, 0.012662367820739746, 0.012652511596679688, 0.012659135818481446, 0.012707839965820313, 0.012758591651916503, 0.012832351684570312, 0.012796768188476563, 0.01277337646484375, 0.012831968307495117, 0.012849151611328125, 0.01287827205657959, 0.012834303855895996, 0.012819519996643066, 0.01283459186553955, 0.012937215805053711, 0.012886015892028809, 0.012854751586914062, 0.012864031791687013, 0.01287168025970459, 0.012887519836425781, 0.012849056243896484, 0.01284160041809082, 0.012807392120361328, 0.012794912338256836, 0.01291808032989502, 0.012878591537475587, 0.01285910415649414, 0.01290054416656494, 0.012987648010253906, 0.012917216300964356, 0.012901599884033204, 0.012886879920959472, 0.012961152076721192, 0.01288259220123291, 0.012906463623046874, 0.012940863609313964, 0.012988863945007324, 0.012979647636413575, 0.01291327953338623, 0.012935232162475586, 0.012969792366027832, 0.013517984390258789, 0.012916735649108887, 0.012891136169433593, 0.012779040336608887, 0.012863712310791016, 0.012779232025146484, 0.012766624450683594, 0.012711039543151855, 0.012680576324462891, 0.012708383560180664, 0.012756928443908692, 0.012761088371276855, 0.012695648193359376, 0.012681119918823243, 0.012650752067565918, 0.012688768386840821, 0.012716128349304199, 0.01269379234313965, 0.012712032318115234, 0.012698623657226562, 0.012731295585632324, 0.012814335823059082, 0.012693183898925781, 0.012689727783203125, 0.012696895599365234, 0.012737215995788575, 0.012681216239929198, 0.012662879943847656, 0.01268496036529541, 0.01271628761291504, 0.012634112358093261, 0.012656864166259765, 0.012717856407165527, 0.012726271629333496, 0.012727328300476074, 0.012757087707519531, 0.012743552207946778, 0.012736512184143066, 0.012738176345825195, 0.012767871856689454, 0.012744256019592285, 0.012736703872680664, 0.012805855751037598, 0.012839200019836426, 0.01284505558013916, 0.012805791854858398, 0.012804448127746582, 0.01277132797241211, 0.01277132797241211, 0.012787775993347168, 0.01278486442565918, 0.01278230381011963, 0.012781760215759278, 0.012783359527587891, 0.01282863998413086, 0.012756511688232422, 0.012759615898132324, 0.012766304016113282, 0.012785887718200684, 0.01278979206085205, 0.012769472122192383, 0.012806015968322754, 0.012808799743652344, 0.013049504280090333, 
0.012687487602233887, 0.012667200088500977, 0.012646400451660156, 0.012595199584960937, 0.012644351959228516, 0.012736224174499511, 0.012738847732543945, 0.012708127975463867, 0.01267478370666504, 0.012666303634643555, 0.012646016120910644, 0.012882880210876465, 0.012737983703613282, 0.012742527961730957, 0.012690112113952637, 0.012652480125427246, 0.01261087989807129, 0.012640352249145509, 0.012648544311523437, 0.012644543647766113, 0.012634176254272461, 0.01266710376739502, 0.012644448280334473, 0.012666687965393067, 0.012714176177978516, 0.012697152137756348, 0.012675519943237304, 0.012677120208740235, 0.012666496276855468, 0.012773152351379394, 0.014262880325317383, 0.012775424003601075, 0.01274396800994873, 0.012769951820373535, 0.012761119842529298, 0.01278486442565918, 0.012763615608215331, 0.012755295753479005, 0.012763296127319336, 0.01286128044128418, 0.012759039878845215, 0.01283625602722168, 0.012812895774841309, 0.012820480346679687, 0.012878911972045898, 0.012837823867797851, 0.012793951988220215, 0.01284928035736084, 0.01286736011505127, 0.012833791732788086, 0.012844032287597656, 0.012859392166137695, 0.012862527847290039, 0.012878432273864746, 0.012816415786743165, 0.013789504051208497, 0.012832768440246582, 0.01288969612121582, 0.01272265625, 0.012719136238098144, 0.012748831748962403, 0.012853440284729003, 0.013359168052673339, 0.01282630443572998, 0.012721376419067383, 0.0127192964553833, 0.012670175552368164, 0.012851327896118163, 0.012814751625061035, 0.012640416145324707, 0.01264627170562744, 0.012625887870788574, 0.012640480041503907, 0.012647711753845214, 0.012671487808227539, 0.012620896339416503, 0.012677023887634278, 0.012694527626037597, 0.012707839965820313, 0.012711423873901367, 0.012671551704406737, 0.012649760246276856, 0.01269974422454834, 0.01267129611968994, 0.01272652816772461, 0.012748288154602052, 0.012759552001953126, 0.012691455841064453, 0.012670432090759277, 0.012669471740722657, 0.013422687530517579, 0.013407296180725097, 0.012659551620483399, 0.012759039878845215, 0.012721792221069336, 0.012760607719421387, 0.012747615814208984, 0.012740736007690429, 0.01269983959197998, 0.012686367988586425, 0.012728992462158204, 0.012739839553833008, 0.012712703704833985, 0.012744480133056641, 0.012759263992309571, 0.012810239791870117, 0.012772671699523926, 0.01272697639465332, 0.012732416152954102, 0.01273855972290039, 0.012719712257385253, 0.012766783714294433, 0.012749855995178222, 0.012756896018981934, 0.012833696365356445, 0.012835840225219726, 0.012853023529052734, 0.012814559936523438, 0.012808192253112792, 0.012725791931152343, 0.012757216453552246, 0.012835071563720704, 0.012847104072570802, 0.01288924789428711, 0.012895296096801758, 0.013473952293395996, 0.012807456016540528, 0.01270844841003418, 0.01266643238067627, 0.012661312103271484, 0.01266592025756836, 0.01285001564025879, 0.013721599578857421, 0.012658783912658692, 0.012618847846984863, 0.012560992240905762, 0.012570943832397461, 0.01258028793334961, 0.012591679573059083, 0.01259126377105713, 0.012635680198669433, 0.01269587230682373, 0.012717503547668457, 0.012777759552001952, 0.012712224006652832, 0.012677023887634278, 0.01265062427520752, 0.01271510410308838, 0.01270041561126709, 0.012712063789367676, 0.012705792427062988, 0.012683263778686523, 0.012677056312561035, 0.012701791763305664, 0.012675040245056153, 0.012650495529174804, 0.012673279762268067, 0.01269324779510498, 0.01268671989440918, 0.012753600120544434, 0.012758975982666015, 0.01275494384765625, 0.012742655754089355, 
0.01274675178527832, 0.01275494384765625, 0.012779168128967285, 0.012759391784667968, 0.012726271629333496, 0.01274182415008545, 0.012868000030517577, 0.012773344039916992, 0.012732864379882813, 0.012801024436950683, 0.0128090238571167, 0.012777664184570313, 0.012804096221923827, 0.01278166389465332, 0.012785568237304687, 0.012770591735839843, 0.01297481632232666, 0.012812031745910644, 0.012783935546875, 0.012780896186828613, 0.01288047981262207, 0.012791328430175782, 0.012833151817321777, 0.012818528175354003, 0.012890111923217774, 0.013307392120361328, 0.012800512313842773, 0.012738080024719238, 0.012695072174072265, 0.01265721607208252, 0.012699551582336426, 0.012718432426452637, 0.012744671821594238, 0.012720352172851562, 0.01265395164489746, 0.012650112152099609, 0.012679455757141113, 0.012751520156860352, 0.012725503921508789, 0.01271065616607666, 0.012683263778686523, 0.012674400329589844, 0.012629792213439942, 0.012636832237243652, 0.012653823852539062, 0.012641247749328613, 0.01273804759979248, 0.012753408432006836, 0.012736639976501464, 0.012771200180053711, 0.012779168128967285, 0.012779871940612792, 0.01270809555053711, 0.012713664054870606, 0.012726335525512696, 0.01274067211151123, 0.012719552040100097, 0.012720640182495118, 0.012758272171020508, 0.01276198387145996, 0.012746623992919922, 0.012769280433654785, 0.012880191802978516, 0.012700703620910645, 0.012652799606323243, 0.012726688385009765, 0.01278889560699463, 0.012759903907775878, 0.012789440155029298, 0.012804287910461426, 0.01280832004547119, 0.012815839767456054, 0.01272815990447998, 0.012735296249389648, 0.01273804759979248, 0.01274300765991211, 0.012846336364746094, 0.012820480346679687, 0.012845343589782714, 0.01282096004486084, 0.012834624290466308, 0.012832991600036621, 0.012805919647216796, 0.012779744148254394, 0.012807488441467285, 0.012823167800903321, 0.012858752250671387, 0.012819135665893554, 0.01325391960144043, 0.012769023895263671, 0.012685376167297363, 0.012708576202392578, 0.012699263572692871, 0.012682911872863769, 0.0126527681350708, 0.012704256057739258, 0.012660032272338867, 0.01266755199432373, 0.012674719810485839, 0.012636544227600098, 0.012688511848449708, 0.012645312309265137, 0.012695487976074218, 0.012703743934631348, 0.012996607780456543, 0.012717344284057618, 0.012693535804748536, 0.012640192031860351, 0.012642239570617676, 0.012690239906311036, 0.012767231941223145, 0.012752415657043457, 0.012749247550964356, 0.01272544002532959, 0.01272659206390381, 0.012754752159118652, 0.012729151725769043, 0.012723456382751465, 0.012776096343994141, 0.012800000190734863, 0.012807231903076172, 0.012794431686401367, 0.012809599876403809, 0.012800383567810058, 0.012750559806823731, 0.012737152099609374, 0.012747039794921875, 0.01279974365234375, 0.012748703956604004, 0.01275062370300293, 0.012808768272399903, 0.01276518440246582, 0.012750207901000976, 0.012745280265808106, 0.012763199806213378, 0.012776991844177247, 0.012811967849731445, 0.012796704292297362, 0.012789119720458984, 0.012796544075012206, 0.012799488067626954, 0.012830495834350586, 0.012810720443725585, 0.012828927993774414, 0.01283801555633545, 0.012882816314697265, 0.012931072235107421, 0.012801119804382324, 0.012839839935302735, 0.012881695747375488, 0.012918208122253418, 0.013358688354492188, 0.012864959716796876, 0.01283801555633545, 0.012832480430603027, 0.012779647827148438, 0.01276518440246582, 0.01274880027770996, 0.01277132797241211, 0.012774944305419922, 0.01284553623199463, 0.01277337646484375, 0.012777183532714844, 
0.012732576370239258, 0.012748767852783203, 0.012770751953125, 0.012823264122009277, 0.012826623916625977, 0.01281769561767578, 0.012849504470825196, 0.012890527725219727, 0.012847071647644042, 0.012800000190734863, 0.01271561622619629, 0.012698016166687011, 0.012750847816467285, 0.012769056320190429, 0.012847328186035156, 0.012824319839477539, 0.012841216087341309, 0.012822751998901366, 0.012807968139648438, 0.012789536476135253, 0.012715264320373535, 0.012823519706726074, 0.012759296417236329, 0.012762080192565918, 0.012763168334960938, 0.01279257583618164, 0.012820480346679687, 0.012803296089172363, 0.01275369644165039, 0.012721823692321778, 0.012781920433044434, 0.012769087791442871, 0.012785728454589845, 0.01283084774017334, 0.012838047981262207, 0.01284182357788086, 0.012789759635925293, 0.01282975959777832, 0.012827584266662598, 0.012775424003601075, 0.012741920471191406, 0.013800448417663574, 0.012818143844604493, 0.012784671783447266, 0.01280620765686035, 0.012893088340759277, 0.012819968223571777, 0.012783616065979005, 0.012769887924194336, 0.012768735885620117, 0.012859840393066407]",tokens/s,78.31220008470063,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11136.868352,12227.3792,0.0,11848.9088,11814.752256,s,1,16.2327216796875,16.2327216796875,0.0,16.2327216796875,16.2327216796875,16.2327216796875,16.2327216796875,[16.2327216796875],,kWh,0.00026652513839583964,2.939236722192133e-05,8.68925695139966e-05,0.00038281007513175756,,MB,2067.243008,14033.027072,0.0,13625.196544,13297.870848,s,10,22.9517958984375,2.29517958984375,0.0008820631523208208,2.2952384033203126,2.296043969726562,2.2965340698242187,2.296926149902344,"[2.294716552734375, 2.293971923828125, 2.294431884765625, 2.29522900390625, 2.295479248046875, 2.29407421875, 2.295247802734375, 2.29568603515625, 2.29593505859375, 2.297024169921875]",tokens/s,111.53811280511947,kWh,6.69229592179181e-05,7.379358171590439e-06,4.43492299238013e-05,0.00011865154731330984,tokens/kWh,2157578.2684401874,MB,2071.445504,14335.01696,0.0,13927.186432,13689.870848,s,10,1357.6977968750002,135.76977968750003,0.03905213328064623,135.753359375,135.8288109375,135.83175703125,135.83411390625,"[135.726765625, 135.7214375, 135.73778125, 135.75059375, 135.75296875, 135.75375, 135.787078125, 135.8045625, 135.82815625, 135.834703125]",tokens/s,0.4640207868423038,kWh,0.003959727415000008,0.0004367888125291182,0.0026341422462009987,0.007030658473730125,tokens/kWh,8960.753851918405,,s,630,1357.6917192382805,2.155066221013145,0.0009203916848711222,2.154985595703125,2.1563291015625,2.156659765625,2.1572001464843753,"[2.15377978515625, 2.15362353515625, 2.154102783203125, 2.152888427734375, 2.153879638671875, 2.153713623046875, 2.1530703125, 2.153484619140625, 2.15326904296875, 2.15319775390625, 2.154849609375, 2.15480126953125, 
2.154350830078125, 2.153640380859375, 2.15429931640625, 2.153489501953125, 2.155323974609375, 2.1542197265625, 2.154045654296875, 2.15464697265625, 2.154449462890625, 2.15431298828125, 2.154271484375, 2.15437890625, 2.1539443359375, 2.15463818359375, 2.1542236328125, 2.154859619140625, 2.154393798828125, 2.15407470703125, 2.154638671875, 2.154382080078125, 2.154458251953125, 2.1549345703125, 2.15419140625, 2.154756103515625, 2.15472314453125, 2.154986572265625, 2.153968017578125, 2.154627685546875, 2.154684326171875, 2.154903564453125, 2.154151611328125, 2.154827880859375, 2.15438232421875, 2.153913330078125, 2.155095458984375, 2.154193603515625, 2.15475, 2.155017822265625, 2.1539208984375, 2.154256103515625, 2.1548037109375, 2.154446044921875, 2.155332275390625, 2.15518212890625, 2.15475537109375, 2.154677001953125, 2.15482568359375, 2.15515771484375, 2.154954833984375, 2.154704833984375, 2.1553427734375, 2.15356201171875, 2.153287109375, 2.15346142578125, 2.15304443359375, 2.15363134765625, 2.15324658203125, 2.153400390625, 2.1534482421875, 2.15419482421875, 2.153303955078125, 2.1533408203125, 2.154169921875, 2.15413916015625, 2.153862060546875, 2.153770751953125, 2.154093994140625, 2.1539375, 2.153967529296875, 2.154238037109375, 2.15394921875, 2.15406396484375, 2.154262451171875, 2.154094482421875, 2.15416015625, 2.153773193359375, 2.154221435546875, 2.154911865234375, 2.154006591796875, 2.154006591796875, 2.15403466796875, 2.15473388671875, 2.153947265625, 2.154082275390625, 2.1540576171875, 2.1540595703125, 2.153810302734375, 2.15459765625, 2.154572021484375, 2.1547216796875, 2.15456787109375, 2.154612548828125, 2.154059814453125, 2.154459228515625, 2.15434033203125, 2.154974853515625, 2.1545126953125, 2.15436083984375, 2.15488427734375, 2.15504296875, 2.154566162109375, 2.1549423828125, 2.15516357421875, 2.15512060546875, 2.154883056640625, 2.155005859375, 2.155716064453125, 2.154611328125, 2.155560302734375, 2.154902099609375, 2.155333251953125, 2.154781005859375, 2.15556298828125, 2.154715087890625, 2.154047607421875, 2.15368701171875, 2.15332666015625, 2.153343017578125, 2.154083740234375, 2.15368115234375, 2.15378759765625, 2.15322802734375, 2.15338232421875, 2.15393408203125, 2.1533740234375, 2.15382666015625, 2.1550546875, 2.15336181640625, 2.15402294921875, 2.153701416015625, 2.15465087890625, 2.1540625, 2.154532958984375, 2.154282958984375, 2.154406005859375, 2.154145751953125, 2.153818115234375, 2.15392041015625, 2.15444287109375, 2.153777099609375, 2.1542412109375, 2.154507080078125, 2.15466943359375, 2.154838134765625, 2.154627685546875, 2.15440966796875, 2.154647216796875, 2.15475244140625, 2.154928466796875, 2.154638427734375, 2.153850830078125, 2.15507666015625, 2.15435986328125, 2.155682861328125, 2.154911865234375, 2.155358154296875, 2.15447509765625, 2.15534765625, 2.154611083984375, 2.155585205078125, 2.1549453125, 2.15463720703125, 2.155443359375, 2.155148193359375, 2.155347412109375, 2.154822021484375, 2.154760009765625, 2.15597216796875, 2.155488037109375, 2.155116455078125, 2.1555029296875, 2.155637451171875, 2.155179931640625, 2.155326904296875, 2.15504736328125, 2.156221923828125, 2.155205322265625, 2.15432421875, 2.153321044921875, 2.15322216796875, 2.15401416015625, 2.154091064453125, 2.15307666015625, 2.154629150390625, 2.15393896484375, 2.15340771484375, 2.153814697265625, 2.153936279296875, 2.154150390625, 2.153486572265625, 2.154694091796875, 2.15483154296875, 2.154114013671875, 2.153946044921875, 2.1546708984375, 2.15391015625, 2.1548994140625, 
2.154569091796875, 2.154937255859375, 2.154496826171875, 2.154492919921875, 2.154501708984375, 2.15464599609375, 2.154235107421875, 2.154486572265625, 2.15464697265625, 2.154468017578125, 2.155227294921875, 2.154882568359375, 2.154663330078125, 2.153976806640625, 2.15427001953125, 2.1545458984375, 2.154760009765625, 2.155342041015625, 2.1553828125, 2.154933837890625, 2.154500732421875, 2.15493212890625, 2.15534375, 2.1550771484375, 2.155051513671875, 2.155622314453125, 2.155462646484375, 2.155150390625, 2.15542578125, 2.15520458984375, 2.15525, 2.15550732421875, 2.15552490234375, 2.1555712890625, 2.1557412109375, 2.155888671875, 2.15494384765625, 2.155469482421875, 2.1559990234375, 2.156082763671875, 2.1556552734375, 2.156632568359375, 2.156023681640625, 2.15436474609375, 2.153816650390625, 2.154111083984375, 2.15410400390625, 2.154985595703125, 2.15435107421875, 2.153399658203125, 2.154334228515625, 2.153712158203125, 2.1545986328125, 2.15471875, 2.1541240234375, 2.154567626953125, 2.1542216796875, 2.15448486328125, 2.1534501953125, 2.1555380859375, 2.154695068359375, 2.1541083984375, 2.154921875, 2.154289306640625, 2.154621337890625, 2.15501416015625, 2.1543505859375, 2.154255859375, 2.15504052734375, 2.154267578125, 2.154643310546875, 2.154623291015625, 2.1547373046875, 2.1549189453125, 2.15486279296875, 2.154537109375, 2.15472119140625, 2.15438818359375, 2.154477783203125, 2.155399169921875, 2.15554248046875, 2.155152587890625, 2.155150146484375, 2.155085693359375, 2.154963134765625, 2.154284423828125, 2.1551826171875, 2.15504296875, 2.1550439453125, 2.15484326171875, 2.15489306640625, 2.155017822265625, 2.155342529296875, 2.15538623046875, 2.15503662109375, 2.154445068359375, 2.15603759765625, 2.15510107421875, 2.156349365234375, 2.15521826171875, 2.155336181640625, 2.15549951171875, 2.1556796875, 2.155181396484375, 2.156218505859375, 2.155602783203125, 2.154482177734375, 2.153758544921875, 2.15450244140625, 2.15416357421875, 2.153860107421875, 2.153955078125, 2.154624267578125, 2.15406884765625, 2.1538037109375, 2.15387744140625, 2.154406005859375, 2.154027099609375, 2.154689697265625, 2.15476708984375, 2.15424169921875, 2.15487939453125, 2.154475341796875, 2.1543466796875, 2.15468994140625, 2.154701416015625, 2.15486669921875, 2.1554482421875, 2.15520458984375, 2.15374853515625, 2.153969482421875, 2.154931640625, 2.154985595703125, 2.15489794921875, 2.154056884765625, 2.15442724609375, 2.15461181640625, 2.15497216796875, 2.15499462890625, 2.154851318359375, 2.154536376953125, 2.154569580078125, 2.1551787109375, 2.15495703125, 2.154661376953125, 2.154598876953125, 2.1551669921875, 2.154668701171875, 2.155399169921875, 2.15532958984375, 2.15520263671875, 2.15521484375, 2.15503466796875, 2.1550322265625, 2.154961181640625, 2.1554052734375, 2.154875, 2.1551328125, 2.1555322265625, 2.15577392578125, 2.1547763671875, 2.155614013671875, 2.1548134765625, 2.15576953125, 2.1561640625, 2.155378662109375, 2.155300048828125, 2.15560888671875, 2.156122314453125, 2.15542138671875, 2.15410693359375, 2.15370751953125, 2.15404345703125, 2.15413134765625, 2.15398388671875, 2.154730712890625, 2.154179443359375, 2.1545166015625, 2.15442236328125, 2.153889404296875, 2.1549775390625, 2.154921875, 2.15444189453125, 2.15519091796875, 2.154968505859375, 2.155385498046875, 2.154739013671875, 2.1551982421875, 2.15461572265625, 2.154502197265625, 2.155098388671875, 2.15466259765625, 2.1547666015625, 2.15472607421875, 2.1551494140625, 2.154901611328125, 2.15481689453125, 2.155205322265625, 2.15503466796875, 
2.156209228515625, 2.155299560546875, 2.1552314453125, 2.155640869140625, 2.155310791015625, 2.1551865234375, 2.155093994140625, 2.155556884765625, 2.155173828125, 2.1561201171875, 2.1563681640625, 2.15586328125, 2.15506396484375, 2.155530029296875, 2.155470703125, 2.15595166015625, 2.156271484375, 2.156151123046875, 2.155916748046875, 2.155580322265625, 2.15549951171875, 2.156786865234375, 2.156016357421875, 2.15598291015625, 2.1558330078125, 2.15690283203125, 2.156203125, 2.1568740234375, 2.156804443359375, 2.156328857421875, 2.15601171875, 2.15711328125, 2.156693603515625, 2.155583251953125, 2.154637451171875, 2.154427734375, 2.15435888671875, 2.15477880859375, 2.154145263671875, 2.15453759765625, 2.1538388671875, 2.154702880859375, 2.154565185546875, 2.154799560546875, 2.155702392578125, 2.155001708984375, 2.15463134765625, 2.155059326171875, 2.15557080078125, 2.155644287109375, 2.155435791015625, 2.155279296875, 2.155478271484375, 2.155039306640625, 2.1556474609375, 2.155759521484375, 2.1556796875, 2.155052978515625, 2.155063232421875, 2.156125244140625, 2.155266845703125, 2.15598583984375, 2.15598583984375, 2.155769775390625, 2.155423828125, 2.156295166015625, 2.155389892578125, 2.154956787109375, 2.1549384765625, 2.155702392578125, 2.156359130859375, 2.15553466796875, 2.156149169921875, 2.15556884765625, 2.15610107421875, 2.156392822265625, 2.15614208984375, 2.155922119140625, 2.155926513671875, 2.15628515625, 2.1563779296875, 2.156031982421875, 2.156570556640625, 2.155749267578125, 2.156455810546875, 2.156539306640625, 2.156162109375, 2.156419921875, 2.156111083984375, 2.1556201171875, 2.157068115234375, 2.15618359375, 2.15617529296875, 2.156353515625, 2.156662841796875, 2.156812255859375, 2.15483154296875, 2.154446533203125, 2.1550927734375, 2.155013427734375, 2.155321044921875, 2.153851806640625, 2.15472314453125, 2.154208740234375, 2.155258544921875, 2.15474609375, 2.155627685546875, 2.1557392578125, 2.1559658203125, 2.155125244140625, 2.15587890625, 2.15556103515625, 2.155802734375, 2.156337158203125, 2.155599853515625, 2.155122802734375, 2.154838134765625, 2.15573291015625, 2.155853271484375, 2.15604833984375, 2.155375244140625, 2.155175048828125, 2.156044677734375, 2.155989501953125, 2.156078857421875, 2.156032470703125, 2.156312255859375, 2.1562412109375, 2.155767333984375, 2.15622900390625, 2.155845703125, 2.156160888671875, 2.155933349609375, 2.156331298828125, 2.156830810546875, 2.155802734375, 2.156553466796875, 2.15624560546875, 2.156656005859375, 2.156861083984375, 2.155530029296875, 2.155895751953125, 2.156756591796875, 2.15710546875, 2.156838623046875, 2.15633251953125, 2.156612060546875, 2.156882080078125, 2.156545166015625, 2.15689306640625, 2.1569375, 2.156795654296875, 2.156451904296875, 2.157452392578125, 2.15695458984375, 2.15725048828125, 2.15652978515625, 2.15766015625, 2.1569189453125, 2.15613720703125, 2.154751953125, 2.15535205078125, 2.155133056640625, 2.155610107421875, 2.15499560546875, 2.155558837890625, 2.155183837890625, 2.15576611328125, 2.155431884765625, 2.15549072265625, 2.155479736328125, 2.15575341796875, 2.15545654296875, 2.156015380859375, 2.15538916015625, 2.156316650390625, 2.156632080078125, 2.156115966796875, 2.15632275390625, 2.1554462890625, 2.156004638671875, 2.156026611328125, 2.156030029296875, 2.156322998046875, 2.155793212890625, 2.15531201171875, 2.15627783203125, 2.1556279296875, 2.15658349609375, 2.156557373046875, 2.15524658203125, 2.156424560546875, 2.156294921875, 2.155815185546875, 2.15559130859375, 2.15583447265625, 
2.156342041015625, 2.156015625, 2.156478271484375, 2.156675048828125, 2.1556572265625, 2.156151123046875, 2.156275634765625, 2.156451904296875, 2.1573857421875, 2.156472412109375, 2.155664794921875, 2.156966552734375, 2.156527587890625, 2.156632080078125, 2.156265625, 2.1570087890625, 2.15602685546875, 2.156317138671875, 2.157116943359375, 2.1564873046875, 2.15671337890625, 2.156304931640625, 2.157250732421875, 2.15592333984375, 2.157657470703125, 2.157234130859375]",tokens/s,0.4640228640073425,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3563.528192,4495.179776,0.0,4116.709376,3980.386816,s,1,10.4573623046875,10.4573623046875,0.0,10.4573623046875,10.4573623046875,10.4573623046875,10.4573623046875,[10.4573623046875],,kWh,9.463678974167351e-05,1.0428446453185679e-05,2.954335696799676e-05,0.00013460859316285595,,MB,3465.621504,4826.529792,0.0,4418.699264,4245.764608,s,10,6.514956481933594,0.6514956481933594,0.0008382393185001823,0.6516083984375001,0.6524576538085937,0.6526914428710937,0.6528784741210938,"[0.6507066650390625, 0.6516373901367187, 0.6515794067382813, 0.6514669799804688, 0.6501195068359376, 0.6517879638671875, 0.6524057006835937, 0.6503561401367187, 0.6529252319335938, 
0.6519714965820312]",tokens/s,392.94199540688413,kWh,1.9035242236718612e-05,2.0992695936843544e-06,1.2672371248999888e-05,3.380688307940286e-05,tokens/kWh,7572422.438316127,MB,3469.758464,4837.015552,0.0,4429.185024,4245.767168,s,10,384.47273437499996,38.447273437499994,0.025700547755004608,38.459927734375,38.465746484375,38.4658712890625,38.4659711328125,"[38.38426171875, 38.41635546875, 38.44120703125, 38.4502265625, 38.4564609375, 38.46393359375, 38.46571875, 38.46339453125, 38.46599609375, 38.4651796875]",tokens/s,1.638607744250395,kWh,0.0011221179002866139,0.00012377796577120082,0.0007458437355633994,0.001991739601621214,tokens/kWh,31630.640847187035,,s,630,384.46834374999986,0.6102672123015873,0.0005672002210965215,0.6103543701171874,0.6108875366210937,0.6110109985351563,0.6112549932861329,"[0.6090960083007813, 0.6084502563476563, 0.608760498046875, 0.6085468139648438, 0.6086820068359375, 0.608964599609375, 0.6090198364257813, 0.6083390502929688, 0.60896337890625, 0.6084527587890625, 0.6092669677734375, 0.6092006225585938, 0.6084193115234375, 0.6095037231445313, 0.6087141723632813, 0.6089382934570312, 0.6088442993164063, 0.6087393188476562, 0.6091402587890625, 0.6090695190429688, 0.608712646484375, 0.6101893310546875, 0.6080819091796875, 0.6089801025390625, 0.6094119873046875, 0.608943603515625, 0.610287109375, 0.6082645874023438, 0.6091060180664063, 0.6097963256835938, 0.6085675659179688, 0.6099081420898438, 0.6093010864257813, 0.608392578125, 0.6096492309570313, 0.6088048706054687, 0.610361328125, 0.6084198608398438, 0.6096354370117187, 0.6103089599609375, 0.6092575073242188, 0.6096132202148438, 0.6098903198242187, 0.6089487915039062, 0.6100578002929687, 0.60937255859375, 0.6100594482421875, 0.6092112426757812, 0.6090424194335937, 0.6103367919921875, 0.6088253173828125, 0.6102559814453125, 0.6092584228515625, 0.609912841796875, 0.6095703735351562, 0.6097042846679688, 0.6093067626953125, 0.60988330078125, 0.6095225219726562, 0.6101954345703124, 0.6098411865234376, 0.6093475341796875, 0.6101852416992187, 0.6092809448242188, 0.6101094360351562, 0.6082826538085937, 0.6092533569335937, 0.6097572021484375, 0.6091590576171875, 0.6098058471679687, 0.609083740234375, 0.610011474609375, 0.609449462890625, 0.609554931640625, 0.6091915283203125, 0.6089580688476562, 0.6100978393554688, 0.60969384765625, 0.60918115234375, 0.6101787109375, 0.6088201293945312, 0.6105702514648438, 0.6092473754882812, 0.6092838134765625, 0.6100452270507812, 0.6097061767578125, 0.6102218627929688, 0.6087933959960937, 0.6097158813476562, 0.6104061889648438, 0.6090422973632813, 0.6100711059570313, 0.6098965454101563, 0.6100003051757813, 0.6095570678710938, 0.6095827026367188, 0.610037353515625, 0.60992333984375, 0.6097310791015625, 0.61005419921875, 0.6099200439453125, 0.6091724243164063, 0.6103319091796875, 0.6097229614257812, 0.609804443359375, 0.6102564697265624, 0.609752685546875, 0.6098746948242187, 0.6103369750976563, 0.6098450927734375, 0.6097754516601562, 0.6103055419921875, 0.6095953979492188, 0.6103026123046875, 0.6103894653320312, 0.6097640991210938, 0.6101337890625, 0.6101913452148438, 0.6103736572265624, 0.6095872192382813, 0.6099100952148437, 0.6101613159179687, 0.610305908203125, 0.6104202270507812, 0.6102186279296875, 0.6095318603515625, 0.6104039916992188, 0.6099992065429688, 0.6098433227539063, 0.6099674682617188, 0.6096532592773437, 0.6100801391601562, 0.6097854614257813, 0.6095120239257813, 0.6103056640625, 0.6099771728515625, 0.609578369140625, 0.6103779907226563, 0.6103719482421875, 
0.6095155029296875, 0.6104514770507813, 0.6100208740234375, 0.6097679443359375, 0.6097694702148437, 0.6106480712890625, 0.6100399169921875, 0.6097529296875, 0.610355224609375, 0.609977783203125, 0.6100938720703125, 0.610328369140625, 0.6099795532226563, 0.6102411499023438, 0.609832763671875, 0.6103208618164062, 0.610081787109375, 0.6100912475585938, 0.6101895751953125, 0.610353515625, 0.6101071166992188, 0.6107816772460938, 0.6100471801757813, 0.6102208862304688, 0.6105856323242187, 0.6097660522460937, 0.6105784301757813, 0.6104160766601563, 0.6101141967773438, 0.6105151977539063, 0.60994970703125, 0.6100787353515625, 0.6105149536132812, 0.61008056640625, 0.6100582275390625, 0.6106473999023437, 0.6098645629882813, 0.6100541381835938, 0.6104146118164062, 0.6102568969726563, 0.610663818359375, 0.610531982421875, 0.6101392822265626, 0.6105437622070312, 0.6101736450195312, 0.6103326416015625, 0.61028759765625, 0.610763916015625, 0.6102324829101563, 0.6103866577148438, 0.6103729248046875, 0.6099683227539062, 0.6100995483398437, 0.610337890625, 0.6098373413085938, 0.6101339111328125, 0.61000927734375, 0.6096589965820313, 0.6109815063476562, 0.6097310180664063, 0.61061767578125, 0.610209716796875, 0.6096117553710938, 0.6103079223632812, 0.6105637817382813, 0.610521484375, 0.6099417114257812, 0.6102159423828125, 0.610492431640625, 0.6102261962890625, 0.610174072265625, 0.6103909301757813, 0.6102794189453125, 0.609967529296875, 0.6100049438476562, 0.6103330688476563, 0.6102509765625, 0.6103941040039063, 0.6103749389648437, 0.60979638671875, 0.6105010375976563, 0.610197509765625, 0.6100459594726563, 0.6109686889648438, 0.6098619384765624, 0.6103475952148437, 0.6103693237304687, 0.6103880615234375, 0.6099354858398438, 0.6102138671875, 0.6106185913085938, 0.6098746948242187, 0.6102774047851562, 0.6105184326171875, 0.6099586181640625, 0.6109733276367187, 0.6100340576171875, 0.6104267578125, 0.61057421875, 0.6102254638671875, 0.6105209350585937, 0.6104053955078125, 0.61082421875, 0.610802734375, 0.6102413940429687, 0.6108724975585937, 0.6108927001953125, 0.6102405395507813, 0.6106473999023437, 0.6105316772460937, 0.6103616943359375, 0.6100801391601562, 0.6112569580078125, 0.6104248046875, 0.6101829833984375, 0.6103634643554687, 0.6095133666992187, 0.6109862060546875, 0.610718994140625, 0.6098419189453125, 0.6104116821289063, 0.6095, 0.6103265380859375, 0.6106603393554687, 0.6099819946289062, 0.6097412719726563, 0.6111395874023438, 0.6095827026367188, 0.6108819580078125, 0.6099450073242187, 0.6100506591796875, 0.6106132202148438, 0.6102151489257812, 0.6099361572265625, 0.6103777465820313, 0.6101986083984375, 0.6105732421875, 0.61054931640625, 0.610621826171875, 0.6099002075195312, 0.61038818359375, 0.6103051147460937, 0.6107286376953125, 0.610433349609375, 0.6107156372070313, 0.6105252075195312, 0.6105189819335938, 0.61015576171875, 0.6110543823242187, 0.610620849609375, 0.6102533569335937, 0.6102498168945313, 0.6105712280273438, 0.6102937622070312, 0.6101273193359374, 0.610498291015625, 0.6101920166015625, 0.6104590454101563, 0.610794189453125, 0.6101744384765625, 0.6108883056640625, 0.610123779296875, 0.6103654174804688, 0.6105947875976563, 0.6106603393554687, 0.6108671264648438, 0.6105887451171875, 0.6101355590820312, 0.610887451171875, 0.610244873046875, 0.6103985595703125, 0.6108070068359375, 0.6105148315429687, 0.6110955200195313, 0.6108405151367188, 0.6107074584960938, 0.6110088500976563, 0.6105499267578125, 0.6099988403320312, 0.6107484130859375, 0.61009716796875, 0.6108262329101563, 
0.6096029052734375, 0.610570556640625, 0.610397705078125, 0.609651611328125, 0.6106480712890625, 0.61031005859375, 0.6104925537109375, 0.6104390869140625, 0.6105394897460937, 0.6101621704101563, 0.6107075805664063, 0.6103699340820312, 0.6103582763671875, 0.6105354614257813, 0.610065185546875, 0.610385986328125, 0.6104017333984375, 0.6105086669921875, 0.6109661865234375, 0.610418701171875, 0.6102097778320312, 0.6107197265625, 0.6104658203125, 0.6108712768554687, 0.610318359375, 0.6100889892578125, 0.6107152709960938, 0.6109371948242187, 0.6102835083007813, 0.6110003051757813, 0.6104874267578125, 0.6102968139648437, 0.61108154296875, 0.6101611938476562, 0.6107177124023437, 0.6106275634765626, 0.6112010498046875, 0.6103121337890625, 0.6109384765625, 0.6103938598632812, 0.6107551879882812, 0.610673828125, 0.61046630859375, 0.610532958984375, 0.6106406860351562, 0.6106869506835938, 0.6105858154296875, 0.6110704956054688, 0.610211669921875, 0.610906005859375, 0.6102861328125, 0.6105409545898437, 0.6107675170898438, 0.61068603515625, 0.6106018676757813, 0.6112501831054687, 0.6102958374023437, 0.6106951904296875, 0.6100008544921875, 0.6104107666015625, 0.610318115234375, 0.6100132446289063, 0.6111651611328125, 0.6099130249023438, 0.610789794921875, 0.6106354370117187, 0.61014697265625, 0.6103236694335937, 0.610946044921875, 0.6106171264648438, 0.610324462890625, 0.6105066528320312, 0.6101213989257812, 0.6107918701171875, 0.6104920043945312, 0.61086083984375, 0.6102144775390625, 0.6106419067382812, 0.6106988525390625, 0.610336181640625, 0.6100687255859375, 0.6110331420898437, 0.6101918334960937, 0.6103512573242188, 0.6108096923828125, 0.6102547607421875, 0.6108285522460938, 0.6110157470703125, 0.6107221069335937, 0.6106929931640624, 0.6105497436523437, 0.610037841796875, 0.61115380859375, 0.6100897216796874, 0.6103983154296875, 0.610609130859375, 0.6102425537109375, 0.6104266967773437, 0.6110127563476563, 0.6101115112304687, 0.6108079833984374, 0.6104503784179688, 0.6108599243164062, 0.610407958984375, 0.6111585083007812, 0.6106439819335937, 0.6110303955078125, 0.610271240234375, 0.6106846313476563, 0.6108927001953125, 0.6101299438476563, 0.6107689208984375, 0.6109710693359375, 0.6105823364257813, 0.61077783203125, 0.6106378173828125, 0.6107095336914062, 0.610710693359375, 0.6108168334960937, 0.6104063720703125, 0.6110336303710937, 0.610076171875, 0.6098011474609375, 0.6101842651367188, 0.6108245239257812, 0.6096748657226563, 0.610658935546875, 0.6100380859375, 0.6109224853515625, 0.6098063354492187, 0.61053076171875, 0.610688720703125, 0.6106028442382813, 0.610356201171875, 0.6102913818359375, 0.6101486206054687, 0.6109524536132812, 0.610075439453125, 0.610819580078125, 0.6102649536132813, 0.6107093505859374, 0.6104130859375, 0.6104392700195312, 0.61022021484375, 0.6107393798828125, 0.6105497436523437, 0.6100529174804687, 0.6108324584960938, 0.61034228515625, 0.61023291015625, 0.6107208251953125, 0.6103804931640625, 0.6105541381835937, 0.61051904296875, 0.6109224853515625, 0.6100930786132812, 0.610405517578125, 0.6113425903320312, 0.61042724609375, 0.6109452514648438, 0.6101565551757813, 0.6107545776367187, 0.6108323974609375, 0.610150390625, 0.6103222045898438, 0.6109002075195312, 0.610995849609375, 0.6104231567382813, 0.6113660278320312, 0.6103663330078125, 0.6107053833007813, 0.6102097778320312, 0.610680419921875, 0.6104946899414062, 0.6103143920898437, 0.6110802001953125, 0.6103775024414062, 0.6107463989257812, 0.6105205078125, 0.6111751708984375, 0.6102006225585938, 0.61135498046875, 
0.6102411499023438, 0.6107234497070313, 0.6103710327148437, 0.61014111328125, 0.6103770141601562, 0.6103634033203125, 0.61069384765625, 0.6104965209960938, 0.6104432373046875, 0.61039208984375, 0.61032373046875, 0.6102998657226563, 0.6108250122070312, 0.610334716796875, 0.6104146118164062, 0.6105001831054687, 0.6102429809570312, 0.6105613403320312, 0.6103067626953125, 0.6107095336914062, 0.6102866821289062, 0.6101697387695313, 0.6107027587890625, 0.61000927734375, 0.6103187866210937, 0.610703369140625, 0.610620849609375, 0.6109596557617187, 0.6103853149414062, 0.6109967651367187, 0.6100850830078125, 0.6110531616210938, 0.6100833740234375, 0.6104219360351563, 0.6108773803710937, 0.6101287231445313, 0.6106746826171875, 0.6106624145507813, 0.610909912109375, 0.6104019775390624, 0.61080029296875, 0.6104452514648437, 0.6104496459960937, 0.6108167724609375, 0.6103964233398438, 0.6106337890625, 0.6113442993164062, 0.6102157592773437, 0.610970458984375, 0.6105879516601562, 0.6110172729492187, 0.6101951293945312, 0.6109410400390625, 0.6104273681640625, 0.6108549194335937, 0.6107973022460937, 0.610593017578125, 0.6105426025390625, 0.6108866577148437, 0.6106618041992188, 0.610755126953125, 0.6107421875, 0.6102978515625, 0.6112400512695313, 0.6103338623046874, 0.6110248413085938, 0.6092952880859375, 0.6105497436523437, 0.6100065307617187, 0.6108591918945312, 0.6095916137695313, 0.6105489501953125, 0.6105137329101562, 0.6101810913085938, 0.6112948608398437, 0.60995849609375, 0.6097098388671875, 0.61123583984375, 0.6098510131835938, 0.6106725463867188, 0.6104515991210937, 0.6106319580078124, 0.6102650756835938, 0.61079736328125, 0.6099724731445313, 0.6109937744140626, 0.6100443115234375, 0.6105042724609375, 0.61080810546875, 0.6102652587890625, 0.6105252075195312, 0.61050830078125, 0.6104171142578125, 0.610428955078125, 0.610639892578125, 0.6108561401367187, 0.6106427612304688, 0.6103059692382813, 0.6109490966796876, 0.6110658569335937, 0.6107730102539063, 0.6106644287109375, 0.6104304809570312, 0.610922607421875, 0.6101253662109375, 0.6109519653320312, 0.6105042114257813, 0.610482666015625, 0.6106760864257812, 0.6104493408203125, 0.6108674926757812, 0.6101047973632813, 0.6105118408203125, 0.610850830078125, 0.6100556030273437, 0.6108903198242187, 0.6104965209960938, 0.6106492309570313, 0.61102685546875, 0.6107894287109376, 0.6108814697265625, 0.6110543823242187, 0.6104352416992187, 0.6114283447265625, 0.61051904296875, 0.6107484130859375, 0.6108098754882813]",tokens/s,1.6386264571359783,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,5180.567552,5584.584704,0.0,5182.062592,5181.108736,s,1,11.058919921875,11.058919921875,0.0,11.058919921875,11.058919921875,11.058919921875,11.058919921875,[11.058919921875],,kWh,0.00011530209433749784,1.2711294180273348e-05,3.4602805459992e-05,0.0001626161939777632,,MB,5213.7984,5739.773952,0.0,5322.571776,5283.621376,s,10,2.440563400268555,0.24405634002685547,0.0007691004744948949,0.24409329986572265,0.2450234130859375,0.24511565704345703,0.24518945220947266,"[0.2422626495361328, 0.2437012481689453, 0.2441497344970703, 0.24379766845703124, 0.244036865234375, 0.24375567626953126, 0.24446499633789062, 0.24520790100097656, 0.24500291442871094, 0.2441837463378906]",tokens/s,1048.9381262204877,kWh,7.149432002743991e-06,7.884526903470215e-07,4.752178598487769e-06,1.269006329157878e-05,tokens/kWh,20173264.239736572,MB,5221.220352,5756.551168,0.0,5339.348992,5283.623936,s,10,24.758136962890624,2.4758136962890624,0.010295311844426745,2.4763955078124997,2.4855232666015623,2.486075109863281,2.4865165844726564,"[2.485400634765625, 2.4732705078125, 2.4768505859375, 2.47284716796875, 2.448720947265625, 2.4759404296875, 2.473294189453125, 2.486626953125, 2.481483154296875, 2.483702392578125]",tokens/s,25.446179611345226,kWh,7.269821247642271e-05,8.01754476626919e-06,4.8101252573112665e-05,0.00012881700981580457,tokens/kWh,489065.8468946275,,s,630,24.755305088043233,0.03929413506038606,0.0005142077816655496,0.03921575927734375,0.03971001434326172,0.03999087085723877,0.0413761264038086,"[0.0412760009765625, 0.03926406478881836, 0.039380958557128906, 0.039801055908203126, 0.041425086975097655, 0.040357311248779296, 0.03984159851074219, 0.03956777572631836, 0.039565471649169924, 0.03940556716918946, 0.04016080093383789, 0.0394532470703125, 0.03915539169311524, 0.03952252960205078, 0.039196670532226564, 0.039526142120361325, 0.03941996765136719, 0.03915539169311524, 0.03924979019165039, 0.0392869758605957, 0.03913363265991211, 0.03859561538696289, 0.03840304183959961, 0.0388724479675293, 0.03911129760742187, 0.039378944396972655, 0.03923763275146484, 0.03892428970336914, 0.03914473724365235, 0.03924860763549805, 0.0391352653503418, 0.03909603118896485, 0.03930752182006836, 0.039196670532226564, 0.03940304183959961, 0.039551456451416014, 0.04173110580444336, 0.039384033203125, 0.04032438278198242, 0.039190784454345706, 0.03946748733520508, 0.03984195327758789, 0.03941676712036133, 0.03935302352905273, 0.03948384094238281, 0.03955209732055664, 0.039430816650390624, 0.03929471969604492, 0.03948799896240234, 0.039223072052001956, 0.03922739028930664, 0.03928473663330078, 0.039052894592285156, 0.039207103729248044, 0.039510238647460935, 0.03923353576660156, 0.03931366348266602, 0.039124351501464844, 0.0391541748046875, 0.03949667358398438, 0.03908291244506836, 0.03941203308105469, 0.03909190368652344, 0.03971286392211914, 0.039221118927001954, 0.03924790573120117, 0.03912499237060547, 0.03936796951293945, 0.039158496856689456, 0.039425247192382815, 0.03931596755981445, 0.03898777770996094, 0.03899446487426758, 0.03910015869140625, 0.038989761352539065, 0.03891177749633789, 0.03921744155883789, 0.03936870574951172, 0.03921331024169922, 0.038921985626220706, 0.038752254486083985, 0.038817790985107424, 0.03903241729736328, 0.03917660903930664, 0.03891814422607422, 0.0392437744140625, 0.039665088653564454, 0.03916243362426758, 0.0392171516418457, 0.03899907302856445, 0.038588863372802734, 0.03906614303588867, 0.038919326782226565, 0.03914652633666992, 
0.03902649688720703, 0.03939059066772461, 0.039641822814941406, 0.03910553741455078, 0.03968297576904297, 0.0389304313659668, 0.03900368118286133, 0.03933590316772461, 0.03919830322265625, 0.03942623901367188, 0.03913596725463867, 0.04057292938232422, 0.0395797119140625, 0.03918175888061524, 0.03922691345214844, 0.03955187225341797, 0.0392457275390625, 0.03929427337646484, 0.0392938232421875, 0.039569408416748046, 0.0394936637878418, 0.03957731246948242, 0.0393034896850586, 0.039158912658691404, 0.03960710525512695, 0.039185855865478514, 0.03982767868041992, 0.039378814697265625, 0.03906198501586914, 0.039214366912841796, 0.03961532974243164, 0.039180160522460934, 0.03958047866821289, 0.039188480377197264, 0.03970268630981445, 0.03923072052001953, 0.03947375869750976, 0.03930316925048828, 0.03897292709350586, 0.03890431976318359, 0.039139328002929685, 0.03921327972412109, 0.03903055953979492, 0.03944019317626953, 0.03943206405639649, 0.039356735229492186, 0.039204158782958985, 0.03913593673706055, 0.03904025650024414, 0.03898028945922852, 0.03891584014892578, 0.039127361297607424, 0.03944422531127929, 0.03962496185302734, 0.039309120178222655, 0.039184574127197266, 0.039661441802978516, 0.039649406433105466, 0.03946707153320313, 0.0390819206237793, 0.04118947219848633, 0.03964508819580078, 0.039839744567871094, 0.03930112075805664, 0.03920272064208984, 0.03913737487792969, 0.039122943878173826, 0.03927852630615234, 0.03942111968994141, 0.039242622375488284, 0.03920076751708984, 0.039436286926269534, 0.039640510559082034, 0.039141952514648436, 0.03916352081298828, 0.03894262313842774, 0.03924425506591797, 0.03922041702270508, 0.03911148834228516, 0.039894977569580076, 0.03904547119140625, 0.039300830841064456, 0.039169151306152346, 0.039150463104248044, 0.03896934509277344, 0.03922668838500976, 0.039121185302734375, 0.03923820877075195, 0.03942588806152344, 0.03883011245727539, 0.0394730224609375, 0.039376895904541014, 0.03943180847167969, 0.03942038345336914, 0.039204864501953124, 0.039495552062988284, 0.039204734802246094, 0.03881833648681641, 0.038973857879638675, 0.038960254669189454, 0.03871014404296875, 0.03902182388305664, 0.038647743225097654, 0.0389947509765625, 0.039143264770507814, 0.03885481643676758, 0.03922857666015625, 0.0396247673034668, 0.03929987335205078, 0.03878464126586914, 0.03891187286376953, 0.039442943572998046, 0.0391736946105957, 0.03881017684936523, 0.038952705383300784, 0.039112831115722654, 0.040359935760498046, 0.038752254486083985, 0.03864985656738281, 0.038981632232666014, 0.03871855926513672, 0.03856009674072266, 0.03956374359130859, 0.039114494323730466, 0.038977504730224606, 0.038699390411376954, 0.040738815307617186, 0.044181503295898435, 0.039096446990966795, 0.03908185577392578, 0.039395584106445315, 0.03907148742675781, 0.039428096771240234, 0.03952595138549805, 0.039333694458007815, 0.03905513763427734, 0.03927443313598633, 0.03911977767944336, 0.03907968139648438, 0.03902694320678711, 0.039038528442382814, 0.03883871841430664, 0.0388403205871582, 0.03877840042114258, 0.039166431427001956, 0.038973377227783206, 0.040373470306396486, 0.040711006164550784, 0.03930931091308594, 0.03879081726074219, 0.03898185729980469, 0.0389879035949707, 0.03991961669921875, 0.03931891250610352, 0.03915635299682617, 0.03912041473388672, 0.03916348648071289, 0.03915225601196289, 0.039600128173828124, 0.03906140899658203, 0.03876873779296875, 0.03845478439331055, 0.03861872100830078, 0.038728065490722656, 0.038521472930908206, 0.03842614364624024, 0.03829990386962891, 
0.03853529739379883, 0.03862326431274414, 0.03905737686157226, 0.03928678512573242, 0.03890585708618164, 0.04112588882446289, 0.0389582405090332, 0.03916067123413086, 0.039059070587158205, 0.038789630889892575, 0.039249984741210935, 0.038700607299804686, 0.038821342468261716, 0.039201568603515625, 0.038844417572021485, 0.0387869758605957, 0.03879110336303711, 0.03895721435546875, 0.0395898551940918, 0.03867171096801758, 0.038924991607666014, 0.03894892883300781, 0.03876038360595703, 0.03858227157592774, 0.038727680206298826, 0.03861679840087891, 0.038535457611083984, 0.03880931091308594, 0.03873001480102539, 0.038580062866210935, 0.039274593353271485, 0.03873388671875, 0.038581760406494144, 0.03900057601928711, 0.03843638229370117, 0.03863113784790039, 0.03928940963745117, 0.039405311584472656, 0.038981311798095705, 0.039386974334716794, 0.03877775955200195, 0.03901996612548828, 0.039064449310302736, 0.0390447998046875, 0.03856524658203125, 0.038650497436523434, 0.038424190521240235, 0.03848396682739258, 0.03862089538574219, 0.03870355224609375, 0.03866755294799805, 0.03870835113525391, 0.03879276657104492, 0.038420734405517576, 0.03999884796142578, 0.04037273788452148, 0.039406753540039065, 0.039220191955566405, 0.039271839141845705, 0.03955152130126953, 0.03924505615234375, 0.03920876693725586, 0.03970969772338867, 0.03973529434204102, 0.03930486297607422, 0.04010019302368164, 0.03914678573608398, 0.039095073699951174, 0.039163841247558596, 0.03909353637695313, 0.0392138557434082, 0.03917203140258789, 0.03893145751953125, 0.038919166564941404, 0.03920076751708984, 0.039428096771240234, 0.038762592315673826, 0.03909212875366211, 0.039282913208007815, 0.03907900619506836, 0.03884921646118164, 0.03935174560546875, 0.039107135772705075, 0.039337982177734376, 0.03946905517578125, 0.039368350982666014, 0.03899427032470703, 0.03921017456054687, 0.03947993469238281, 0.042234046936035156, 0.039139328002929685, 0.038911006927490235, 0.03918127822875977, 0.039137279510498044, 0.03879116821289062, 0.038940414428710934, 0.039212352752685545, 0.03895305633544922, 0.03885551834106445, 0.03923535919189453, 0.03922726440429687, 0.03927500915527344, 0.039077728271484376, 0.03953184127807617, 0.039178081512451175, 0.039014560699462894, 0.03943075180053711, 0.039432479858398435, 0.03911660766601562, 0.039726207733154294, 0.039172000885009765, 0.03911164855957031, 0.039051265716552735, 0.039481441497802736, 0.03917324829101562, 0.03896966552734375, 0.03922582244873047, 0.039636993408203126, 0.039210334777832034, 0.03910335922241211, 0.03940230560302734, 0.039185375213623044, 0.039341793060302735, 0.04251881790161133, 0.03920896148681641, 0.03898291015625, 0.039828224182128905, 0.03906745529174805, 0.03918048095703125, 0.03897958374023437, 0.03872550582885742, 0.0390382080078125, 0.039088287353515626, 0.03912480163574219, 0.03892540740966797, 0.0391657600402832, 0.039051265716552735, 0.0396082878112793, 0.03902012634277344, 0.038984127044677734, 0.03883827209472656, 0.0389095344543457, 0.039043487548828124, 0.03909222412109375, 0.03887104034423828, 0.03938860702514648, 0.03914931106567383, 0.03887567901611328, 0.03938127899169922, 0.039591552734375, 0.039145854949951174, 0.03943577575683594, 0.03897971343994141, 0.039022975921630856, 0.039684097290039064, 0.03933766555786133, 0.03940998458862305, 0.03903078460693359, 0.03916799926757813, 0.03950511932373047, 0.039502304077148435, 0.03936902236938476, 0.03898275375366211, 0.038755008697509766, 0.03893062210083008, 0.039028766632080075, 0.039301151275634764, 
0.03926217651367187, 0.039226497650146484, 0.038917182922363285, 0.038948673248291016, 0.039171966552734375, 0.039442558288574216, 0.039757823944091795, 0.039702529907226565, 0.03966748809814453, 0.039134815216064454, 0.03922294235229492, 0.03921123123168945, 0.03922576141357422, 0.040189697265625, 0.039479393005371094, 0.03927872085571289, 0.03893475341796875, 0.03899311828613281, 0.039103008270263674, 0.0392806396484375, 0.03947724914550781, 0.03920182418823242, 0.03923452758789062, 0.03987251281738281, 0.04126924896240235, 0.039106559753417966, 0.039378944396972655, 0.03957104110717773, 0.03924822235107422, 0.03956947326660156, 0.03895296096801758, 0.03930521774291992, 0.03990323257446289, 0.03971686553955078, 0.03972406387329101, 0.039543777465820315, 0.039725055694580076, 0.0393043212890625, 0.039224224090576174, 0.03993596649169922, 0.03948044967651367, 0.039435104370117186, 0.03973251342773437, 0.04047334289550781, 0.03964495849609375, 0.039636257171630856, 0.03957766342163086, 0.0395447998046875, 0.039812000274658206, 0.039263553619384765, 0.03916624069213867, 0.04170191955566406, 0.03973011016845703, 0.03914377593994141, 0.03937318420410156, 0.038887008666992184, 0.03899811172485351, 0.03984848022460938, 0.03934566497802734, 0.039020832061767576, 0.038744640350341794, 0.03879196929931641, 0.03898863983154297, 0.03911478424072266, 0.03929702377319336, 0.03934207916259766, 0.0392355842590332, 0.03904499053955078, 0.03858848190307617, 0.03885615921020508, 0.03938159942626953, 0.039346176147460936, 0.03934822463989258, 0.039213054656982424, 0.04110038375854492, 0.03960105514526367, 0.03994617462158203, 0.039566398620605465, 0.039107391357421875, 0.03903807830810547, 0.03914854431152344, 0.039222335815429686, 0.03953871917724609, 0.04079644775390625, 0.03949631881713867, 0.039280769348144534, 0.03933171081542969, 0.03909142303466797, 0.03944502258300781, 0.03941596984863281, 0.03946710586547852, 0.03918643188476562, 0.03942412948608399, 0.03976793670654297, 0.039441505432128904, 0.03958025741577149, 0.039907646179199216, 0.03929679870605469, 0.03918467330932617, 0.03928057479858398, 0.03941785430908203, 0.03976297760009766, 0.039527393341064455, 0.039005374908447264, 0.03915433502197266, 0.03923369598388672, 0.03918643188476562, 0.03942588806152344, 0.03936886215209961, 0.039561088562011716, 0.03941616058349609, 0.039497440338134765, 0.03928684616088867, 0.0391596794128418, 0.040044353485107424, 0.0392457275390625, 0.0390742073059082, 0.039175552368164064, 0.03931107330322266, 0.03904940795898437, 0.03927308654785156, 0.03939952087402344, 0.03917420959472656, 0.038981441497802735, 0.03937254333496094, 0.039239166259765625, 0.03926310348510742, 0.03949939346313477, 0.0393383674621582, 0.039293952941894535, 0.039247039794921876, 0.03938079833984375, 0.039351390838623046, 0.03938396835327149, 0.039366241455078124, 0.03952646255493164, 0.039211647033691406, 0.03941142272949219, 0.039617729187011716, 0.0404797134399414, 0.039255455017089845, 0.03899356842041016, 0.039248001098632815, 0.03914425659179688, 0.03906150436401367, 0.039271678924560544, 0.03952217483520508, 0.03930976104736328, 0.039116287231445314, 0.03915980911254883, 0.03905795288085938, 0.03904751968383789, 0.03890796661376953, 0.03926947021484375, 0.039011199951171874, 0.039124160766601565, 0.0391627197265625, 0.03888300704956055, 0.039747486114501955, 0.03910284805297851, 0.039400894165039065, 0.03897398376464844, 0.039007423400878906, 0.0393260498046875, 0.0392320327758789, 0.03913318252563477, 0.03916799926757813, 
0.03902620697021485, 0.038991905212402346, 0.03925030517578125, 0.039207134246826175, 0.03907683181762695, 0.03905625534057617, 0.038983680725097655, 0.039110111236572265, 0.03942646408081055, 0.039011489868164065, 0.03916873550415039, 0.03923574447631836, 0.03927452850341797, 0.039182464599609376, 0.039224575042724606, 0.039510719299316405, 0.03910860824584961, 0.03968000030517578, 0.03970659255981445, 0.04000732803344727, 0.039790977478027345, 0.04062822341918945, 0.03998112106323242, 0.03959782409667969, 0.04067929458618164, 0.041417022705078126, 0.03974553680419922, 0.04123017501831055, 0.03938028717041016, 0.03965407943725586, 0.039565471649169924, 0.03946495819091797, 0.03976396942138672, 0.03959603118896484, 0.03953571319580078]",tokens/s,25.449090518552698,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,873.938944,655.294464,0.0,260.046848,258.555392,s,1,7.4411923828125,7.4411923828125,0.0,7.4411923828125,7.4411923828125,7.4411923828125,7.4411923828125,[7.4411923828125],,kWh,1.4626477424997118e-05,1.6062564338542084e-06,4.513892499995176e-06,2.07466263588465e-05,,MB,1319.145472,751.763456,0.0,341.835776,317.950464,s,18,0.19939472007751463,0.011077484448750811,0.0007115572758054694,0.010883920192718506,0.01117632646560669,0.01184773926734924,0.013511051759719844,"[0.010726367950439453, 0.010890496253967286, 0.010988991737365722, 0.010856703758239746, 0.01148083209991455, 0.011015359878540038, 0.010823936462402343, 0.010953760147094727, 0.010739520072937011, 0.010793663978576661, 0.010769696235656738, 0.011025983810424805, 0.010902239799499511, 0.010877344131469726, 0.01104582405090332, 0.010825599670410156, 0.010751520156860352, 0.0139268798828125]",tokens/s,23109.939913196507,kWh,3.150733660412048e-07,3.474662900105478e-08,1.616532970691219e-07,5.114732921113814e-07,tokens/kWh,500514892.8563252,MB,1333.055488,779.026432,0.0,369.098752,317.953024,s,18,10.063167541503903,0.5590648634168837,0.0031527165675258478,0.5581517944335938,0.5640977416992188,0.5648694519042968,0.5651240368652344,"[0.5586320190429688, 0.559837646484375, 0.5648132934570312, 0.5637910766601563, 0.56099658203125, 0.5651876831054687, 0.5630997924804687, 0.5564115600585937, 0.5565952758789062, 0.5572677612304687, 0.5553729248046875, 0.5580917358398437, 0.5545128784179687, 0.5589156494140625, 0.55782763671875, 0.5582118530273438, 0.5575255126953125, 0.55607666015625]",tokens/s,112.68817649343515,kWh,1.6115394765672077e-05,1.777027075129568e-06,6.047225126819507e-06,2.393964696762115e-05,tokens/kWh,2631617.754648126,,s,1134,10.054431803703311,0.008866341978574345,0.00013876498947876364,0.008839359760284424,0.008989321804046631,0.009089022159576415,0.00937534260749817,"[0.008696415901184081, 0.00890287971496582, 0.008814208030700684, 0.00887337589263916, 0.008897279739379883, 0.008929280281066895, 0.00889241600036621, 0.008814911842346191, 0.00882259178161621, 
0.008838272094726563, 0.009082719802856446, 0.008922016143798828, 0.008884448051452637, 0.008914719581604004, 0.00890060806274414, 0.008834912300109863, 0.008976575851440429, 0.008871904373168945, 0.008902463912963867, 0.008875359535217285, 0.008836159706115723, 0.008844927787780761, 0.008833184242248535, 0.008888319969177246, 0.00882688045501709, 0.008797375679016114, 0.008774463653564452, 0.008787839889526368, 0.00891654396057129, 0.008857184410095215, 0.00884335994720459, 0.008941439628601075, 0.00886620807647705, 0.008852160453796387, 0.008825056076049805, 0.008850527763366698, 0.008800864219665527, 0.008890368461608887, 0.008830975532531739, 0.008824831962585449, 0.008785152435302734, 0.00880297565460205, 0.008880224227905274, 0.008826144218444824, 0.008827615737915039, 0.008820735931396484, 0.008817919731140137, 0.008876799583435058, 0.00883471965789795, 0.00881481647491455, 0.008794143676757812, 0.008809951782226563, 0.008894207954406739, 0.008811391830444337, 0.00905136013031006, 0.008822976112365722, 0.008869664192199707, 0.008956735610961913, 0.008833024024963379, 0.008894463539123536, 0.008887616157531739, 0.008861568450927735, 0.008835424423217773, 0.00864463996887207, 0.008844544410705566, 0.008923423767089845, 0.00891539192199707, 0.00886137580871582, 0.008851648330688477, 0.008812704086303712, 0.00882588768005371, 0.009081567764282227, 0.008823007583618163, 0.008828384399414062, 0.008818976402282715, 0.008928735733032227, 0.008883999824523927, 0.008854528427124024, 0.00884547233581543, 0.008795519828796387, 0.008854240417480468, 0.008789183616638184, 0.009019968032836913, 0.008924351692199708, 0.008846143722534179, 0.008855648040771484, 0.008789248466491699, 0.00878275203704834, 0.008724224090576172, 0.008804351806640624, 0.008854592323303222, 0.008811327934265136, 0.008792192459106445, 0.00880844783782959, 0.008902463912963867, 0.008840895652770997, 0.008845919609069825, 0.008777631759643554, 0.008747008323669434, 0.008820735931396484, 0.008762592315673829, 0.008765727996826171, 0.008800640106201171, 0.008779359817504882, 0.008828991889953614, 0.008943967819213867, 0.00875327968597412, 0.008773280143737792, 0.008825471878051758, 0.008971487998962403, 0.0090251522064209, 0.009048959732055665, 0.009054207801818847, 0.008970239639282226, 0.008893952369689942, 0.008879743576049804, 0.008866687774658204, 0.009093119621276855, 0.008986623764038085, 0.008951807975769043, 0.008972288131713867, 0.009048383712768554, 0.009010304450988769, 0.009102080345153809, 0.009015232086181641, 0.009069472312927247, 0.009036352157592773, 0.00890499210357666, 0.008979840278625488, 0.00883148765563965, 0.008892448425292968, 0.008890175819396972, 0.008944767951965332, 0.008866687774658204, 0.008934816360473634, 0.008869695663452148, 0.0088439359664917, 0.0088919677734375, 0.008968768119812011, 0.008919103622436524, 0.008842687606811523, 0.008860159873962402, 0.008830975532531739, 0.008879648208618165, 0.008872415542602539, 0.008818079948425293, 0.008783647537231445, 0.008784704208374024, 0.008879199981689453, 0.008892671585083008, 0.009140928268432618, 0.008806367874145508, 0.008824383735656739, 0.008878527641296386, 0.008873984336853028, 0.009246496200561524, 0.00918950366973877, 0.009011296272277832, 0.008970080375671386, 0.008999072074890136, 0.009164799690246582, 0.00911359977722168, 0.00927948760986328, 0.00930799961090088, 0.00924454402923584, 0.00912332820892334, 0.009059103965759277, 0.009127936363220214, 0.008980480194091797, 0.009166303634643555, 0.008900511741638183, 0.009051936149597168, 
0.008858688354492187, 0.008837984085083008, 0.008901535987854004, 0.00894364833831787, 0.008996864318847657, 0.008888319969177246, 0.008937184333801269, 0.00890499210357666, 0.008953856468200684, 0.0089683837890625, 0.008858528137207031, 0.00911036777496338, 0.009080287933349609, 0.008854111671447755, 0.00881065559387207, 0.008826720237731933, 0.008921088218688965, 0.008660927772521972, 0.008849472045898437, 0.008801504135131836, 0.008815391540527344, 0.008850527763366698, 0.008889151573181151, 0.008833120346069336, 0.009223584175109864, 0.008841535568237305, 0.00893564796447754, 0.008924896240234376, 0.00901360034942627, 0.008869888305664063, 0.008970239639282226, 0.008904704093933105, 0.00908681583404541, 0.008882335662841797, 0.008865311622619628, 0.008970463752746582, 0.008945919990539552, 0.008898176193237305, 0.008923328399658204, 0.008933664321899414, 0.008928511619567871, 0.00894223976135254, 0.008912896156311035, 0.009047167778015137, 0.008901503562927246, 0.008908703804016113, 0.008929375648498536, 0.00900476837158203, 0.009005151748657226, 0.008982720375061036, 0.00887615966796875, 0.008824383735656739, 0.008961343765258789, 0.008887295722961425, 0.008938495635986327, 0.008920063972473144, 0.009129983901977539, 0.009041343688964844, 0.009002976417541504, 0.00898960018157959, 0.009161919593811034, 0.008932191848754883, 0.008873824119567872, 0.008865823745727539, 0.008906368255615235, 0.008888480186462402, 0.008931327819824218, 0.008931327819824218, 0.009047776222229005, 0.009058591842651367, 0.008966143608093263, 0.008860735893249512, 0.008909760475158691, 0.009124896049499513, 0.009132512092590332, 0.008905216217041016, 0.009025535583496093, 0.009060352325439454, 0.008881312370300293, 0.008835455894470216, 0.008610112190246582, 0.008862751960754395, 0.008855999946594239, 0.0088721923828125, 0.008881664276123047, 0.008894911766052246, 0.008886591911315918, 0.009281503677368163, 0.008832063674926758, 0.008765727996826171, 0.008897215843200683, 0.008869088172912598, 0.008942208290100097, 0.008964287757873534, 0.008867839813232421, 0.008835040092468261, 0.008800479888916015, 0.0088220796585083, 0.009029120445251464, 0.009063360214233398, 0.008986111640930175, 0.008921152114868163, 0.00890931224822998, 0.008851455688476563, 0.008818047523498535, 0.008752832412719727, 0.008813504219055177, 0.008757247924804687, 0.008844927787780761, 0.008800928115844726, 0.008863871574401855, 0.00876643180847168, 0.008747648239135743, 0.00881980800628662, 0.008874912261962891, 0.008779775619506837, 0.008857600212097168, 0.008992639541625977, 0.008945088386535644, 0.008954015731811524, 0.008911231994628906, 0.009695391654968261, 0.008935423851013183, 0.008964096069335938, 0.00892080020904541, 0.008885760307312012, 0.00885206413269043, 0.008826175689697265, 0.008816639900207519, 0.008870911598205567, 0.008902784347534179, 0.008931072235107421, 0.008888480186462402, 0.008830816268920898, 0.008738816261291504, 0.008994815826416015, 0.008809887886047363, 0.00885206413269043, 0.008876031875610351, 0.008976384162902832, 0.008904704093933105, 0.009381888389587402, 0.008841216087341308, 0.008732704162597657, 0.009003392219543457, 0.008978240013122559, 0.009071999549865723, 0.008950143814086915, 0.008972543716430664, 0.008873984336853028, 0.008843263626098634, 0.00884768009185791, 0.008969311714172363, 0.00904368019104004, 0.008938336372375488, 0.008890175819396972, 0.008972512245178223, 0.008988672256469727, 0.008986623764038085, 0.009901920318603515, 0.009130144119262695, 0.008978431701660156, 
0.008885631561279297, 0.00889510440826416, 0.009031935691833496, 0.00886143970489502, 0.008861696243286133, 0.00887615966796875, 0.010073984146118164, 0.008867839813232421, 0.00887993621826172, 0.008902463912963867, 0.008865856170654296, 0.00888044834136963, 0.008859295845031739, 0.008950112342834472, 0.009028863906860352, 0.008896415710449218, 0.008811552047729493, 0.008777536392211915, 0.008837216377258301, 0.00878816032409668, 0.008865119934082032, 0.01015436840057373, 0.010173983573913574, 0.00917955207824707, 0.008904864311218261, 0.008853407859802246, 0.008812543869018554, 0.008903008460998535, 0.008727392196655274, 0.008933343887329102, 0.008815648078918457, 0.00880620765686035, 0.00878819179534912, 0.008779552459716797, 0.008841312408447266, 0.008770912170410157, 0.008935999870300294, 0.008761343955993652, 0.00875216007232666, 0.008903072357177735, 0.008790592193603515, 0.008736767768859864, 0.008779328346252441, 0.008786368370056152, 0.008630463600158692, 0.008860159873962402, 0.008972384452819825, 0.008847104072570802, 0.00886614418029785, 0.00919961643218994, 0.00891651153564453, 0.009375712394714356, 0.009331071853637695, 0.008909184455871582, 0.008920831680297852, 0.008959391593933105, 0.008946271896362304, 0.009013248443603515, 0.008830911636352539, 0.008806719779968261, 0.008791808128356934, 0.008867839813232421, 0.00881049633026123, 0.00909721565246582, 0.009187328338623046, 0.009062623977661133, 0.009024831771850586, 0.009107616424560547, 0.009240896224975585, 0.009114975929260255, 0.008986495971679687, 0.008944128036499023, 0.008945152282714844, 0.008964799880981445, 0.008900704383850098, 0.008937472343444825, 0.008847104072570802, 0.009066752433776856, 0.008882176399230958, 0.008816896438598633, 0.00878985595703125, 0.008843168258666993, 0.008861536026000977, 0.00880832004547119, 0.009095071792602539, 0.008870271682739258, 0.008849408149719238, 0.008889375686645508, 0.008815263748168945, 0.008830880165100098, 0.008839327812194825, 0.008911104202270507, 0.008871935844421386, 0.00898252773284912, 0.00882688045501709, 0.008835071563720704, 0.008953248023986816, 0.008790623664855958, 0.008804351806640624, 0.00890880012512207, 0.008912639617919922, 0.008887999534606934, 0.008995391845703126, 0.008855551719665527, 0.008896512031555176, 0.008839167594909669, 0.008886272430419923, 0.008570816040039063, 0.008754048347473144, 0.00875820827484131, 0.0088406400680542, 0.008876640319824219, 0.008798175811767579, 0.008783072471618652, 0.008784671783447266, 0.008843263626098634, 0.008832544326782226, 0.008906368255615235, 0.008801119804382324, 0.008916383743286132, 0.008843680381774903, 0.008783424377441406, 0.008764320373535157, 0.0088570556640625, 0.008782079696655273, 0.008806303977966308, 0.008804415702819824, 0.00876483154296875, 0.008759200096130371, 0.008909536361694335, 0.008908927917480469, 0.008790047645568847, 0.00873686408996582, 0.008828672409057617, 0.008906784057617188, 0.008861120223999024, 0.008777503967285155, 0.008738592147827148, 0.00874953556060791, 0.008783967971801757, 0.008858016014099121, 0.008812704086303712, 0.008800095558166503, 0.00880793571472168, 0.008786432266235352, 0.008892671585083008, 0.009213312149047852, 0.008842656135559082, 0.008907744407653809, 0.008864831924438477, 0.008817600250244141, 0.008762687683105468, 0.008850111961364746, 0.00873846435546875, 0.008929280281066895, 0.008813247680664063, 0.008815744400024413, 0.008996416091918945, 0.008857664108276367, 0.008794015884399414, 0.008846464157104493, 0.008774815559387207, 0.008858336448669433, 
0.008853471755981446, 0.008777759552001952, 0.00876905632019043, 0.008755680084228515, 0.008853504180908203, 0.008831232070922852, 0.008848480224609375, 0.008577568054199219, 0.008760767936706543, 0.008821727752685547, 0.008799263954162598, 0.008934368133544921, 0.009063712120056152, 0.00890339183807373, 0.008826848030090331, 0.008939552307128906, 0.008970239639282226, 0.008859519958496093, 0.008847647666931153, 0.008849023818969726, 0.008849120140075684, 0.008822784423828126, 0.008770048141479492, 0.008783871650695801, 0.008840736389160156, 0.008827712059020996, 0.008863200187683105, 0.008845503807067872, 0.009011103630065917, 0.008814687728881837, 0.00880031967163086, 0.008890111923217774, 0.008757439613342285, 0.008820735931396484, 0.008773632049560547, 0.008839167594909669, 0.008771583557128907, 0.008755200386047364, 0.008740863800048827, 0.00886518383026123, 0.00885001564025879, 0.008832832336425782, 0.00876153564453125, 0.008847423553466797, 0.008990655899047852, 0.008824319839477538, 0.008827391624450684, 0.008742176055908202, 0.008733344078063966, 0.008767552375793457, 0.008871935844421386, 0.00877785587310791, 0.008779647827148438, 0.008773504257202148, 0.008834367752075195, 0.008773759841918945, 0.008778240203857422, 0.008749343872070313, 0.008885791778564453, 0.008815072059631347, 0.008717951774597167, 0.008747296333312988, 0.008828479766845703, 0.00875766372680664, 0.008846464157104493, 0.009016223907470703, 0.008859135627746583, 0.008773695945739747, 0.008867679595947265, 0.00889254379272461, 0.00863920021057129, 0.008769344329833985, 0.008857919692993163, 0.008896512031555176, 0.00894156837463379, 0.008899999618530273, 0.009499072074890137, 0.008808128356933594, 0.008757344245910645, 0.008849791526794434, 0.008762368202209473, 0.008765439987182617, 0.00886844825744629, 0.008785951614379882, 0.008777600288391113, 0.008788479804992675, 0.008761311531066894, 0.008732928276062011, 0.008785696029663085, 0.008835136413574219, 0.008828351974487305, 0.008825247764587402, 0.008885600090026856, 0.008751487731933594, 0.009046112060546875, 0.008783935546875, 0.008712703704833985, 0.008728287696838378, 0.00872047996520996, 0.008795295715332031, 0.008835840225219726, 0.008801695823669433, 0.008720928192138672, 0.008720735549926757, 0.008759008407592774, 0.00874931240081787, 0.008945343971252441, 0.0088373441696167, 0.00878112030029297, 0.008938112258911134, 0.008949664115905762, 0.008801312446594238, 0.008770527839660645, 0.008891807556152344, 0.008792672157287598, 0.008763199806213378, 0.008794303894042968, 0.008757247924804687, 0.008792287826538086, 0.008924960136413574, 0.008841440200805665, 0.008828703880310058, 0.008828927993774414, 0.008843296051025391, 0.008871904373168945, 0.009116800308227539, 0.008915455818176269, 0.008954239845275878, 0.008984512329101562, 0.008844511985778809, 0.008819552421569824, 0.008830975532531739, 0.008928576469421386, 0.008539392471313476, 0.008749823570251464, 0.00880844783782959, 0.008757023811340333, 0.008847071647644042, 0.008870400428771973, 0.008867839813232421, 0.008812543869018554, 0.008755200386047364, 0.008787967681884766, 0.008971424102783203, 0.008829119682312012, 0.008770303726196289, 0.008722335815429687, 0.00871833610534668, 0.008798208236694336, 0.008830975532531739, 0.008730624198913574, 0.00870809555053711, 0.008744959831237792, 0.008793248176574706, 0.008810463905334473, 0.009030207633972168, 0.008826848030090331, 0.008778079986572266, 0.008894463539123536, 0.008752960205078125, 0.008726719856262208, 0.0087073917388916, 
0.008811200141906738, 0.008825984001159668, 0.008813535690307617, 0.008815903663635254, 0.008796287536621093, 0.008794816017150878, 0.008882271766662597, 0.008862527847290039, 0.008805120468139648, 0.008767647743225098, 0.008742912292480469, 0.008901727676391602, 0.008842047691345214, 0.008734496116638183, 0.008737024307250976, 0.008753215789794922, 0.008816703796386719, 0.008912832260131835, 0.008795392036437989, 0.008844032287597656, 0.008763168334960937, 0.008839391708374023, 0.008873791694641113, 0.008906944274902344, 0.008930720329284669, 0.008785663604736328, 0.008827743530273437, 0.008775775909423827, 0.008730143547058105, 0.008912991523742676, 0.008868127822875977, 0.008863743782043456, 0.008822784423828126, 0.008804351806640624, 0.009036288261413575, 0.008863776206970216, 0.008851072311401368, 0.00883897590637207, 0.00878441619873047, 0.008890624046325684, 0.008850527763366698, 0.00881116771697998, 0.008814592361450196, 0.008930848121643067, 0.008802783966064454, 0.008787103652954101, 0.00877455997467041, 0.008758336067199708, 0.008917344093322755, 0.008733216285705567, 0.008816287994384766, 0.008773759841918945, 0.008700127601623536, 0.008742912292480469, 0.00887507152557373, 0.008868800163269044, 0.008837151527404784, 0.008726495742797852, 0.008720383644104004, 0.008726655960083008, 0.009335968017578125, 0.008852191925048828, 0.009571807861328125, 0.00894825553894043, 0.008861696243286133, 0.008806528091430664, 0.00880339241027832, 0.008902815818786622, 0.008870559692382813, 0.00881980800628662, 0.008767775535583496, 0.008821632385253906, 0.008794048309326171, 0.008863519668579102, 0.009127967834472657, 0.00897433567047119, 0.008904704093933105, 0.008919136047363281, 0.0088306884765625, 0.008873472213745117, 0.008800959587097168, 0.00880246353149414, 0.008867903709411621, 0.008821855545043946, 0.00878985595703125, 0.00889737606048584, 0.008783935546875, 0.008896448135375977, 0.008779392242431641, 0.008753536224365234, 0.008722784042358399, 0.0087325439453125, 0.008810272216796875, 0.008820480346679687, 0.008768863677978515, 0.008825632095336914, 0.008798336029052735, 0.008509440422058106, 0.008810272216796875, 0.008795552253723145, 0.008790399551391602, 0.008730143547058105, 0.008757247924804687, 0.008852448463439942, 0.008802240371704101, 0.008777503967285155, 0.008746527671813964, 0.008747424125671387, 0.008808192253112792, 0.008816384315490722, 0.008801055908203124, 0.00871014404296875, 0.008714240074157715, 0.00882595157623291, 0.008733599662780762, 0.008760479927062989, 0.008766304016113282, 0.008740863800048827, 0.008840479850769044, 0.008774368286132813, 0.008797823905944824, 0.008886655807495118, 0.008748096466064453, 0.008790431976318359, 0.008753664016723632, 0.008749088287353516, 0.008722432136535644, 0.008814592361450196, 0.008847359657287598, 0.008859583854675293, 0.008883616447448731, 0.00882960033416748, 0.008766752243041992, 0.008829248428344727, 0.008864159584045411, 0.008763392448425293, 0.008754336357116699, 0.008733087539672852, 0.008787584304809571, 0.008844096183776856, 0.008914143562316894, 0.008769920349121094, 0.008786175727844238, 0.00880246353149414, 0.008771039962768555, 0.008755040168762208, 0.008786751747131347, 0.008742848396301269, 0.00881657600402832, 0.00878003215789795, 0.008761119842529298, 0.00871228790283203, 0.008798080444335938, 0.008832415580749512, 0.008821344375610352, 0.009197376251220702, 0.00874720001220703, 0.008817791938781738, 0.00884006404876709, 0.008943455696105957, 0.008640512466430664, 0.00880844783782959, 
0.00894156837463379, 0.008839167594909669, 0.008965408325195313, 0.008848095893859863, 0.008749055862426757, 0.008927231788635253, 0.008881759643554688, 0.008740384101867676, 0.00885043239593506, 0.008740351676940919, 0.008756832122802734, 0.008844063758850098, 0.00881049633026123, 0.008834112167358398, 0.008965056419372558, 0.00880454444885254, 0.008935232162475586, 0.008867584228515624, 0.008849663734436035, 0.008807583808898926, 0.008853952407836914, 0.008790207862854003, 0.008757696151733399, 0.009234208106994629, 0.008841183662414551, 0.009639967918395996, 0.009012895584106445, 0.008845024108886719, 0.00890124797821045, 0.008851455688476563, 0.008785920143127441, 0.008766464233398438, 0.008796416282653808, 0.00885427188873291, 0.008900575637817383, 0.008753087997436524, 0.008732128143310546, 0.008752896308898925, 0.008816864013671875, 0.008876704216003417, 0.009280896186828614, 0.008807040214538575, 0.008794112205505371, 0.008874143600463867, 0.008858688354492187, 0.008899552345275879, 0.008793696403503418, 0.008771807670593261, 0.008820735931396484, 0.008758432388305664, 0.008858464241027831, 0.009440640449523925, 0.008871904373168945, 0.00880463981628418, 0.008740544319152832, 0.008766143798828126, 0.008745247840881348, 0.00883683204650879, 0.008879679679870606, 0.008825568199157715, 0.008845024108886719, 0.008599648475646973, 0.008887136459350586, 0.008775039672851563, 0.008849920272827149, 0.008818816184997559, 0.008841183662414551, 0.00886787223815918, 0.00883414363861084, 0.008862624168395996, 0.008796256065368652, 0.008818592071533203, 0.008947711944580078, 0.00887980842590332, 0.008816127777099609, 0.008866463661193848, 0.008833087921142579, 0.008877984046936035, 0.008818592071533203, 0.008771871566772461, 0.008828415870666503, 0.008894335746765137, 0.008822848320007324, 0.008979007720947266, 0.008754207611083984, 0.00876028823852539, 0.008857248306274414, 0.008870240211486816, 0.008853407859802246, 0.008785887718200684, 0.008789407730102538, 0.008847807884216309, 0.008787327766418458, 0.008984767913818359, 0.008788736343383789, 0.008764800071716308, 0.008834783554077148, 0.008805120468139648, 0.00875113582611084, 0.008767200469970702, 0.008792448043823241, 0.008843263626098634, 0.008822784423828126, 0.009174495697021485, 0.008815072059631347, 0.008947903633117676, 0.008968064308166504, 0.008839167594909669, 0.008789119720458984, 0.008801088333129883, 0.008878144264221191, 0.008823904037475586, 0.008833951950073242, 0.008828512191772461, 0.008822463989257812, 0.0088951997756958, 0.008970239639282226, 0.008886272430419923, 0.008983584403991699, 0.009073599815368653, 0.008810527801513672, 0.008812000274658203, 0.008790464401245117, 0.008831071853637695, 0.008551487922668457, 0.008846272468566894, 0.008812543869018554, 0.008817888259887696, 0.008800224304199218, 0.008841312408447266, 0.008819135665893554, 0.008833184242248535, 0.008832480430603027, 0.008993184089660645, 0.008876288414001465, 0.008816415786743165, 0.009124064445495605, 0.008947839736938476, 0.008978464126586915, 0.008940544128417969, 0.008813407897949219, 0.008752703666687012, 0.00883148765563965, 0.008868895530700683, 0.008844191551208497, 0.00878326416015625, 0.008734368324279785, 0.008739775657653808, 0.008872032165527344, 0.008826208114624024, 0.008905280113220214, 0.00891113567352295, 0.008803839683532716, 0.008798591613769531, 0.008848896026611328, 0.00874672031402588, 0.008736384391784669, 0.00880742359161377, 0.008839167594909669, 0.008808639526367188, 0.00878163242340088, 0.00915065574645996, 
0.008877535820007325, 0.008784223556518554, 0.008730112075805664, 0.008794912338256836, 0.008828160285949706, 0.008864224433898926, 0.008730624198913574, 0.008801600456237794, 0.008786144256591797, 0.008819168090820312, 0.008798208236694336, 0.009343168258666993, 0.008835968017578124, 0.00907260799407959, 0.009374591827392578, 0.008831071853637695, 0.008866080284118652, 0.008912608146667481, 0.008919072151184083, 0.008803872108459473, 0.008769856452941895, 0.008740127563476563, 0.008739839553833008, 0.00888815975189209, 0.008801664352416992, 0.008638688087463379, 0.008779904365539552, 0.008810015678405761, 0.008840864181518554, 0.008870719909667969, 0.00879753589630127, 0.00882755184173584, 0.008829119682312012, 0.008814687728881837, 0.00894761562347412, 0.008859295845031739, 0.00883523178100586, 0.008859647750854491, 0.008820063591003418, 0.008827520370483398, 0.008820832252502442, 0.00884553623199463, 0.008838272094726563, 0.00886240005493164, 0.008859552383422852, 0.008845312118530273, 0.008785823822021484, 0.00890384006500244, 0.008821696281433105, 0.008822784423828126, 0.008737024307250976, 0.008772768020629883, 0.008743616104125976, 0.008932255744934081, 0.008796223640441895, 0.00875001621246338, 0.008773183822631836, 0.008718784332275391, 0.008828927993774414, 0.008780927658081054, 0.008731648445129395, 0.008760640144348145, 0.008747584342956543, 0.008764543533325196, 0.008775615692138672, 0.008885184288024902, 0.00880624008178711, 0.00879849624633789, 0.008777600288391113, 0.00880838394165039, 0.008773152351379394, 0.008731264114379883, 0.008828831672668456, 0.008813632011413575, 0.00882579231262207, 0.008789248466491699, 0.008773951530456544, 0.008763872146606445, 0.008826592445373534, 0.008936032295227051, 0.008917887687683106, 0.008903712272644042, 0.00886143970489502, 0.008935423851013183, 0.009125887870788574, 0.009605119705200196, 0.009017696380615234, 0.009166175842285157, 0.0087193603515625, 0.008868864059448242, 0.00880620765686035, 0.008820927619934082, 0.008824864387512206, 0.008843232154846191, 0.00892518424987793, 0.008889439582824708, 0.008778335571289063, 0.008844960212707519, 0.008774016380310059, 0.008775360107421875, 0.008723039627075196, 0.008768575668334961, 0.008801216125488282, 0.009195232391357422, 0.008799967765808105, 0.008796704292297364, 0.008781855583190917, 0.008905823707580567, 0.008811424255371094, 0.008771583557128907, 0.008822336196899414, 0.008902079582214355, 0.008829216003417968, 0.008770272254943848, 0.008764960289001466, 0.00876147174835205, 0.008742976188659667, 0.008816767692565917, 0.008761216163635253, 0.008745247840881348, 0.008800224304199218, 0.008785663604736328, 0.008821023941040038, 0.008783840179443359, 0.008863648414611817, 0.008811871528625488, 0.008796192169189454, 0.008825599670410156, 0.008804351806640624, 0.008793215751647949, 0.008770432472229004, 0.008859423637390137, 0.008881759643554688, 0.008894559860229492, 0.008737312316894532, 0.008769248008728027, 0.008778335571289063, 0.008863264083862305, 0.008815936088562012, 0.008842016220092773, 0.008832768440246582, 0.008895872116088867, 0.008864704132080079, 0.008781824111938476, 0.008865983963012695, 0.00885091209411621, 0.008851936340332031, 0.008806400299072266, 0.00878707218170166, 0.008847135543823242, 0.00875823974609375]",tokens/s,112.78608499610277,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4293.066752,4878.958592,0.0,4483.710976,4465.672704,s,1,10.4649501953125,10.4649501953125,0.0,10.4649501953125,10.4649501953125,10.4649501953125,10.4649501953125,[10.4649501953125],,kWh,0.0001039129356708372,1.1454931439208603e-05,3.2453081518002413e-05,0.00014782094862804822,,MB,2150.473728,5302.583296,0.0,4892.655616,4837.669376,s,10,1.8069629058837893,0.18069629058837888,0.00038065052024590545,0.18078084564208985,0.18096441345214842,0.1810408920288086,0.18110207489013672,"[0.18040179443359375, 0.17968453979492188, 0.18072915649414062, 0.1807381134033203, 0.18081753540039064, 0.18074415588378906, 0.18085792541503906, 0.18111737060546876, 0.18092489624023436, 0.18094741821289062]",tokens/s,1416.7418665121404,kWh,5.291679808333072e-06,5.835758006887894e-07,3.5194075774285953e-06,9.394663186450456e-06,tokens/kWh,27249513.358735252,MB,2154.483712,5470.355456,0.0,5060.427776,5014.227968,s,10,15.805345214843749,1.5805345214843751,0.0018606053167179034,1.580326416015625,1.5817849731445313,1.583616668701172,1.5850820251464843,"[1.5804212646484375, 1.5793768310546874, 1.578682373046875, 1.57875439453125, 1.580556884765625, 1.5811287841796875, 1.5802315673828125, 1.5793668212890626, 1.5854483642578125, 1.5813779296875]",tokens/s,39.85993291739867,kWh,4.620386784125182e-05,5.096016547766361e-06,3.064478642057143e-05,8.194467080958962e-05,tokens/kWh,768811.4355403255,,s,630,15.80259872817993,0.02508349004473005,0.00030434212382670045,0.02506243133544922,0.0253934139251709,0.025437215518951416,0.025531290798187257,"[0.025378976821899414, 0.02512076759338379, 0.024822816848754883, 0.024773056030273438, 0.024729824066162108, 0.02475449562072754, 0.02472492790222168, 0.024762752532958985, 0.02472979164123535, 0.024735807418823242, 0.024715200424194336, 0.02478665542602539, 0.02487286376953125, 0.0248590087890625, 0.024809728622436522, 0.02471443176269531, 0.024785087585449218, 0.02492608070373535, 0.024797439575195312, 0.02476643180847168, 0.024733983993530273, 0.02481113624572754, 0.024938880920410158, 0.024821983337402344, 0.024854207992553713, 0.024904960632324218, 0.024951648712158204, 0.024877056121826172, 0.02660883140563965, 0.0250130558013916, 0.024995840072631836, 0.024965343475341798, 0.02494438362121582, 0.024997119903564454, 0.02495158386230469, 0.02497331237792969, 0.02511052894592285, 0.02531443214416504, 0.025232255935668944, 0.025182207107543944, 0.025568607330322266, 0.02527299118041992, 0.025204320907592774, 0.02526025581359863, 0.025254079818725586, 0.025157663345336916, 0.02517398452758789, 0.0252127685546875, 0.02525609588623047, 0.025364320755004884, 0.025487520217895507, 0.025272319793701172, 0.025306848526000975, 0.025264415740966797, 0.025956064224243163, 0.025404735565185545, 0.02520124816894531, 0.025223552703857424, 0.02531942367553711, 0.025268192291259765, 
0.025228960037231445, 0.025341312408447267, 0.02541472053527832, 0.025241567611694337, 0.024945856094360352, 0.024897567749023436, 0.024748863220214842, 0.024667903900146483, 0.02466022491455078, 0.024772607803344726, 0.024819135665893555, 0.024816192626953126, 0.024796703338623046, 0.024764896392822266, 0.02493440055847168, 0.02511193656921387, 0.024814207077026366, 0.0247459831237793, 0.024774816513061522, 0.024821599960327147, 0.024816831588745116, 0.024885248184204102, 0.0248855037689209, 0.024975423812866212, 0.02500249671936035, 0.024913919448852538, 0.024903615951538085, 0.024917728424072267, 0.024973344802856446, 0.024973119735717773, 0.024923648834228516, 0.024953855514526366, 0.025001983642578125, 0.02494054412841797, 0.024975231170654297, 0.025273759841918944, 0.025061439514160157, 0.02496575927734375, 0.025009567260742188, 0.025055776596069335, 0.02515065574645996, 0.025077856063842774, 0.025155872344970704, 0.02520323181152344, 0.025324735641479492, 0.02512361526489258, 0.025122848510742188, 0.025198591232299804, 0.025158912658691406, 0.025101055145263673, 0.02533580780029297, 0.025401344299316408, 0.025380800247192383, 0.025312448501586916, 0.02529974365234375, 0.025434207916259766, 0.025302080154418944, 0.02525062370300293, 0.02532364845275879, 0.025358335494995117, 0.02531839942932129, 0.025357311248779296, 0.02539107131958008, 0.025418975830078124, 0.025383743286132812, 0.02545254325866699, 0.024966751098632813, 0.024760704040527343, 0.02467840003967285, 0.024513824462890625, 0.024592735290527343, 0.024573312759399415, 0.02459075164794922, 0.024592927932739258, 0.0247807674407959, 0.024873151779174804, 0.024837343215942383, 0.024754880905151367, 0.02472563171386719, 0.024885120391845702, 0.024907167434692384, 0.024883808135986327, 0.024856576919555663, 0.024833951950073242, 0.02487715148925781, 0.024868831634521485, 0.02485865592956543, 0.024861696243286133, 0.02490671920776367, 0.02498966407775879, 0.025004095077514648, 0.02513100814819336, 0.02510211181640625, 0.025188575744628905, 0.0251146240234375, 0.025048095703125, 0.025091039657592774, 0.025102527618408203, 0.025066688537597658, 0.025172000885009767, 0.025217727661132814, 0.02516694450378418, 0.025074495315551757, 0.025147359848022462, 0.02509008026123047, 0.025082944869995117, 0.02503715133666992, 0.02510710334777832, 0.025407424926757814, 0.025047040939331053, 0.02502000045776367, 0.025257408142089845, 0.02537980842590332, 0.025324607849121095, 0.02524880027770996, 0.025276544570922852, 0.025280288696289063, 0.025384960174560548, 0.02524569511413574, 0.025341535568237306, 0.025313695907592772, 0.025329343795776366, 0.025327840805053712, 0.025438079833984373, 0.025393375396728514, 0.025405439376831054, 0.025364479064941405, 0.02533184051513672, 0.025382783889770506, 0.025343679428100587, 0.024797119140625, 0.024867263793945313, 0.024775999069213867, 0.024736127853393554, 0.02468176078796387, 0.024713279724121094, 0.02471388816833496, 0.024757440567016602, 0.02466694450378418, 0.02462918472290039, 0.024659296035766602, 0.02474671936035156, 0.024793088912963866, 0.024771808624267578, 0.024777055740356446, 0.024880735397338868, 0.024873823165893556, 0.024896831512451173, 0.024916416168212892, 0.02489779281616211, 0.024923231124877928, 0.024877920150756835, 0.02491935920715332, 0.02497203254699707, 0.024981727600097658, 0.02493417549133301, 0.024993759155273437, 0.025053440093994142, 0.025032672882080078, 0.025003360748291015, 0.025002464294433594, 0.02509823989868164, 0.025096288681030275, 0.025103328704833984, 
0.025267135620117186, 0.025264127731323242, 0.025302911758422853, 0.02534003257751465, 0.025204736709594725, 0.025181631088256835, 0.025241952896118164, 0.025413568496704102, 0.025181888580322265, 0.025207231521606446, 0.025184415817260743, 0.025153535842895508, 0.025223167419433593, 0.025296543121337892, 0.025239871978759765, 0.025118751525878905, 0.025165824890136718, 0.025154592514038086, 0.025210880279541017, 0.02527507209777832, 0.025329631805419923, 0.025337503433227538, 0.025407392501831053, 0.025328384399414063, 0.025370624542236327, 0.025447967529296876, 0.025432640075683594, 0.025331615447998047, 0.025241600036621094, 0.024887296676635744, 0.024879104614257814, 0.024840192794799806, 0.024758399963378905, 0.024823680877685547, 0.024785120010375975, 0.024743711471557617, 0.024919456481933593, 0.024916479110717774, 0.02481315231323242, 0.02479871940612793, 0.02476972770690918, 0.024794784545898438, 0.024768672943115234, 0.024802911758422853, 0.024751775741577147, 0.024813823699951172, 0.02484230422973633, 0.02475667190551758, 0.024737184524536132, 0.02498620796203613, 0.02507980728149414, 0.025030656814575194, 0.02489958381652832, 0.024887487411499022, 0.025031648635864257, 0.025060192108154297, 0.02502230453491211, 0.02502057647705078, 0.025036800384521486, 0.025038240432739257, 0.025024639129638673, 0.025037279129028322, 0.025093599319458006, 0.025098751068115235, 0.025091615676879883, 0.025108991622924806, 0.02540083122253418, 0.02540185546875, 0.025255935668945313, 0.025244895935058593, 0.02532841682434082, 0.025280511856079102, 0.02528665542602539, 0.02527177619934082, 0.025343679428100587, 0.02545136070251465, 0.025395200729370116, 0.025401344299316408, 0.02539491271972656, 0.02531068801879883, 0.025279296875, 0.025470975875854493, 0.025362432479858397, 0.025282400131225586, 0.0252642879486084, 0.025185407638549803, 0.025182432174682617, 0.025275039672851562, 0.02532966423034668, 0.02540345573425293, 0.025481151580810546, 0.02501180839538574, 0.024852575302124022, 0.024852800369262695, 0.024819711685180663, 0.024760351181030274, 0.02470908737182617, 0.024861791610717773, 0.024851360321044923, 0.024833696365356445, 0.024809919357299804, 0.02477609634399414, 0.024888992309570312, 0.02486966323852539, 0.024864288330078126, 0.024848928451538087, 0.024792383193969727, 0.024890047073364258, 0.025016319274902343, 0.024922111511230468, 0.024897695541381836, 0.02497724723815918, 0.024946687698364257, 0.0249036808013916, 0.02495052719116211, 0.02502681541442871, 0.024989696502685548, 0.0249036808013916, 0.024921791076660156, 0.025003744125366212, 0.0251312313079834, 0.02527680015563965, 0.024801279067993166, 0.024919071197509766, 0.025037792205810545, 0.025038848876953124, 0.025032543182373048, 0.02505686378479004, 0.024994304656982422, 0.02533510398864746, 0.025035520553588868, 0.02525814437866211, 0.025255775451660155, 0.02532761573791504, 0.02537676811218262, 0.025300991058349608, 0.025280511856079102, 0.025308767318725587, 0.025239967346191407, 0.02536038398742676, 0.025397247314453125, 0.025319232940673828, 0.025303232192993165, 0.02534809684753418, 0.025327072143554688, 0.025241439819335937, 0.02543996810913086, 0.025471744537353517, 0.025450719833374023, 0.025450496673583983, 0.025450496673583983, 0.025436159133911132, 0.025645055770874024, 0.025417728424072264, 0.02519584083557129, 0.024853376388549803, 0.02471299171447754, 0.024602527618408202, 0.02470911979675293, 0.02488047981262207, 0.024758047103881835, 0.02469171142578125, 0.024673887252807617, 0.024852767944335937, 
0.024847488403320312, 0.024800224304199217, 0.024755584716796876, 0.024848928451538087, 0.025014015197753908, 0.024940128326416015, 0.024796831130981446, 0.024888288497924804, 0.024899616241455078, 0.024936447143554686, 0.024830015182495117, 0.024905664443969727, 0.024987680435180664, 0.025010143280029297, 0.024954879760742187, 0.024832000732421877, 0.024995199203491213, 0.025078239440917968, 0.025075872421264647, 0.025032703399658202, 0.024997888565063478, 0.025370655059814454, 0.025220928192138673, 0.025079967498779297, 0.025176063537597656, 0.025167423248291014, 0.02510995292663574, 0.025068544387817384, 0.025223167419433593, 0.025217023849487305, 0.025190399169921874, 0.025183359146118165, 0.025145952224731444, 0.025112287521362305, 0.02513363265991211, 0.025145343780517578, 0.025177663803100585, 0.02513350486755371, 0.025264127731323242, 0.025455936431884766, 0.025160383224487305, 0.025343040466308593, 0.025334720611572267, 0.025290752410888673, 0.025438207626342774, 0.025323392868041993, 0.025313024520874024, 0.025512319564819336, 0.025490463256835936, 0.025475423812866212, 0.02551158332824707, 0.025393760681152344, 0.02542777633666992, 0.025227327346801758, 0.024924095153808595, 0.02494054412841797, 0.024767616271972655, 0.02473664093017578, 0.024749216079711915, 0.02478780746459961, 0.024753183364868165, 0.02473878479003906, 0.02473369598388672, 0.024725439071655274, 0.024729408264160157, 0.024796831130981446, 0.024789600372314452, 0.02478291130065918, 0.024776351928710937, 0.024803615570068358, 0.024903520584106446, 0.024850656509399414, 0.024923648834228516, 0.02490208053588867, 0.02487727928161621, 0.024864543914794923, 0.02492006492614746, 0.025056991577148437, 0.025069984436035156, 0.02515977668762207, 0.025044767379760743, 0.025036575317382813, 0.025045215606689455, 0.0251146240234375, 0.024978464126586913, 0.02498454475402832, 0.025012224197387696, 0.0251312313079834, 0.024981279373168946, 0.02513920021057129, 0.025206560134887694, 0.025180383682250975, 0.02515932846069336, 0.025123104095458985, 0.025366336822509765, 0.025260128021240235, 0.025128416061401367, 0.02529158401489258, 0.02526585578918457, 0.025292512893676757, 0.025264608383178712, 0.025208255767822266, 0.025213504791259767, 0.025250848770141603, 0.02517091178894043, 0.02521244812011719, 0.0252127685546875, 0.025248224258422852, 0.025308992385864256, 0.025443967819213868, 0.02550364875793457, 0.025434944152832033, 0.02531942367553711, 0.025421024322509766, 0.02540355110168457, 0.025449087142944336, 0.025401344299316408, 0.025222272872924806, 0.025035392761230468, 0.02483331108093262, 0.024686880111694336, 0.02468524742126465, 0.024731679916381834, 0.02470275115966797, 0.024680639266967775, 0.024814912796020508, 0.0248624324798584, 0.024946815490722658, 0.02492297554016113, 0.024803327560424804, 0.02482579231262207, 0.02485641670227051, 0.02493667221069336, 0.02484947204589844, 0.02482681655883789, 0.024825664520263673, 0.024903871536254882, 0.024928255081176756, 0.02506342315673828, 0.025024255752563476, 0.025147647857666017, 0.025051103591918946, 0.025002208709716797, 0.02502022361755371, 0.025142911911010743, 0.02499827194213867, 0.024870912551879884, 0.024867967605590822, 0.0250984001159668, 0.02512131118774414, 0.025196735382080077, 0.025206783294677734, 0.025247743606567383, 0.025118719100952147, 0.02515497589111328, 0.025328224182128906, 0.025316383361816405, 0.025227840423583985, 0.025268672943115234, 0.02524291229248047, 0.02968150329589844, 0.025079839706420897, 0.02509609603881836, 0.02511555290222168, 
0.025191871643066407, 0.02524790382385254, 0.02539094352722168, 0.02521343994140625, 0.02525539207458496, 0.025338239669799804, 0.02535238456726074, 0.02541366386413574, 0.02538265609741211, 0.025343391418457033, 0.02537353515625, 0.025319520950317382, 0.025340896606445312, 0.025539039611816406, 0.02551036834716797, 0.024875072479248046, 0.02473382377624512, 0.02472332763671875, 0.024762367248535155, 0.024809471130371095, 0.0247576961517334, 0.02487763214111328, 0.02490777587890625, 0.02486800003051758, 0.024957792282104492, 0.02487500762939453, 0.024829952239990235, 0.02485043144226074, 0.024903039932250976, 0.024824352264404298, 0.024780895233154295, 0.02478188705444336, 0.024804288864135743, 0.024774528503417968, 0.024946815490722658, 0.025026559829711914, 0.024949951171875, 0.024905824661254884, 0.024996448516845703, 0.024950912475585937, 0.024999584197998047, 0.025039199829101563, 0.02517100715637207, 0.02521183967590332, 0.025417728424072264, 0.025054336547851563, 0.025008319854736328, 0.025010879516601563, 0.025101472854614258, 0.025121631622314452, 0.025034751892089844, 0.025059328079223633, 0.025057279586791992, 0.025198591232299804, 0.025165824890136718, 0.02524470329284668, 0.02535113525390625, 0.025320608139038085, 0.025254751205444338, 0.025285984039306642, 0.025256607055664064, 0.025266176223754884, 0.025273504257202147, 0.025215391159057618, 0.025223264694213866, 0.02532796859741211, 0.025425504684448243, 0.025409055709838868, 0.02545724868774414, 0.025284704208374024, 0.025243839263916015, 0.02543939208984375, 0.025381727218627928, 0.025603328704833984, 0.02538390350341797, 0.025435583114624023, 0.025416032791137695, 0.025475103378295897]",tokens/s,39.86686056114015,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1261.113344,1247.674368,0.0,845.152256,799.789056,s,1,8.64376953125,8.64376953125,0.0,8.64376953125,8.64376953125,8.64376953125,8.64376953125,[8.64376953125],,kWh,3.170180405003672e-05,3.4896483212432986e-06,9.61389657999634e-06,4.4805348951276364e-05,,MB,1493.594112,1277.034496,0.0,859.83232,828.70272,s,10,0.3604922866821289,0.03604922866821289,7.667501844237113e-05,0.036047359466552735,0.03613456802368164,0.03615778007507324,0.03617634971618652,"[0.03618099212646484, 0.036036670684814455, 0.03612940979003906, 0.036058048248291015, 0.0360230712890625, 0.03589731216430664, 0.03596515274047852, 0.036079551696777346, 0.03602406311035156, 0.03609801483154297]",tokens/s,7101.400209035069,kWh,1.0841176879168056e-06,1.1955873401831323e-07,7.157073626888456e-07,1.9193837846239645e-06,tokens/kWh,133376139.80632548,MB,1518.391296,1277.034496,0.0,859.83232,828.70528,s,10,12.378177612304686,1.2378177612304688,0.010720349464337503,1.2367702026367189,1.2510011840820312,1.252628643798828,1.2539306115722657,"[1.228803466796875, 1.2438837890625, 1.245914306640625, 1.2362222900390625, 1.235802001953125, 1.22837890625, 
1.2506395263671874, 1.254256103515625, 1.237318115234375, 1.2169591064453125]",tokens/s,50.896021993878996,kWh,3.545936274124976e-05,3.910676059303261e-06,1.520016648111232e-05,5.457020528166535e-05,tokens/kWh,1154476.1408688875,,s,630,12.372183635711652,0.01963838672335186,0.0003693130777556063,0.01964168071746826,0.019957938957214358,0.020109158420562746,0.0206960510635376,"[0.01964355278015137, 0.019493919372558594, 0.01932080078125, 0.019351551055908203, 0.019304447174072266, 0.0192675838470459, 0.019603456497192383, 0.019322879791259767, 0.019316736221313476, 0.019496383666992186, 0.019472639083862306, 0.01929862403869629, 0.01920204734802246, 0.01949286460876465, 0.019407936096191406, 0.019740991592407227, 0.019413087844848635, 0.019357696533203125, 0.01929884719848633, 0.01922425651550293, 0.01911596870422363, 0.019097888946533203, 0.01912022399902344, 0.0193023681640625, 0.01908073616027832, 0.019210752487182618, 0.019095552444458007, 0.01902720069885254, 0.0191147518157959, 0.019224576950073242, 0.019386016845703125, 0.019404895782470705, 0.01937766456604004, 0.01966361618041992, 0.01966694450378418, 0.019598527908325194, 0.01972447967529297, 0.01959161567687988, 0.019652799606323244, 0.019722240447998047, 0.019656160354614257, 0.019755552291870117, 0.019475456237792968, 0.01964134407043457, 0.019683296203613282, 0.01982057571411133, 0.019869535446166993, 0.019578784942626954, 0.019591136932373045, 0.01960758399963379, 0.01962828826904297, 0.019812351226806642, 0.019570688247680663, 0.01964236831665039, 0.019570688247680663, 0.02012099266052246, 0.019638879776000977, 0.01955414390563965, 0.01948687934875488, 0.019505151748657225, 0.02004787254333496, 0.019869695663452147, 0.019851264953613282, 0.019775487899780272, 0.01985286331176758, 0.02006483268737793, 0.019834495544433593, 0.0198023681640625, 0.019935392379760743, 0.01995759963989258, 0.01978553581237793, 0.019812543869018553, 0.01998451232910156, 0.019893152236938477, 0.01993212890625, 0.019949567794799804, 0.01964771270751953, 0.01989468765258789, 0.019853311538696287, 0.019813919067382814, 0.019592031478881836, 0.01957683181762695, 0.01984511947631836, 0.019761152267456054, 0.019719776153564454, 0.019992992401123046, 0.019759103775024413, 0.01968886375427246, 0.019755615234375, 0.019582015991210937, 0.01959542465209961, 0.019671104431152345, 0.019607551574707033, 0.019583711624145506, 0.019641376495361327, 0.019861536026000978, 0.019582080841064452, 0.0196298885345459, 0.019605791091918946, 0.019340511322021484, 0.01963273620605469, 0.01976655960083008, 0.01974540710449219, 0.01946985626220703, 0.019460575103759765, 0.01951465606689453, 0.019424192428588866, 0.019493696212768554, 0.01954915237426758, 0.01968035125732422, 0.019680160522460938, 0.019578176498413084, 0.01967737579345703, 0.019428960800170897, 0.019594144821166993, 0.019734272003173826, 0.020154815673828125, 0.019816064834594728, 0.01971628761291504, 0.019757055282592775, 0.019562496185302734, 0.019707008361816405, 0.02068943977355957, 0.01982703971862793, 0.019736320495605468, 0.01969935989379883, 0.019734527587890623, 0.019777727127075196, 0.01964851188659668, 0.019568639755249022, 0.019525888442993165, 0.019843008041381834, 0.019670463562011718, 0.019740447998046876, 0.020573951721191405, 0.02032316780090332, 0.01990003204345703, 0.02005567932128906, 0.019677951812744142, 0.019742143630981444, 0.019685951232910157, 0.019704832077026366, 0.019724384307861328, 0.0196409912109375, 0.019732736587524415, 0.019596288681030274, 0.01964339256286621, 
0.019549247741699218, 0.019599935531616212, 0.01957484817504883, 0.01955583953857422, 0.0195533447265625, 0.019943071365356446, 0.019693439483642577, 0.01960976028442383, 0.01954150390625, 0.019774015426635743, 0.019525440216064453, 0.019541215896606446, 0.01954694366455078, 0.019533983230590822, 0.019521120071411133, 0.019444095611572267, 0.019453792572021483, 0.019652128219604492, 0.01956435203552246, 0.01980259132385254, 0.020029472351074218, 0.019896480560302736, 0.01970604705810547, 0.019815807342529298, 0.019747615814208985, 0.020385631561279295, 0.020709375381469726, 0.019785152435302735, 0.019999008178710937, 0.019921184539794922, 0.01983024024963379, 0.01982313537597656, 0.01984921646118164, 0.019911712646484375, 0.01974143981933594, 0.01989039993286133, 0.020011104583740235, 0.01979702377319336, 0.019698560714721678, 0.019685375213623048, 0.01962598419189453, 0.019957151412963867, 0.019914655685424804, 0.02009516716003418, 0.0196844482421875, 0.019738943099975585, 0.01981488037109375, 0.019683296203613282, 0.019865312576293946, 0.01970249557495117, 0.019662559509277342, 0.019783519744873048, 0.019486207962036133, 0.019337888717651366, 0.019625247955322264, 0.019932992935180666, 0.019831968307495118, 0.01971993637084961, 0.01985945510864258, 0.019824064254760743, 0.01962246322631836, 0.019637632369995116, 0.019612287521362303, 0.019641984939575197, 0.019701311111450195, 0.019614015579223633, 0.01963216018676758, 0.019523744583129884, 0.019611455917358397, 0.019632543563842773, 0.019763328552246093, 0.01963520050048828, 0.019629024505615236, 0.019713279724121093, 0.019598079681396485, 0.019863807678222656, 0.019541759490966797, 0.019550207138061524, 0.01943267250061035, 0.019272192001342774, 0.01927587127685547, 0.019216384887695313, 0.019253440856933594, 0.019238271713256837, 0.019279808044433595, 0.019921600341796877, 0.019701631546020507, 0.01958924865722656, 0.019592832565307618, 0.01957484817504883, 0.01977136039733887, 0.01958742332458496, 0.019509151458740236, 0.019461311340332032, 0.019567487716674804, 0.01948467254638672, 0.019535360336303712, 0.019801855087280273, 0.01971894454956055, 0.01961359977722168, 0.019550304412841796, 0.0194703369140625, 0.019353248596191405, 0.019345760345458984, 0.01943142318725586, 0.01950409507751465, 0.019494720458984375, 0.01956643295288086, 0.01969536018371582, 0.019499711990356446, 0.01948464012145996, 0.01937980842590332, 0.019377887725830076, 0.01928646469116211, 0.01929583930969238, 0.01952351951599121, 0.019239391326904297, 0.01907731246948242, 0.0191997127532959, 0.01927084732055664, 0.019311519622802736, 0.019334207534790038, 0.019311552047729493, 0.019332319259643554, 0.019473535537719727, 0.019383968353271483, 0.019499008178710937, 0.020068351745605468, 0.019555999755859376, 0.019577184677124024, 0.019726335525512697, 0.01985945510864258, 0.01987593650817871, 0.01988096046447754, 0.020136863708496093, 0.019887487411499025, 0.019747455596923827, 0.019804159164428712, 0.01982975959777832, 0.019823232650756837, 0.019855871200561523, 0.020778879165649414, 0.020010976791381835, 0.019763456344604493, 0.019834207534790038, 0.01992134475708008, 0.01995676803588867, 0.01993814468383789, 0.019945600509643554, 0.0196092472076416, 0.019781984329223633, 0.01969152069091797, 0.01964784049987793, 0.01957289505004883, 0.01957747268676758, 0.019697343826293946, 0.019760927200317382, 0.019638751983642577, 0.019713983535766602, 0.019697664260864257, 0.01994688034057617, 0.01939436721801758, 0.019243135452270507, 0.019176128387451172, 
0.01915395164489746, 0.019176416397094727, 0.019281919479370118, 0.01920595169067383, 0.019494176864624024, 0.01963926315307617, 0.019519487380981446, 0.019422431945800782, 0.019204896926879884, 0.01932464027404785, 0.019250623703002928, 0.02301590347290039, 0.020005247116088868, 0.019144704818725586, 0.019113088607788088, 0.019047296524047852, 0.01923276710510254, 0.019525632858276368, 0.019574783325195313, 0.019619359970092773, 0.019380319595336915, 0.019612031936645506, 0.019861503601074217, 0.019566848754882814, 0.019398624420166016, 0.019639680862426758, 0.019323135375976564, 0.019372095108032228, 0.01912022399902344, 0.01904640007019043, 0.019108095169067384, 0.019131807327270507, 0.019076448440551758, 0.018983936309814452, 0.019933183670043944, 0.019628032684326172, 0.019314464569091798, 0.019255136489868162, 0.019363616943359373, 0.019263103485107423, 0.019693792343139647, 0.019163904190063478, 0.019099647521972657, 0.019952896118164063, 0.019266304016113282, 0.01951478385925293, 0.019446367263793944, 0.01946009635925293, 0.01940275192260742, 0.019369983673095705, 0.019230688095092773, 0.01907459259033203, 0.019204608917236327, 0.019220575332641602, 0.019308448791503906, 0.019223743438720704, 0.019306751251220704, 0.019383903503417968, 0.0194815673828125, 0.020224063873291016, 0.019555648803710936, 0.019692447662353514, 0.019570144653320312, 0.019419391632080077, 0.020853919982910155, 0.01950601577758789, 0.019630048751831056, 0.019744672775268556, 0.019762943267822266, 0.01994745635986328, 0.019563135147094728, 0.019499008178710937, 0.019410144805908202, 0.019493663787841797, 0.02002943992614746, 0.019647903442382812, 0.019696416854858397, 0.019681087493896486, 0.019664896011352538, 0.019550016403198242, 0.023481599807739256, 0.019784639358520508, 0.01986355209350586, 0.01988319969177246, 0.019884864807128907, 0.019809951782226564, 0.019681631088256837, 0.01981820869445801, 0.019796255111694337, 0.01971785545349121, 0.019579168319702148, 0.01953388786315918, 0.01964847946166992, 0.019658720016479492, 0.019746015548706055, 0.01953001594543457, 0.01971251106262207, 0.019801952362060546, 0.01998863983154297, 0.019924991607666014, 0.02002124786376953, 0.01954316711425781, 0.019667839050292967, 0.0198734073638916, 0.01980863952636719, 0.01972012710571289, 0.019709312438964843, 0.02001744079589844, 0.019903135299682618, 0.01991267204284668, 0.019683103561401367, 0.019781152725219728, 0.020134368896484376, 0.019674623489379883, 0.019651071548461914, 0.01966694450378418, 0.01960960006713867, 0.019981760025024414, 0.01983545684814453, 0.019926944732666017, 0.02038387107849121, 0.019812511444091796, 0.01981145668029785, 0.02003424072265625, 0.02028758430480957, 0.020137887954711914, 0.019725631713867188, 0.01979667282104492, 0.019763200759887696, 0.01963007926940918, 0.01997337532043457, 0.02007731246948242, 0.020186336517333984, 0.019755424499511717, 0.019652992248535155, 0.01960140800476074, 0.019566144943237305, 0.020025440216064453, 0.019499359130859376, 0.019519487380981446, 0.019421375274658204, 0.019902048110961915, 0.02070140838623047, 0.020319839477539063, 0.01979433631896973, 0.019722335815429686, 0.01980191993713379, 0.02007219123840332, 0.019824991226196288, 0.01964031982421875, 0.019701759338378907, 0.019845279693603515, 0.019687551498413086, 0.01985708808898926, 0.019757087707519532, 0.019719839096069336, 0.020064607620239257, 0.02011871910095215, 0.01980601692199707, 0.019812351226806642, 0.019870719909667968, 0.020105215072631837, 0.020322303771972656, 0.019910783767700196, 
0.019904384613037108, 0.020025344848632814, 0.020379648208618165, 0.02047590446472168, 0.02032649612426758, 0.02011238479614258, 0.019865663528442382, 0.02003388786315918, 0.01992300796508789, 0.019927488327026368, 0.01987174415588379, 0.02069875144958496, 0.019881759643554688, 0.019825151443481445, 0.01996099281311035, 0.020478912353515625, 0.01970534324645996, 0.019741472244262696, 0.019652320861816407, 0.01960710334777832, 0.02007084846496582, 0.019853311538696287, 0.01970899200439453, 0.01973263931274414, 0.01965135955810547, 0.019466239929199217, 0.019678560256958008, 0.01967584037780762, 0.019765216827392577, 0.019697280883789064, 0.019663135528564454, 0.019395679473876954, 0.019329343795776367, 0.019284671783447265, 0.019354623794555666, 0.01944883155822754, 0.01923276710510254, 0.019161088943481445, 0.019023231506347656, 0.01916169548034668, 0.01928214454650879, 0.02058835220336914, 0.019640575408935546, 0.01948646354675293, 0.0194334716796875, 0.019331071853637697, 0.01980620765686035, 0.019396127700805663, 0.019456159591674804, 0.019450239181518555, 0.01940678405761719, 0.01972982406616211, 0.01959721565246582, 0.019647167205810546, 0.01928793525695801, 0.019290239334106445, 0.019415327072143555, 0.019680992126464843, 0.019865760803222655, 0.019467679977416993, 0.019524032592773438, 0.01961084747314453, 0.019591968536376955, 0.01965670394897461, 0.019779584884643556, 0.019931135177612306, 0.01981439971923828, 0.01989836883544922, 0.019755008697509766, 0.019674816131591798, 0.01976959991455078, 0.019679071426391602, 0.019830463409423828, 0.020010784149169923, 0.019765567779541016, 0.01971664047241211, 0.019601312637329102, 0.020119104385375976, 0.019816991806030273, 0.019752864837646485, 0.019666688919067383, 0.019642623901367187, 0.01965056037902832, 0.01963590431213379, 0.019711456298828124, 0.019935136795043946, 0.01983296012878418, 0.020069183349609374, 0.02002355194091797, 0.019601152420043944, 0.01966713523864746, 0.01993503952026367, 0.019738975524902343, 0.019686912536621092, 0.01954080009460449, 0.019531904220581056, 0.0194388484954834, 0.019628480911254884, 0.019567935943603516, 0.019541215896606446, 0.019865375518798828, 0.01989347267150879, 0.019741472244262696, 0.01936147117614746, 0.019234912872314453, 0.019213600158691405, 0.019274688720703124, 0.019539264678955077, 0.019337215423583985, 0.019358400344848634, 0.019343135833740234, 0.01949513626098633, 0.01923891258239746, 0.019146751403808594, 0.01919561576843262, 0.019136287689208983, 0.019214847564697265, 0.019029216766357424, 0.019083648681640624, 0.019130592346191407, 0.019647872924804688, 0.020357343673706056, 0.019277503967285156, 0.019063488006591797, 0.018980064392089845, 0.01898931121826172, 0.019034656524658203, 0.019023359298706053, 0.019058879852294923, 0.019067424774169922, 0.019166496276855467, 0.019284000396728517, 0.01912841606140137, 0.01901532745361328, 0.01903651237487793, 0.0189835205078125, 0.01904761505126953, 0.019138879776000976, 0.019464704513549806, 0.019294015884399413, 0.01920979118347168, 0.020285791397094726, 0.019143999099731444, 0.019368383407592775, 0.019134592056274415, 0.019013504028320312, 0.019011104583740234, 0.01908790397644043, 0.01900172805786133, 0.01898259162902832, 0.01887887954711914, 0.01902387237548828, 0.01941913604736328, 0.019726335525512697, 0.01949238395690918]",tokens/s,50.92067969162189,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,2226.302976,2551.119872,0.0,2155.872256,2032.413184,s,1,8.8302705078125,8.8302705078125,0.0,8.8302705078125,8.8302705078125,8.8302705078125,8.8302705078125,[8.8302705078125],,kWh,4.982181362083793e-05,5.4883185529186004e-06,1.5603901372000795e-05,7.091403354575732e-05,,MB,2278.109184,2827.943936,0.0,2418.016256,2279.563776,s,10,0.7768848495483398,0.07768848495483398,0.00018375020280323443,0.07763300704956055,0.07796125717163085,0.07802150688171387,0.07806970664978027,"[0.07808175659179688, 0.07794786834716796, 0.0775453109741211, 0.07749890899658203, 0.0775704345703125, 0.07762175750732422, 0.07768287658691406, 0.07777859497070312, 0.0775130844116211, 0.07764425659179687]",tokens/s,3295.211641066647,kWh,2.320956485185219e-06,2.5596015964888385e-07,1.5457397727461e-06,4.1226564175802024e-06,tokens/kWh,62095885.29093566,MB,2280.640512,2911.830016,0.0,2501.902336,2389.801984,s,10,13.968810546875003,1.3968810546875001,0.007208082407808386,1.3988131713867187,1.4043124389648438,1.4047796325683592,1.4051533874511717,"[1.4042086181640625, 1.399265380859375, 1.403767578125, 1.4041328125, 1.405246826171875, 1.3888050537109375, 1.389106201171875, 1.3983609619140625, 1.38671240234375, 1.3892047119140625]",tokens/s,45.10047565509713,kWh,4.043888521106479e-05,4.46003448440396e-06,2.2972029841453642e-05,6.787094953692238e-05,tokens/kWh,928232.1881429913,,s,630,13.961103496551505,0.022160481740557958,0.00038663754401263264,0.022070960044860838,0.022473354148864748,0.022643639183044434,0.023520151882171634,"[0.023783424377441405, 0.02206924819946289, 0.02187264060974121, 0.021822751998901366, 0.021869279861450194, 0.022290143966674805, 0.02199171257019043, 0.02190540885925293, 0.02216671943664551, 0.022102272033691406, 0.021979711532592774, 0.021938175201416017, 0.021950464248657226, 0.021906784057617187, 0.022032064437866213, 0.021890016555786134, 0.022204160690307617, 0.021960384368896486, 0.022028608322143553, 0.022403327941894532, 0.026284032821655274, 0.02223411178588867, 0.022187007904052734, 0.02222800064086914, 0.022481887817382813, 0.022255008697509765, 0.022736608505249025, 0.02326412773132324, 0.02292531204223633, 0.022677152633666993, 0.02248534393310547, 0.02237225532531738, 0.022280031204223633, 0.022093952178955076, 0.02204431915283203, 0.022130367279052734, 0.022099903106689453, 0.022219615936279295, 0.02222230339050293, 0.022047264099121094, 0.022208192825317382, 0.02295020866394043, 0.022237152099609376, 0.022106143951416017, 0.02213478469848633, 0.022255392074584962, 0.022191648483276368, 0.022223039627075194, 0.022204927444458008, 0.02227609634399414, 0.02227609634399414, 0.022468288421630858, 0.022290143966674805, 0.022190656661987305, 0.022165504455566407, 0.022073375701904298, 0.022046144485473634, 0.021994047164916992, 0.022001663208007814, 0.022024192810058595, 0.02209721565246582, 
0.02201055908203125, 0.02207459259033203, 0.022956031799316406, 0.022390783309936522, 0.022339263916015626, 0.022362783432006837, 0.022232736587524414, 0.022413312911987306, 0.02225152015686035, 0.022179840087890625, 0.022124544143676757, 0.02200371170043945, 0.022013952255249023, 0.021951520919799804, 0.022024639129638673, 0.021946880340576173, 0.022091808319091796, 0.02214297676086426, 0.022788095474243163, 0.022384288787841797, 0.022202367782592772, 0.02214672088623047, 0.022090431213378905, 0.022108224868774413, 0.022372064590454103, 0.022130016326904298, 0.022012800216674806, 0.022026239395141603, 0.02191155242919922, 0.021977088928222657, 0.021936128616333008, 0.02191974449157715, 0.021935712814331054, 0.022019872665405272, 0.022013599395751954, 0.02231983947753906, 0.02264463996887207, 0.022671808242797853, 0.022517663955688477, 0.022202367782592772, 0.02224127960205078, 0.02215116882324219, 0.022138496398925782, 0.021980575561523438, 0.0220927677154541, 0.022255615234375, 0.021946048736572264, 0.021981151580810546, 0.023011680603027343, 0.021925888061523437, 0.02206719970703125, 0.022070816040039062, 0.02244041633605957, 0.022356063842773437, 0.022564512252807617, 0.02239923286437988, 0.022300384521484376, 0.022060384750366212, 0.022098880767822265, 0.02201190376281738, 0.022140928268432617, 0.02204182434082031, 0.022016799926757813, 0.022460416793823244, 0.022029727935791017, 0.022417856216430665, 0.022187295913696288, 0.022159584045410158, 0.022195775985717772, 0.02268992042541504, 0.02261075210571289, 0.022517440795898437, 0.02212076759338379, 0.02230067253112793, 0.022034431457519533, 0.022125696182250975, 0.022194591522216797, 0.02234976005554199, 0.022587423324584962, 0.022409727096557617, 0.0224768009185791, 0.022624256134033204, 0.02253824043273926, 0.022335487365722655, 0.022218751907348632, 0.02218121528625488, 0.022192575454711913, 0.02226924705505371, 0.02247488021850586, 0.022344480514526366, 0.022465599060058593, 0.022282943725585938, 0.022114559173583983, 0.022482656478881837, 0.022022432327270507, 0.02224083137512207, 0.022006111145019533, 0.022171743392944337, 0.02198019218444824, 0.022115135192871095, 0.021938047409057616, 0.022119712829589844, 0.022047264099121094, 0.022360544204711914, 0.022161407470703123, 0.022609920501708985, 0.022616352081298828, 0.022410112380981444, 0.02231587219238281, 0.022378496170043945, 0.022316608428955078, 0.022342079162597655, 0.022161407470703123, 0.02215936088562012, 0.02206675148010254, 0.02205740737915039, 0.0220446720123291, 0.022149120330810547, 0.022108160018920898, 0.02197427177429199, 0.021858335494995117, 0.02206332778930664, 0.022101760864257813, 0.022333696365356447, 0.022225151062011717, 0.02246272087097168, 0.022642688751220705, 0.022451839447021484, 0.02286185646057129, 0.022526464462280273, 0.022526975631713866, 0.02248940849304199, 0.022270656585693358, 0.022564863204956053, 0.022386655807495118, 0.02280841636657715, 0.022431936264038086, 0.022246719360351563, 0.022305471420288086, 0.022205440521240235, 0.022340543746948244, 0.022180959701538085, 0.02376550483703613, 0.023262847900390626, 0.02228646469116211, 0.021996192932128907, 0.022412384033203125, 0.02189411163330078, 0.022038463592529298, 0.021999711990356444, 0.02195452880859375, 0.02191564750671387, 0.022226367950439453, 0.021967008590698243, 0.022002080917358398, 0.02208563232421875, 0.02207561683654785, 0.022126367568969726, 0.02230067253112793, 0.02228620719909668, 0.022414783477783203, 0.022188735961914063, 0.022013952255249023, 0.02199078369140625, 
0.02212723159790039, 0.022036575317382814, 0.022103967666625975, 0.021997568130493163, 0.02199295997619629, 0.022026752471923827, 0.02205286407470703, 0.02224742317199707, 0.022097728729248048, 0.022209823608398436, 0.02228316879272461, 0.02206822395324707, 0.02216969680786133, 0.02210908889770508, 0.022402431488037108, 0.022558719635009765, 0.022485376358032227, 0.022151424407958985, 0.022284063339233398, 0.022378719329833985, 0.022391807556152343, 0.022477535247802733, 0.022448415756225585, 0.022201471328735352, 0.022197248458862305, 0.02216643142700195, 0.022315999984741212, 0.022747007369995118, 0.02616524887084961, 0.023795360565185546, 0.02264441680908203, 0.022417184829711913, 0.02247318458557129, 0.02233996772766113, 0.022361440658569334, 0.022499488830566405, 0.022378751754760742, 0.02226521682739258, 0.022413503646850585, 0.02239356803894043, 0.022169599533081053, 0.02214476776123047, 0.0220797119140625, 0.02235372734069824, 0.022227071762084962, 0.022204511642456053, 0.022390432357788086, 0.022331743240356444, 0.022304479598999023, 0.022317344665527344, 0.0224399356842041, 0.022208511352539064, 0.022054912567138672, 0.02196803283691406, 0.021950592041015626, 0.021945056915283204, 0.021884927749633788, 0.023011327743530274, 0.02221612739562988, 0.023364128112792967, 0.022064735412597656, 0.02207379150390625, 0.021972415924072265, 0.021997312545776367, 0.022100799560546874, 0.022216352462768554, 0.022128671646118165, 0.022100032806396483, 0.022062463760375975, 0.021975103378295897, 0.022045215606689452, 0.02185651206970215, 0.021985055923461914, 0.021829887390136717, 0.022368255615234374, 0.0218787841796875, 0.02186355209350586, 0.02255961608886719, 0.022003679275512694, 0.02201398468017578, 0.02211840057373047, 0.022023775100708007, 0.02219664001464844, 0.022153215408325197, 0.02226371192932129, 0.02212873649597168, 0.02208697509765625, 0.021917472839355467, 0.021971872329711914, 0.02201190376281738, 0.022270175933837892, 0.021952512741088868, 0.021895040512084962, 0.021894752502441408, 0.02195510482788086, 0.022030336380004883, 0.02217932891845703, 0.02202470397949219, 0.02245427131652832, 0.021999616622924805, 0.02200364875793457, 0.021844032287597657, 0.021956672668457033, 0.021931615829467774, 0.02199177551269531, 0.021792768478393554, 0.02194819259643555, 0.021913728713989257, 0.022000831604003908, 0.021960639953613283, 0.022019039154052733, 0.022161407470703123, 0.02208563232421875, 0.021952512741088868, 0.021982688903808594, 0.021960927963256837, 0.02221878433227539, 0.02193027114868164, 0.022330944061279296, 0.022352319717407226, 0.022394880294799805, 0.022101984024047852, 0.02232499122619629, 0.022266143798828124, 0.022078880310058592, 0.022106719970703126, 0.022040576934814454, 0.022150943756103516, 0.02190332794189453, 0.021901567459106444, 0.021936128616333008, 0.02193164825439453, 0.021896928787231446, 0.0219204158782959, 0.022072608947753907, 0.022180736541748045, 0.022256799697875976, 0.02226041603088379, 0.021984607696533202, 0.021944095611572265, 0.021764991760253906, 0.02198944091796875, 0.021882816314697264, 0.021944320678710938, 0.022191104888916017, 0.022064128875732423, 0.021880416870117186, 0.021882623672485352, 0.021803680419921874, 0.02190438461303711, 0.02194054412841797, 0.021946687698364258, 0.02204265594482422, 0.022526111602783203, 0.022200159072875977, 0.021932031631469725, 0.021966848373413086, 0.021796096801757814, 0.022024799346923828, 0.021940223693847655, 0.021850271224975584, 0.021798912048339843, 0.02184726333618164, 0.021809247970581053, 
0.021907903671264647, 0.021797056198120116, 0.021876800537109376, 0.021932031631469725, 0.022038528442382813, 0.02212819290161133, 0.02217731285095215, 0.022080415725708007, 0.021979135513305666, 0.021986400604248047, 0.02193292808532715, 0.021880863189697265, 0.02189516830444336, 0.02190336036682129, 0.02231062316894531, 0.022022144317626953, 0.022063392639160156, 0.022591487884521484, 0.021917695999145507, 0.021827583312988282, 0.021893119812011717, 0.02207043266296387, 0.02235887908935547, 0.02243699264526367, 0.022244224548339842, 0.022192127227783204, 0.021985279083251954, 0.0220546875, 0.021899488449096678, 0.02188649559020996, 0.02187104034423828, 0.02181532859802246, 0.022063104629516602, 0.02198271942138672, 0.022039039611816406, 0.02196201515197754, 0.021859039306640626, 0.02192355155944824, 0.0219072322845459, 0.022428064346313475, 0.023104896545410158, 0.02225430488586426, 0.022007648468017577, 0.02212236785888672, 0.022281503677368163, 0.02213337516784668, 0.02192617607116699, 0.021944416046142577, 0.021935680389404296, 0.021944671630859374, 0.021905183792114258, 0.021936447143554687, 0.02201260757446289, 0.02197817611694336, 0.021865407943725587, 0.02205286407470703, 0.02327756881713867, 0.025399295806884766, 0.022755327224731444, 0.022487039566040038, 0.022203968048095702, 0.022081024169921876, 0.02219718360900879, 0.022993919372558593, 0.022952959060668944, 0.022512800216674806, 0.02230672073364258, 0.022178752899169922, 0.022054336547851563, 0.0218855037689209, 0.022255008697509765, 0.021869152069091798, 0.021954559326171876, 0.021952384948730468, 0.021932159423828125, 0.02209721565246582, 0.022176448822021484, 0.022215808868408203, 0.021989856719970703, 0.022091936111450196, 0.021879039764404296, 0.02231500816345215, 0.02186444854736328, 0.021843967437744142, 0.02187017631530762, 0.021782943725585938, 0.021960704803466798, 0.021817344665527344, 0.021790143966674804, 0.022001216888427735, 0.021969919204711915, 0.021872095108032227, 0.02188751983642578, 0.021948415756225585, 0.022321151733398437, 0.02215116882324219, 0.022005664825439454, 0.021999711990356444, 0.02185603141784668, 0.021992992401123047, 0.021907136917114257, 0.023540512084960937, 0.023470304489135743, 0.022000959396362305, 0.021928831100463866, 0.021897024154663085, 0.021939456939697264, 0.0218505916595459, 0.022026752471923827, 0.022066879272460937, 0.022227039337158205, 0.02202614402770996, 0.021893375396728514, 0.021900224685668945, 0.021986207962036132, 0.02213039970397949, 0.021889888763427734, 0.021872671127319335, 0.021997535705566406, 0.02195644760131836, 0.022024383544921877, 0.022117631912231445, 0.021919551849365233, 0.02228665542602539, 0.021860128402709962, 0.02198819160461426, 0.021932031631469725, 0.02210767936706543, 0.022071104049682617, 0.022041088104248048, 0.02190118408203125, 0.02204425621032715, 0.022022560119628908, 0.0219015998840332, 0.02185775947570801, 0.0219116153717041, 0.021898880004882812, 0.02186467170715332, 0.02188966369628906, 0.021942176818847657, 0.021868640899658204, 0.02194144058227539, 0.02181407928466797, 0.022176895141601562, 0.02192438316345215, 0.02202412796020508, 0.022116767883300782, 0.022091007232666014, 0.022033151626586915, 0.021977088928222657, 0.021999040603637696, 0.02243168067932129, 0.022260351181030272, 0.02224742317199707, 0.02208563232421875, 0.02206719970703125, 0.02194540786743164, 0.021895328521728517, 0.02183225631713867, 0.02192131233215332, 0.021875295639038086, 0.021940319061279297, 0.02185139274597168, 0.02220515251159668, 0.02194175910949707, 
0.022032224655151367, 0.02204537582397461, 0.022138879776000975, 0.022507455825805663, 0.02196665573120117, 0.022015840530395507, 0.021889440536499022, 0.021900896072387696, 0.021878559112548827, 0.021895200729370116, 0.021919519424438476, 0.02189593505859375, 0.02188889694213867, 0.022648895263671875, 0.022239231109619142, 0.021970815658569336, 0.022139007568359376, 0.021929983139038087, 0.022150976181030273, 0.02201580810546875, 0.022094207763671873, 0.021954559326171876, 0.02189676856994629, 0.02192019271850586, 0.02187468719482422, 0.021829023361206054, 0.021936159133911132, 0.022133312225341796, 0.021935935974121093, 0.0219150390625, 0.02194099235534668, 0.021970975875854493, 0.022107999801635744, 0.022081695556640624, 0.02206719970703125, 0.021932031631469725, 0.02191564750671387, 0.021940223693847655, 0.02190540885925293, 0.021843360900878905, 0.021852767944335938, 0.02191564750671387, 0.021945856094360353, 0.021965152740478517, 0.02187280082702637, 0.022231039047241212, 0.021972991943359374, 0.021893119812011717, 0.021897279739379882, 0.021968832015991212, 0.02192959976196289, 0.022032768249511718, 0.021997568130493163, 0.02220185661315918, 0.021995359420776368, 0.021978784561157226, 0.02211327934265137, 0.021995519638061522, 0.02187398338317871, 0.021826240539550783, 0.021803007125854493, 0.021834911346435545, 0.02179692840576172, 0.021790655136108398, 0.021902175903320314, 0.022269952774047853, 0.021979135513305666, 0.022200319290161134, 0.022423839569091795, 0.022502304077148438, 0.02258188819885254, 0.02243708801269531, 0.02234220886230469, 0.0224051513671875, 0.02214950370788574, 0.022185983657836913]",tokens/s,45.12537280134154,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1169.977344,1101.98784,0.0,706.740224,681.6384,s,1,8.240458984375,8.240458984375,0.0,8.240458984375,8.240458984375,8.240458984375,8.240458984375,[8.240458984375],,kWh,3.042966298750116e-05,3.348905752097596e-06,8.997784976011447e-06,4.2776353715610206e-05,,MB,1477.947392,1406.07488,0.0,996.1472,949.238272,s,10,0.2649172801971435,0.026491728019714354,0.00029793763482712345,0.026321727752685545,0.02689329662322998,0.026940312099456788,0.02697792448043823,"[0.026882848739624023, 0.026987327575683593, 0.026252416610717772, 0.02670947265625, 0.026139328002929688, 0.026283327102661132, 0.026788543701171875, 0.026230560302734376, 0.026358240127563475, 0.026285215377807616]",tokens/s,9663.39378878918,kWh,7.905608014752041e-07,8.718526049061943e-08,4.875897294107915e-07,1.365335791376615e-06,tokens/kWh,187499662.43973225,MB,1498.300416,1414.463488,0.0,1004.535808,949.240832,s,10,12.595625854492187,1.2595625854492187,0.007672994935525482,1.2575475463867187,1.2665170166015625,1.2722932983398436,1.2769143237304688,"[1.278069580078125, 1.26454345703125, 1.250636962890625, 1.25344140625, 1.259962646484375, 1.2652333984375, 1.255451171875, 1.254578369140625, 1.2596439208984376, 1.25406494140625]",tokens/s,50.01736374817077,kWh,3.732229134477349e-05,4.116098889717311e-06,1.5007144888589593e-05,5.6445535123080395e-05,tokens/kWh,1116120.165441385,,s,630,12.593511472702021,0.019989700750320676,0.0003206107088227993,0.019910863876342774,0.02026016883850098,0.02048119831085205,0.02131405834197999,"[0.02076630401611328, 0.02052751922607422, 0.020439136505126954, 0.020158016204833984, 0.020348800659179687, 0.02035145568847656, 0.020225791931152343, 0.020098335266113283, 0.020228767395019533, 0.020021568298339842, 0.020414464950561522, 0.020178239822387697, 0.01999942398071289, 0.019785728454589844, 0.01985955238342285, 0.02021798324584961, 0.020110143661499023, 0.019719135284423827, 0.019830976486206055, 0.020127552032470703, 0.020385631561279295, 0.020480159759521485, 0.020315168380737304, 0.0209946231842041, 0.020470176696777344, 0.02031001663208008, 0.020108768463134766, 0.02002998352050781, 0.01989836883544922, 0.019933183670043944, 0.020107040405273436, 0.020168928146362303, 0.019895776748657227, 0.019812896728515626, 0.01998624038696289, 0.02004412841796875, 0.020193119049072266, 0.02015827178955078, 0.020273344039916992, 0.020234304428100584, 0.019994239807128906, 0.020224319458007813, 0.021409408569335937, 0.019943807601928713, 0.020174047470092774, 0.02036796760559082, 0.02034294319152832, 0.019965343475341797, 0.02000048065185547, 0.019989408493041993, 0.020133888244628906, 0.020100608825683593, 0.02318751907348633, 0.02056550407409668, 0.02014508819580078, 0.020105215072631837, 0.02022400093078613, 0.02007859230041504, 0.020330656051635743, 0.021389152526855467, 0.020841567993164063, 0.02059561538696289, 0.020523008346557618, 0.020568384170532226, 0.020301408767700195, 0.020048704147338867, 0.020000448226928712, 0.020154687881469728, 0.020146175384521483, 0.02004755210876465, 0.020164352416992187, 0.02005459213256836, 0.020008960723876954, 0.019984064102172853, 0.020611391067504883, 0.019893312454223634, 0.019960128784179687, 0.01991334342956543, 0.019918848037719726, 0.020000768661499024, 0.020059776306152344, 0.019853696823120118, 0.01985536003112793, 0.01999977684020996, 0.02113020706176758, 0.020808736801147462, 
0.020384735107421875, 0.020258207321166993, 0.020103103637695314, 0.020191200256347658, 0.020443359375, 0.020122079849243163, 0.02029497528076172, 0.02003424072265625, 0.02003321647644043, 0.019908287048339843, 0.019873823165893555, 0.019794143676757813, 0.0199048957824707, 0.01995955276489258, 0.020133312225341798, 0.02018771171569824, 0.019949855804443358, 0.02037881660461426, 0.01994211196899414, 0.019732736587524415, 0.01975276756286621, 0.020083808898925783, 0.020108192443847657, 0.019970048904418947, 0.01989593505859375, 0.01991436767578125, 0.019864320755004883, 0.01991231918334961, 0.01977494430541992, 0.019865888595581055, 0.019937919616699218, 0.01995699119567871, 0.019800832748413086, 0.019918848037719726, 0.020229408264160156, 0.020214048385620118, 0.02011123275756836, 0.02001158332824707, 0.01998147201538086, 0.019925695419311523, 0.0198089599609375, 0.019951520919799806, 0.019779680252075195, 0.019796255111694337, 0.019748512268066405, 0.019847007751464845, 0.0198305606842041, 0.019767744064331055, 0.019793376922607422, 0.019814144134521483, 0.019888927459716797, 0.019799711227416993, 0.019751264572143556, 0.0199267520904541, 0.019900703430175783, 0.019939136505126954, 0.0199354248046875, 0.01993280029296875, 0.020251007080078126, 0.020122751235961914, 0.01995395278930664, 0.019790016174316406, 0.0198701114654541, 0.019916000366210936, 0.019927295684814453, 0.02000092887878418, 0.0203001594543457, 0.019891584396362304, 0.01987443161010742, 0.019810464859008788, 0.019715967178344725, 0.02018876838684082, 0.019917184829711915, 0.019707807540893553, 0.01983292770385742, 0.019742719650268553, 0.019705312728881836, 0.019691904067993163, 0.01977471923828125, 0.019614463806152345, 0.019675296783447267, 0.01963827133178711, 0.019779584884643556, 0.01989360046386719, 0.019791616439819335, 0.019762239456176757, 0.019731327056884764, 0.01976176071166992, 0.019874176025390624, 0.019736576080322265, 0.019761152267456054, 0.019961856842041017, 0.019762432098388672, 0.019886751174926758, 0.019861215591430663, 0.020029823303222657, 0.01985945510864258, 0.019779584884643556, 0.01982464027404785, 0.019741792678833008, 0.019766176223754883, 0.019775487899780272, 0.019936607360839843, 0.01998585510253906, 0.019958208084106446, 0.019869823455810547, 0.019900415420532228, 0.01987583923339844, 0.01984716796875, 0.01984214401245117, 0.019798944473266602, 0.019808256149291992, 0.019655967712402345, 0.019673728942871095, 0.019730527877807616, 0.019900415420532228, 0.01988761520385742, 0.019734655380249023, 0.0197857608795166, 0.01996556854248047, 0.01993187141418457, 0.019795967102050782, 0.019769344329833984, 0.01982259178161621, 0.01979372787475586, 0.019959999084472657, 0.01978748893737793, 0.01977987289428711, 0.019775775909423827, 0.01981996726989746, 0.019800352096557616, 0.019900703430175783, 0.019914464950561525, 0.02009516716003418, 0.019945247650146485, 0.019948896408081056, 0.020085151672363282, 0.019900703430175783, 0.019932735443115236, 0.020009408950805663, 0.019982559204101562, 0.019877664566040038, 0.01969270324707031, 0.020119808197021485, 0.019993183135986328, 0.02025187110900879, 0.02015648078918457, 0.01995644760131836, 0.019885663986206056, 0.019807968139648437, 0.019991231918334962, 0.019853311538696287, 0.019944511413574218, 0.0197291202545166, 0.019765472412109374, 0.01996143913269043, 0.02023200035095215, 0.02006275177001953, 0.02001296043395996, 0.01992310333251953, 0.019804159164428712, 0.019832639694213866, 0.019763391494750978, 0.01984716796875, 0.019900415420532228, 
0.019877023696899414, 0.019928543090820313, 0.019927391052246092, 0.020050111770629882, 0.020622976303100587, 0.01988595199584961, 0.019819007873535157, 0.019767295837402343, 0.01984511947631836, 0.0200581111907959, 0.019926496505737305, 0.019868192672729493, 0.021798912048339843, 0.02038118362426758, 0.02036787223815918, 0.020105215072631837, 0.019947200775146483, 0.019878208160400392, 0.019767488479614258, 0.019787296295166016, 0.019812320709228514, 0.01991628837585449, 0.01989846420288086, 0.019870431900024414, 0.019901567459106446, 0.020454336166381835, 0.019989631652832032, 0.019947904586791992, 0.019806720733642577, 0.02017683219909668, 0.02019327926635742, 0.020313535690307617, 0.020238367080688477, 0.02020102310180664, 0.020214752197265626, 0.020024511337280275, 0.020144384384155275, 0.01987180709838867, 0.019810815811157227, 0.019868959426879884, 0.01993974494934082, 0.019802431106567382, 0.019752960205078125, 0.019715232849121092, 0.019770048141479493, 0.019822111129760744, 0.019763839721679687, 0.019765247344970704, 0.01975654411315918, 0.01992755126953125, 0.01992428779602051, 0.019853599548339845, 0.01991107177734375, 0.02001203155517578, 0.019892448425292968, 0.01989459228515625, 0.02007206344604492, 0.020089696884155274, 0.019832223892211915, 0.020075103759765626, 0.02001919937133789, 0.019997760772705077, 0.01995622444152832, 0.01983459281921387, 0.019830175399780273, 0.019931711196899415, 0.019814367294311522, 0.019861888885498048, 0.020291584014892578, 0.020150272369384766, 0.02233568000793457, 0.021124000549316405, 0.019907487869262695, 0.01988921546936035, 0.020192192077636718, 0.02002943992614746, 0.019804159164428712, 0.019920064926147462, 0.020043712615966797, 0.01991062355041504, 0.019921823501586913, 0.01992038345336914, 0.01989059257507324, 0.01985955238342285, 0.01983283233642578, 0.020092927932739257, 0.01982259178161621, 0.01985536003112793, 0.019783679962158202, 0.01984921646118164, 0.02003126335144043, 0.019990751266479492, 0.019889503479003905, 0.019986783981323242, 0.01999839973449707, 0.02006687927246094, 0.020104480743408204, 0.020111520767211913, 0.019968544006347656, 0.020172191619873048, 0.020203487396240234, 0.020175392150878907, 0.020695295333862305, 0.020547391891479493, 0.020977216720581053, 0.02008438491821289, 0.019859615325927733, 0.019792640686035156, 0.01972809600830078, 0.019800352096557616, 0.02021990394592285, 0.020176511764526367, 0.0201582088470459, 0.019831167221069337, 0.019880191802978515, 0.020068351745605468, 0.019859039306640625, 0.0198701114654541, 0.019826751708984375, 0.0199964485168457, 0.02026630401611328, 0.020052448272705078, 0.020093120574951173, 0.019932704925537108, 0.020294303894042968, 0.020191232681274415, 0.020247840881347658, 0.020064287185668946, 0.020150976181030275, 0.02025062370300293, 0.02046566390991211, 0.020112991333007812, 0.019851680755615234, 0.01984921646118164, 0.019935232162475586, 0.02002943992614746, 0.019802112579345704, 0.019715103149414062, 0.019921215057373046, 0.019899040222167968, 0.019931007385253906, 0.019931264877319336, 0.019773088455200195, 0.0198372802734375, 0.019910655975341796, 0.022226783752441408, 0.019900575637817382, 0.019740671157836915, 0.01982259178161621, 0.01968332862854004, 0.019910655975341796, 0.019881824493408203, 0.019773120880126952, 0.01980259132385254, 0.020684799194335936, 0.019795967102050782, 0.01973151969909668, 0.019759296417236328, 0.019755199432373048, 0.019929183959960937, 0.019933664321899414, 0.019937280654907227, 0.02008883285522461, 0.01986764717102051, 
0.01984921646118164, 0.019844383239746095, 0.019710016250610352, 0.019800512313842774, 0.019828319549560547, 0.019839071273803712, 0.01983951950073242, 0.01988403129577637, 0.019998720169067383, 0.019932191848754884, 0.020022239685058594, 0.019887840270996094, 0.019826175689697266, 0.01985206413269043, 0.019763200759887696, 0.019742176055908202, 0.019886623382568358, 0.019793920516967774, 0.019933183670043944, 0.01983692741394043, 0.019687231063842774, 0.0196231689453125, 0.019718719482421876, 0.019734912872314454, 0.019748096466064454, 0.01998691177368164, 0.019900224685668946, 0.019924768447875975, 0.020724128723144532, 0.020008703231811524, 0.019941024780273438, 0.019805952072143553, 0.019729183197021483, 0.019809951782226564, 0.019837120056152343, 0.019788000106811525, 0.02048204803466797, 0.019982336044311523, 0.019873760223388673, 0.0198123836517334, 0.019973119735717772, 0.020351999282836913, 0.019848800659179686, 0.01970012855529785, 0.019740671157836915, 0.019802112579345704, 0.019955711364746095, 0.019930335998535158, 0.01985820770263672, 0.01978982353210449, 0.019726335525512697, 0.0198922233581543, 0.01983014488220215, 0.01990060806274414, 0.019765695571899413, 0.019826719284057617, 0.020092384338378906, 0.02013433647155762, 0.019900224685668946, 0.01988140869140625, 0.019893056869506837, 0.019808256149291992, 0.019827871322631835, 0.019831647872924806, 0.01981644821166992, 0.01978278350830078, 0.01990131187438965, 0.02005734443664551, 0.020161279678344725, 0.019932159423828123, 0.019962879180908204, 0.01982259178161621, 0.019978368759155273, 0.0198286075592041, 0.019826528549194335, 0.01984118461608887, 0.019930496215820312, 0.02007094383239746, 0.019998815536499022, 0.019933183670043944, 0.019840160369873048, 0.019851711273193358, 0.01982512092590332, 0.01978156852722168, 0.01970979118347168, 0.01992310333251953, 0.019823871612548827, 0.019864320755004883, 0.02002943992614746, 0.0199332160949707, 0.01987740707397461, 0.019855424880981444, 0.01987228775024414, 0.019834943771362305, 0.019810304641723633, 0.01972617530822754, 0.019855520248413087, 0.01983807945251465, 0.019816320419311525, 0.019893247604370116, 0.019673088073730468, 0.019703359603881837, 0.019660383224487304, 0.019634784698486327, 0.01966924858093262, 0.01983692741394043, 0.019744768142700195, 0.019832544326782227, 0.019832128524780272, 0.019843584060668946, 0.019740224838256836, 0.02064886474609375, 0.020162431716918946, 0.020026624679565428, 0.019853408813476563, 0.019851999282836916, 0.019828800201416016, 0.02078950309753418, 0.020059104919433593, 0.019829248428344725, 0.01971545600891113, 0.019755552291870117, 0.01977587127685547, 0.019812288284301757, 0.01993075180053711, 0.019984832763671877, 0.01987107276916504, 0.02101705551147461, 0.02064134407043457, 0.019874399185180663, 0.02002742385864258, 0.020006208419799804, 0.019874496459960936, 0.02004924774169922, 0.020154560089111328, 0.02098627281188965, 0.020230207443237305, 0.02161664009094238, 0.02024630355834961, 0.02003376007080078, 0.019895904541015624, 0.019831199645996094, 0.020331647872924803, 0.01991155242919922, 0.019976192474365235, 0.019992576599121094, 0.019974143981933593, 0.01983036804199219, 0.019829151153564453, 0.019918687820434572, 0.019843231201171874, 0.019971872329711916, 0.019947519302368166, 0.019852319717407228, 0.019720640182495117, 0.01974892807006836, 0.019973728179931642, 0.019950464248657228, 0.01977872085571289, 0.01981923294067383, 0.020088703155517577, 0.020008863449096678, 0.020135263442993163, 0.019893247604370116, 
0.019916351318359376, 0.020001440048217772, 0.02042822456359863, 0.02080995178222656, 0.019763328552246093, 0.019793920516967774, 0.019795967102050782, 0.019898656845092774, 0.01988934326171875, 0.019885759353637695, 0.019841888427734374, 0.01987993621826172, 0.01994704055786133, 0.02016876792907715, 0.019800256729125977, 0.01972879981994629, 0.019797855377197266, 0.01976300811767578, 0.019911872863769532, 0.020279935836791992, 0.019860063552856445, 0.019754079818725585, 0.02025948715209961, 0.019875104904174806, 0.019987167358398436, 0.019871295928955077, 0.019932640075683593, 0.01981500816345215, 0.01999910354614258, 0.020165792465209963, 0.01991894340515137, 0.01986636734008789, 0.019916799545288084, 0.01989753532409668, 0.020126527786254882, 0.019961727142333983, 0.019949087142944334, 0.01979372787475586, 0.019820640563964844, 0.019744831085205077, 0.019679391860961914, 0.019701759338378907, 0.019668895721435545, 0.019730112075805665, 0.019673599243164062, 0.019655040740966797, 0.019920896530151368, 0.01981158447265625, 0.0198023681640625, 0.019742303848266602, 0.019753887176513673]",tokens/s,50.025761390347874,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1845.997568,2760.835072,0.0,2365.587456,2314.318336,s,1,8.89519921875,8.89519921875,0.0,8.89519921875,8.89519921875,8.89519921875,8.89519921875,[8.89519921875],,kWh,5.4660155666647367e-05,6.017403879746309e-06,1.6371401986001177e-05,7.704896153239485e-05,,MB,1897.926656,3092.185088,0.0,2682.257408,2607.60832,s,10,0.48081442642211913,0.04808144264221191,0.00017205538202914872,0.04805052757263184,0.048120225143432616,0.04835307178497314,0.048539349098205564,"[0.04858591842651367, 0.04797695922851562, 0.048004062652587894, 0.0480684814453125, 0.04806121444702149, 0.048005023956298826, 0.048048126220703126, 0.04805516815185547, 0.04805292892456055, 0.04795654296875]",tokens/s,5324.299478802475,kWh,1.4727333594430756e-06,1.624139553476128e-07,9.766018421407165e-07,2.611749156931405e-06,tokens/kWh,98018601.56462322,MB,1902.24384,3092.185088,0.0,2682.257408,2607.61088,s,10,13.363730224609373,1.3363730224609376,0.007735199172256561,1.3361744995117188,1.3469821655273437,1.3478011413574218,1.3484563220214845,"[1.3357327880859375, 1.3290960693359375, 1.3468001708984374, 1.336394287109375, 1.340294921875, 1.3486201171875, 1.323949951171875, 1.340486328125, 1.32640087890625, 1.3359547119140625]",tokens/s,47.142526032129254,kWh,3.889775208722348e-05,4.289722245383801e-06,1.924219211065894e-05,6.242966644326624e-05,tokens/kWh,1009135.6175553503,,s,630,13.36166525077821,0.021208992461552695,0.00041553953410139157,0.021115599632263185,0.021474857711791993,0.021689506912231444,0.02319046390533448,"[0.02129305648803711, 0.021180543899536133, 0.021196672439575195, 0.02103091239929199, 0.02104115104675293, 0.021098495483398438, 0.02106582450866699, 0.021067680358886717, 0.021098207473754883, 0.021122848510742188, 
0.021251712799072266, 0.021353343963623046, 0.021423744201660155, 0.021078336715698243, 0.02110451126098633, 0.021038656234741212, 0.021176959991455076, 0.02127257537841797, 0.02119785690307617, 0.02130646324157715, 0.02111680030822754, 0.02102783966064453, 0.02122444725036621, 0.021110240936279296, 0.020978048324584962, 0.02158198356628418, 0.02154889678955078, 0.02133967971801758, 0.02140755271911621, 0.021250911712646484, 0.021047136306762696, 0.02124812889099121, 0.021020671844482423, 0.021082111358642578, 0.02101180839538574, 0.021189279556274414, 0.021307392120361326, 0.021180416107177736, 0.021315584182739256, 0.021372224807739256, 0.021383712768554688, 0.02140380859375, 0.021399423599243163, 0.02138140869140625, 0.02134000015258789, 0.02157129669189453, 0.021334239959716797, 0.021293119430541994, 0.021174272537231444, 0.021140607833862304, 0.021111135482788087, 0.02106617546081543, 0.02106787109375, 0.020996095657348633, 0.020959232330322267, 0.021084096908569334, 0.021122976303100584, 0.021334304809570312, 0.02127039909362793, 0.021155839920043946, 0.020972543716430665, 0.02106060791015625, 0.021151744842529296, 0.021368831634521485, 0.021350400924682617, 0.021755712509155273, 0.021196191787719726, 0.0211822395324707, 0.021084896087646486, 0.021018911361694335, 0.021040767669677735, 0.020989568710327148, 0.021039648056030272, 0.02111631965637207, 0.021034912109375, 0.021082239151000978, 0.021086944580078124, 0.02099001693725586, 0.021042335510253907, 0.021154144287109374, 0.021143455505371094, 0.021142112731933595, 0.021155839920043946, 0.02126848030090332, 0.021217279434204102, 0.021143104553222655, 0.021322175979614257, 0.02098329544067383, 0.021658111572265625, 0.021204832077026368, 0.02096143913269043, 0.021548671722412108, 0.020979616165161134, 0.0210416316986084, 0.020912128448486327, 0.020926464080810548, 0.020992000579833983, 0.021034015655517577, 0.02091209602355957, 0.020855615615844727, 0.020863168716430663, 0.020946943283081054, 0.020926496505737305, 0.02113942337036133, 0.021172096252441406, 0.021250175476074218, 0.02089574432373047, 0.020936704635620116, 0.02089369583129883, 0.02096892738342285, 0.021162015914916992, 0.020886016845703126, 0.021323776245117186, 0.021045248031616212, 0.02108006477355957, 0.02130339241027832, 0.020977088928222656, 0.0209781436920166, 0.02090188789367676, 0.02110652732849121, 0.020981376647949218, 0.02099830436706543, 0.021023103713989258, 0.02096678352355957, 0.021254783630371095, 0.02098099136352539, 0.02143846321105957, 0.021618688583374023, 0.02132508850097656, 0.02137980842590332, 0.021354496002197267, 0.021250207901000975, 0.021538656234741212, 0.021157888412475585, 0.02104115104675293, 0.02127462387084961, 0.021511199951171876, 0.021150976181030273, 0.021108064651489258, 0.021082496643066405, 0.02106547164916992, 0.021182207107543944, 0.021368640899658203, 0.02099065589904785, 0.020965152740478516, 0.021358816146850586, 0.02105529594421387, 0.020983999252319335, 0.0210831356048584, 0.024739904403686522, 0.0228787841796875, 0.021362432479858397, 0.021254783630371095, 0.02116534423828125, 0.02111756706237793, 0.02106172752380371, 0.021104032516479493, 0.021132991790771483, 0.021003168106079103, 0.021217279434204102, 0.021114751815795897, 0.02104275131225586, 0.021023296356201173, 0.021061632156372072, 0.021060800552368163, 0.020963167190551756, 0.021052543640136718, 0.02116796875, 0.02113849639892578, 0.021437376022338868, 0.021114879608154297, 0.02102796745300293, 0.02115827178955078, 0.021101055145263673, 0.02247475242614746, 
0.021249055862426758, 0.021365440368652344, 0.02134668731689453, 0.0214136962890625, 0.021428319931030275, 0.02347996711730957, 0.02214512062072754, 0.021247327804565428, 0.021750688552856445, 0.02141107177734375, 0.02150003242492676, 0.021273216247558593, 0.021304512023925783, 0.021391775131225584, 0.02135878372192383, 0.02228873634338379, 0.021049087524414062, 0.020902143478393555, 0.021244960784912108, 0.021127519607543947, 0.021000640869140625, 0.02100022315979004, 0.021049503326416016, 0.021017919540405272, 0.021086912155151367, 0.02127257537841797, 0.02121548843383789, 0.021052255630493164, 0.021250112533569336, 0.021453664779663085, 0.021243904113769533, 0.020992191314697265, 0.021112640380859374, 0.02104729652404785, 0.020961503982543945, 0.020909759521484376, 0.02107811164855957, 0.021440511703491212, 0.0230231990814209, 0.021638816833496093, 0.021410560607910155, 0.021470783233642578, 0.02130169677734375, 0.02104319953918457, 0.02122537612915039, 0.02126652717590332, 0.02101043128967285, 0.021135360717773437, 0.0223372802734375, 0.02143984031677246, 0.02131648063659668, 0.021026687622070314, 0.02106153678894043, 0.02107744026184082, 0.021086847305297852, 0.020926752090454102, 0.02100419235229492, 0.02123776054382324, 0.021271680831909178, 0.021131999969482423, 0.021161727905273438, 0.02112348747253418, 0.02117580795288086, 0.02114918327331543, 0.02121939277648926, 0.021301631927490235, 0.02090451240539551, 0.020944896697998046, 0.021379072189331053, 0.021102399826049806, 0.021158079147338867, 0.02101980781555176, 0.02097990417480469, 0.02089846420288086, 0.02103209686279297, 0.021019487380981444, 0.02102681541442871, 0.02105276870727539, 0.021016544342041015, 0.020980384826660155, 0.02101865577697754, 0.020869056701660157, 0.020944576263427734, 0.021173856735229493, 0.021473440170288086, 0.021213855743408203, 0.021769599914550783, 0.02137264060974121, 0.021226303100585937, 0.02122140884399414, 0.021154848098754883, 0.021187360763549806, 0.02121299171447754, 0.021281152725219726, 0.02146441650390625, 0.02180073547363281, 0.02128783988952637, 0.021124256134033202, 0.02154787254333496, 0.024763551712036133, 0.021267295837402344, 0.021190784454345704, 0.021176191329956056, 0.021106687545776368, 0.021333343505859376, 0.021391103744506836, 0.021160863876342775, 0.02120694351196289, 0.02111087989807129, 0.020961280822753905, 0.02130240058898926, 0.021023616790771485, 0.021182464599609374, 0.021147647857666017, 0.021124191284179687, 0.021119840621948244, 0.021026687622070314, 0.02119603157043457, 0.02099500846862793, 0.02149580764770508, 0.02148761558532715, 0.021542207717895508, 0.02124777603149414, 0.021016576766967773, 0.021097375869750978, 0.0211125431060791, 0.02106172752380371, 0.02121308708190918, 0.021152000427246093, 0.021128351211547852, 0.02091916847229004, 0.021182464599609374, 0.021315584182739256, 0.021339359283447264, 0.021307552337646484, 0.021279359817504884, 0.02125823974609375, 0.02104729652404785, 0.021249664306640624, 0.021423871994018556, 0.022934303283691407, 0.021489376068115233, 0.021776575088500977, 0.021768287658691408, 0.021327072143554688, 0.02118079948425293, 0.021159456253051757, 0.021162879943847655, 0.021185855865478515, 0.02129596710205078, 0.021337600708007814, 0.021194175720214845, 0.021160863876342775, 0.02125823974609375, 0.021544960021972655, 0.02130534362792969, 0.02107187271118164, 0.02114112091064453, 0.021062015533447265, 0.021058624267578124, 0.021046207427978515, 0.02107529640197754, 0.02122819137573242, 0.021118976593017577, 
0.020908031463623047, 0.02104297637939453, 0.021042783737182616, 0.020967039108276367, 0.02286899185180664, 0.021313535690307618, 0.021323104858398438, 0.022659744262695312, 0.021001855850219728, 0.021038623809814454, 0.021162847518920898, 0.021088031768798827, 0.02098745536804199, 0.02092233657836914, 0.021424831390380858, 0.0232587833404541, 0.021631328582763672, 0.021325824737548828, 0.021252031326293944, 0.02111267280578613, 0.021259872436523438, 0.021455488204956054, 0.02211123275756836, 0.021692703247070313, 0.021601247787475585, 0.02295577621459961, 0.021519584655761717, 0.02138806343078613, 0.02131350326538086, 0.021057184219360352, 0.021156448364257813, 0.021131040573120118, 0.02107596778869629, 0.021188575744628905, 0.021182016372680666, 0.021457376480102538, 0.021169855117797853, 0.022363487243652343, 0.02112406349182129, 0.021180416107177736, 0.021076192855834962, 0.021159168243408202, 0.021070016860961913, 0.021108896255493163, 0.020961471557617187, 0.021303295135498047, 0.02109619140625, 0.02101696014404297, 0.020950912475585937, 0.020920320510864256, 0.020918272018432618, 0.02097385597229004, 0.021022560119628907, 0.02090729522705078, 0.02100806427001953, 0.020973567962646485, 0.02086390495300293, 0.02086502456665039, 0.02084454345703125, 0.020941823959350587, 0.02101759910583496, 0.021275871276855467, 0.020972320556640625, 0.021050432205200195, 0.021074880599975587, 0.020961280822753905, 0.02128825569152832, 0.021190656661987304, 0.020992704391479492, 0.02087936019897461, 0.021492767333984374, 0.021211328506469725, 0.020982559204101563, 0.021014528274536134, 0.021002464294433594, 0.020977439880371093, 0.020907360076904295, 0.021039775848388672, 0.020987903594970703, 0.021130815505981445, 0.021070207595825195, 0.021030975341796876, 0.02103910446166992, 0.021087871551513673, 0.020859264373779298, 0.020942623138427735, 0.021002464294433594, 0.020921567916870117, 0.020768768310546876, 0.020846399307250976, 0.020997087478637697, 0.020989887237548827, 0.02089369583129883, 0.020887615203857422, 0.02103910446166992, 0.020931840896606446, 0.020869375228881836, 0.020886016845703126, 0.020953088760375976, 0.02106777572631836, 0.02101817512512207, 0.021039487838745118, 0.021174272537231444, 0.021139455795288087, 0.021503711700439455, 0.021831968307495116, 0.02105958366394043, 0.02127359962463379, 0.021148672103881837, 0.021161983489990235, 0.020993343353271486, 0.024013376235961913, 0.021709983825683593, 0.021321760177612305, 0.021033920288085938, 0.021014528274536134, 0.02130512046813965, 0.021569759368896484, 0.021450431823730468, 0.02114182472229004, 0.021538816452026367, 0.02103193664550781, 0.021072895050048827, 0.021013887405395507, 0.02100204849243164, 0.02097760009765625, 0.021101312637329103, 0.021108863830566406, 0.02168560028076172, 0.02128505516052246, 0.02124847984313965, 0.02133206367492676, 0.021178112030029297, 0.021126495361328126, 0.02102764892578125, 0.02102272033691406, 0.02122444725036621, 0.02117849540710449, 0.02115814399719238, 0.02109913635253906, 0.021237247467041014, 0.02123529624938965, 0.021201791763305663, 0.021133344650268556, 0.021398847579956054, 0.02114134407043457, 0.021250911712646484, 0.021362272262573243, 0.021268735885620116, 0.021187936782836914, 0.021420864105224608, 0.0210402889251709, 0.021137535095214845, 0.021106559753417967, 0.0209785270690918, 0.021284448623657228, 0.021492128372192384, 0.021958656311035156, 0.021465087890625, 0.02128633689880371, 0.02125404739379883, 0.021213600158691406, 0.02106598472595215, 0.02094697570800781, 
0.020950016021728517, 0.021710847854614256, 0.021114879608154297, 0.02103059196472168, 0.020961759567260742, 0.020993408203125, 0.020799072265625, 0.020853504180908203, 0.02097983932495117, 0.02103628730773926, 0.02087603187561035, 0.020876863479614257, 0.02090438461303711, 0.020914176940917968, 0.02086502456665039, 0.02084454345703125, 0.020964927673339843, 0.020910112380981446, 0.02095555114746094, 0.020936704635620116, 0.020954368591308593, 0.020929279327392577, 0.020887199401855468, 0.020951391220092774, 0.02093609619140625, 0.020855327606201172, 0.020875551223754882, 0.02101219177246094, 0.021071935653686525, 0.02116636848449707, 0.02098873519897461, 0.021003168106079103, 0.02105958366394043, 0.020940351486206054, 0.020904064178466797, 0.021063999176025392, 0.02088960075378418, 0.020873472213745116, 0.020927648544311523, 0.02104140853881836, 0.02104560089111328, 0.021445920944213867, 0.020975391387939454, 0.021016672134399415, 0.02095734405517578, 0.023843584060668947, 0.021544223785400392, 0.02129337692260742, 0.021107040405273437, 0.021060800552368163, 0.020976448059082033, 0.020960575103759767, 0.021392063140869142, 0.02094607925415039, 0.020853376388549803, 0.020895967483520506, 0.021219327926635743, 0.02093062400817871, 0.021053375244140624, 0.020991167068481444, 0.021060415267944336, 0.020916223526000977, 0.020924543380737303, 0.020932479858398436, 0.021146303176879884, 0.021149728775024416, 0.021008384704589843, 0.021166080474853514, 0.020971616744995116, 0.021002143859863282, 0.02106777572631836, 0.021004287719726563, 0.021097503662109374, 0.021005504608154296, 0.021102304458618163, 0.021071935653686525, 0.02119875144958496, 0.021602399826049806, 0.02132352066040039, 0.02114995193481445, 0.021141664505004883, 0.02107129669189453, 0.021019039154052736, 0.021129215240478515, 0.021169696807861328, 0.02121161651611328, 0.021331552505493165, 0.021259744644165038, 0.02111199951171875, 0.021544704437255858, 0.02109235191345215, 0.021151744842529296, 0.02113155174255371, 0.02097737693786621, 0.021137184143066406, 0.021589567184448242, 0.021234336853027343, 0.021570688247680665, 0.020976512908935548, 0.021024063110351564, 0.02099884796142578, 0.022760671615600588, 0.02390838432312012, 0.021390079498291015, 0.021200895309448242, 0.02103091239929199, 0.021743616104125976, 0.021024768829345702, 0.020891679763793945, 0.020966432571411134, 0.021060543060302736, 0.02098899269104004, 0.02093769645690918, 0.020985952377319338, 0.021132192611694335, 0.02106857681274414, 0.021004480361938478, 0.02098092842102051, 0.02099078369140625, 0.02097267150878906, 0.020839296340942382, 0.021020671844482423, 0.021018848419189454, 0.02107779121398926, 0.021022111892700195, 0.021662303924560547, 0.021121023178100586]",tokens/s,47.14981165714417,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,810.528768,554.631168,0.0,159.383552,143.673856,s,1,7.36071435546875,7.36071435546875,0.0,7.36071435546875,7.36071435546875,7.36071435546875,7.36071435546875,[7.36071435546875],,kWh,1.0813214037504318e-05,1.185590018481142e-06,3.592502873996095e-06,1.5591306929981556e-05,,MB,1303.117824,609.15712,0.0,199.22944,186.684928,s,32,0.20167136001586916,0.006302230000495911,0.000168425978490321,0.006266864061355591,0.006366675138473511,0.006626598286628723,0.006932350602149964,"[0.006303711891174317, 0.006245567798614502, 0.006314752101898194, 0.006281055927276611, 0.006163584232330322, 0.006309823989868164, 0.006356768131256104, 0.006266240119934082, 0.006225791931152344, 0.006520671844482422, 0.0062631678581237795, 0.006187839984893799, 0.006303487777709961, 0.006266240119934082, 0.006181600093841552, 0.006254816055297851, 0.006316256046295166, 0.007011551856994629, 0.006272352218627929, 0.006756063938140869, 0.006186975955963135, 0.006275455951690673, 0.006190144062042237, 0.006277184009552002, 0.0062674880027771, 0.00620630407333374, 0.006271008014678955, 0.006223264217376709, 0.006367775917053222, 0.006219423770904541, 0.00614844799041748, 0.006236544132232666]",tokens/s,40620.54224930792,kWh,1.8653826384802866e-07,2.0571929875286933e-08,7.918076098593644e-08,2.8629095470925205e-07,tokens/kWh,894195208.7168993,MB,1316.548608,613.351424,0.0,203.423744,186.687488,s,32,10.012835144042967,0.31290109825134277,0.001958896783369078,0.31242938232421874,0.3154934631347656,0.31649754333496094,0.31895257751464845,"[0.3195748291015625, 0.3123941955566406, 0.31391668701171876, 0.3119555969238281, 0.31176416015625, 0.3124645690917969, 0.3128881225585938, 0.3126595153808594, 0.31342831420898437, 0.31559823608398435, 0.31210430908203124, 0.3114669494628906, 0.3137422180175781, 0.3132228088378906, 0.31349627685546877, 0.31101092529296875, 0.3145505065917969, 0.3116888427734375, 0.3113709716796875, 0.31756756591796875, 0.31312869262695314, 0.3112661437988281, 0.3110016784667969, 0.31158636474609375, 0.3106305847167969, 0.3118937072753906, 0.3117104187011719, 0.31084735107421874, 0.3113258972167969, 0.3141195983886719, 0.3128370361328125, 0.3156220703125]",tokens/s,201.3415751880623,kWh,8.93105547428338e-06,9.849508270135154e-07,3.2166215389534714e-06,1.3132627840250369e-05,tokens/kWh,4797212.01014396,,s,2016,9.99823603630067,0.0049594424783237405,0.00011499753868454484,0.004934175968170166,0.0050223200321197516,0.005089400053024292,0.00545977780818939,"[0.005140672206878662, 0.005021312236785889, 0.005017792224884033, 0.005008959770202636, 0.00498137617111206, 0.00510975980758667, 0.005124095916748047, 0.005224448204040527, 0.005203648090362549, 0.005284160137176514, 0.005240928173065185, 0.005596799850463867, 0.005198304176330566, 0.005713727951049805, 0.005277696132659912, 0.0051113600730896, 0.005108160018920898, 0.0050687680244445804, 0.0050826559066772465, 0.005220320224761963, 0.005112287998199463, 0.005017951965332031, 0.005125855922698975, 0.005063680171966553, 0.004981503963470459, 0.00505401611328125, 0.005067455768585205, 0.0050785279273986815, 0.004995584011077881, 0.0049909758567810054, 0.004932960033416748, 0.004951807975769043, 0.004944799900054931, 0.004920864105224609, 0.004951935768127441, 0.0050183038711547855, 0.0049558720588684085, 0.004910880088806152, 0.004959904193878174, 0.005035967826843262, 0.0049795198440551755, 0.004982495784759521, 0.005095583915710449, 0.005507232189178467, 0.0049749441146850586, 
0.004953728199005127, 0.0050094079971313476, 0.00496454381942749, 0.0049723520278930665, 0.005053440093994141, 0.005011744022369384, 0.005027711868286133, 0.004985695838928223, 0.004980127811431884, 0.005007455825805664, 0.004971007823944092, 0.004956319808959961, 0.005011295795440674, 0.004978240013122559, 0.005058559894561767, 0.0049333758354187015, 0.004917695999145508, 0.004942240238189698, 0.004884479999542236, 0.0049879360198974605, 0.004936768054962158, 0.00494377613067627, 0.0049064640998840335, 0.004995872020721435, 0.00494975996017456, 0.004927487850189209, 0.004927487850189209, 0.004974592208862305, 0.004984831809997559, 0.004917407989501953, 0.004955999851226807, 0.00502569580078125, 0.00498803186416626, 0.005108191967010498, 0.004957024097442627, 0.004972415924072266, 0.004992800235748291, 0.0049500160217285155, 0.004933792114257813, 0.00493174409866333, 0.00517468786239624, 0.0049647679328918455, 0.004933504104614258, 0.004915200233459473, 0.005033120155334473, 0.004936543941497803, 0.00491315221786499, 0.004910528182983398, 0.004990592002868653, 0.004946879863739014, 0.004924575805664063, 0.004916063785552979, 0.004972479820251465, 0.004970111846923828, 0.004909408092498779, 0.004986048221588135, 0.005101471900939941, 0.004936704158782959, 0.004919519901275635, 0.004898591995239258, 0.004941823959350586, 0.004966400146484375, 0.004911327838897705, 0.004898335933685303, 0.004905055999755859, 0.005129727840423584, 0.004915872097015381, 0.004892096042633056, 0.004885056018829346, 0.00494979190826416, 0.004937119960784912, 0.004887680053710938, 0.004894303798675537, 0.0049890241622924806, 0.004951488018035888, 0.00487286376953125, 0.004898496150970459, 0.004896992206573486, 0.004978496074676514, 0.004929728031158447, 0.004919328212738037, 0.004826272010803222, 0.004882847785949707, 0.004951935768127441, 0.004966976165771485, 0.004937248229980469, 0.004923871994018555, 0.004931615829467773, 0.005019040107727051, 0.004958720207214356, 0.004925536155700684, 0.004916607856750488, 0.0050366721153259275, 0.00497376012802124, 0.004928319931030273, 0.0049235520362854, 0.005005023956298828, 0.004947743892669678, 0.005089759826660156, 0.0050198397636413575, 0.005158592224121094, 0.004959392070770264, 0.004915743827819824, 0.004919616222381592, 0.004988319873809814, 0.0049359679222106935, 0.004966879844665527, 0.004998688220977783, 0.004989247798919677, 0.00494979190826416, 0.004913375854492187, 0.004925439834594727, 0.005012767791748047, 0.004956831932067871, 0.004925504207611084, 0.004917535781860351, 0.004931104183197021, 0.004954304218292236, 0.004941343784332275, 0.00505244779586792, 0.005046720027923584, 0.004976064205169678, 0.005040703773498535, 0.005074944019317627, 0.005128064155578613, 0.005016736030578613, 0.0049714879989624025, 0.004989120006561279, 0.0050133118629455565, 0.004978432178497315, 0.005203936100006103, 0.004962592124938965, 0.0050709757804870605, 0.004982367992401123, 0.004919583797454834, 0.004923391819000244, 0.00499507188796997, 0.005074944019317627, 0.004941023826599121, 0.0049344320297241215, 0.004974592208862305, 0.004945343971252442, 0.004926015853881836, 0.004914976119995117, 0.004784383773803711, 0.004888319969177246, 0.00498089599609375, 0.004916927814483643, 0.004890783786773682, 0.004888576030731201, 0.0049006400108337405, 0.004962528228759765, 0.004902912139892578, 0.004898111820220947, 0.0048913278579711915, 0.004971903800964355, 0.00493395185470581, 0.004888895988464355, 0.004897791862487793, 0.004968800067901611, 0.004915647983551026, 
0.005030111789703369, 0.0049192957878112795, 0.004999135971069336, 0.005074304103851318, 0.0049424958229064946, 0.004948224067687988, 0.00496614408493042, 0.004974592208862305, 0.004943871974945068, 0.0049269118309021, 0.004936255931854248, 0.0050152640342712405, 0.004945631980895996, 0.004926015853881836, 0.004929535865783692, 0.005045472145080566, 0.004966911792755127, 0.00493126392364502, 0.004933792114257813, 0.004971968173980713, 0.004939807891845703, 0.004943967819213867, 0.004926047801971436, 0.004999296188354492, 0.004952191829681396, 0.004931359767913818, 0.004951488018035888, 0.004997951984405518, 0.004953311920166016, 0.00495411205291748, 0.004926239967346192, 0.005006656169891357, 0.004956863880157471, 0.004921343803405762, 0.005025792121887207, 0.004904960155487061, 0.0049725441932678225, 0.00491315221786499, 0.004917247772216797, 0.004912320137023926, 0.004989759922027588, 0.004940991878509522, 0.004911231994628906, 0.004924320220947266, 0.00499894380569458, 0.004941823959350586, 0.004853055953979492, 0.004907264232635498, 0.00490070390701294, 0.004915808200836182, 0.004995168209075928, 0.004951871871948242, 0.004908383846282959, 0.004913919925689697, 0.004958208084106445, 0.0049090561866760255, 0.004929376125335693, 0.0049153599739074705, 0.00491315221786499, 0.004989952087402344, 0.004951039791107178, 0.004921343803405762, 0.004939199924468994, 0.004987743854522705, 0.004939487934112549, 0.004921343803405762, 0.0049210238456726075, 0.0049708161354064945, 0.004941311836242676, 0.004898560047149658, 0.004932415962219238, 0.0049376640319824215, 0.004955520153045654, 0.0049424638748168945, 0.004907360076904297, 0.004898752212524414, 0.004970208168029785, 0.005074912071228027, 0.004902944087982177, 0.004917247772216797, 0.0049879360198974605, 0.004957151889801025, 0.004929535865783692, 0.00491315221786499, 0.004974751949310303, 0.004928544044494629, 0.005005152225494385, 0.004913536071777344, 0.004954720020294189, 0.004941215991973877, 0.004946527957916259, 0.004902912139892578, 0.004898816108703613, 0.004978367805480957, 0.004931104183197021, 0.0049119038581848145, 0.0049023680686950686, 0.0049894719123840334, 0.004935679912567138, 0.004925407886505127, 0.004931615829467773, 0.005005536079406739, 0.005074719905853272, 0.005015552043914795, 0.004921535968780518, 0.004998176097869873, 0.004944255828857422, 0.004917407989501953, 0.004897280216217041, 0.004851712226867676, 0.004919551849365235, 0.005010848045349121, 0.004945600032806396, 0.0049097280502319336, 0.0049021439552307125, 0.004976607799530029, 0.004934783935546875, 0.004906655788421631, 0.004908383846282959, 0.004967328071594238, 0.0049600000381469726, 0.004903168201446533, 0.004921088218688965, 0.004898816108703613, 0.004984831809997559, 0.004941760063171386, 0.004912864208221435, 0.004908736228942871, 0.004991615772247314, 0.004947840213775635, 0.0049032001495361325, 0.0049023680686950686, 0.0049872961044311525, 0.004925439834594727, 0.004902912139892578, 0.004902976036071777, 0.004929599761962891, 0.004935647964477539, 0.0049622077941894535, 0.004904960155487061, 0.004902912139892578, 0.004952064037322998, 0.004908480167388916, 0.004892928123474121, 0.004902944087982177, 0.004961599826812744, 0.004930528163909912, 0.0049170880317687985, 0.0049025602340698245, 0.0049541440010070804, 0.004968544006347656, 0.004913119792938232, 0.004897183895111084, 0.004917247772216797, 0.004976640224456787, 0.0049192638397216795, 0.005021535873413086, 0.004917439937591553, 0.004976640224456787, 0.004929535865783692, 
0.00491487979888916, 0.004968768119812012, 0.0053116798400878905, 0.004977471828460693, 0.005061952114105225, 0.004948671817779541, 0.005167103767395019, 0.005076288223266601, 0.004963007926940918, 0.0049575681686401364, 0.00514899206161499, 0.004997439861297607, 0.004909503936767578, 0.0049459199905395506, 0.0049090561866760255, 0.004963967800140381, 0.004938432216644287, 0.004910783767700195, 0.0049294400215148925, 0.004976736068725586, 0.0050440959930419926, 0.004923520088195801, 0.004907008171081543, 0.004951231956481934, 0.004968992233276367, 0.004923679828643799, 0.004918816089630127, 0.004905087947845459, 0.00501091194152832, 0.00504307222366333, 0.004947872161865235, 0.004923103809356689, 0.0050191679000854495, 0.004968768119812012, 0.004929215908050537, 0.004917727947235107, 0.005021215915679932, 0.004946688175201416, 0.004924960136413574, 0.004909952163696289, 0.004976319789886475, 0.004957312107086181, 0.005378015995025635, 0.00493455982208252, 0.005002816200256347, 0.004958816051483154, 0.004912992000579834, 0.004914976119995117, 0.004983007907867432, 0.005089280128479004, 0.004925439834594727, 0.004947968006134033, 0.004994944095611572, 0.004999135971069336, 0.004921696186065674, 0.0049227199554443355, 0.0049730238914489746, 0.00497599983215332, 0.0049056000709533695, 0.004904096126556397, 0.004889567852020263, 0.0049844799041748045, 0.0050845761299133305, 0.004889408111572266, 0.004913504123687744, 0.004961952209472657, 0.0049552001953125, 0.004926591873168945, 0.004994688034057618, 0.004989120006561279, 0.004966080188751221, 0.004919392108917236, 0.004902400016784668, 0.004969183921813965, 0.004935679912567138, 0.004882431983947754, 0.004911104202270508, 0.004890048027038575, 0.004887328147888183, 0.00495411205291748, 0.004896543979644776, 0.004890175819396972, 0.004903359889984131, 0.0049827837944030765, 0.004960256099700928, 0.004921343803405762, 0.00491264009475708, 0.004927999973297119, 0.005006432056427002, 0.004934336185455322, 0.004925343990325928, 0.00494214391708374, 0.004995296001434326, 0.0050001277923583986, 0.004904863834381104, 0.0049285759925842285, 0.004963712215423584, 0.004935423851013184, 0.004906847953796387, 0.0049222722053527835, 0.005035871982574463, 0.004974751949310303, 0.0049459199905395506, 0.0049502081871032715, 0.00498252820968628, 0.004949535846710205, 0.0049342079162597655, 0.004937376022338867, 0.004990496158599853, 0.0049613118171691895, 0.004962048053741455, 0.004935679912567138, 0.004923423767089844, 0.005021664142608642, 0.004941823959350586, 0.005175295829772949, 0.0049348797798156735, 0.005085504055023194, 0.004910880088806152, 0.0050183038711547855, 0.004976640224456787, 0.004999167919158935, 0.004933440208435059, 0.004933631896972656, 0.004928736209869384, 0.005006303787231445, 0.005115903854370117, 0.004928639888763428, 0.004961152076721192, 0.004999167919158935, 0.004968448162078858, 0.004929120063781738, 0.004929952144622803, 0.004993023872375488, 0.004947008132934571, 0.004916351795196533, 0.00490831995010376, 0.004977439880371094, 0.00488486385345459, 0.00496288013458252, 0.004927616119384765, 0.0049212160110473635, 0.004904960155487061, 0.004974143981933594, 0.004938240051269531, 0.0049006400108337405, 0.004909120082855225, 0.005009664058685303, 0.004935776233673096, 0.004910655975341797, 0.004909247875213623, 0.004906367778778076, 0.004972799777984619, 0.004949440002441406, 0.004916160106658935, 0.005070655822753906, 0.005066944122314453, 0.004943871974945068, 0.004925439834594727, 0.004929376125335693, 0.0050157117843627926, 
0.0049500160217285155, 0.004941184043884278, 0.004917600154876709, 0.00504585599899292, 0.004953855991363525, 0.004918208122253418, 0.0050503678321838375, 0.005011231899261475, 0.00512175989151001, 0.004951583862304688, 0.004952479839324951, 0.005007936000823975, 0.0049618239402771, 0.004955935955047608, 0.005056672096252442, 0.005001503944396973, 0.004983168125152588, 0.0049558720588684085, 0.004929696083068848, 0.004994368076324463, 0.004965055942535401, 0.004923391819000244, 0.004925439834594727, 0.00504249620437622, 0.005052000045776367, 0.004917376041412354, 0.005030111789703369, 0.004973887920379639, 0.004962751865386963, 0.00491315221786499, 0.004927487850189209, 0.00499129581451416, 0.005121376037597656, 0.005063007831573487, 0.004966368198394775, 0.004960256099700928, 0.004940095901489258, 0.004929056167602539, 0.004953663825988769, 0.004919936180114746, 0.004837535858154297, 0.005166399955749512, 0.005185632228851318, 0.004962944030761719, 0.004972511768341064, 0.005197824001312256, 0.0049517440795898435, 0.004940095901489258, 0.005025951862335205, 0.005374144077301026, 0.004986144065856933, 0.005155136108398437, 0.005506175994873047, 0.005598144054412842, 0.004953375816345215, 0.005445824146270752, 0.0049352960586547855, 0.004920224189758301, 0.004968448162078858, 0.005054304122924805, 0.004935808181762695, 0.004911136150360107, 0.004914656162261963, 0.004999551773071289, 0.0049378881454467775, 0.0050462718009948735, 0.0049090561866760255, 0.004986688137054444, 0.004930047988891601, 0.004910816192626953, 0.004910399913787842, 0.004975264072418213, 0.004916800022125244, 0.0049996161460876464, 0.005217440128326416, 0.004985695838928223, 0.004925439834594727, 0.004909215927124023, 0.004906847953796387, 0.004976640224456787, 0.00496230411529541, 0.004941823959350586, 0.004915200233459473, 0.004966176033020019, 0.0049686717987060544, 0.0050421757698059086, 0.004941855907440186, 0.0049072961807250974, 0.005013408184051514, 0.004947743892669678, 0.004909023761749267, 0.004920896053314209, 0.004978752136230469, 0.004962719917297363, 0.0049090561866760255, 0.004900735855102539, 0.004991104125976562, 0.004924831867218017, 0.004978591918945312, 0.004899136066436768, 0.004968832015991211, 0.004937727928161621, 0.004931039810180664, 0.0048653120994567875, 0.005022496223449707, 0.004890143871307373, 0.004959936141967773, 0.004913887977600098, 0.004911104202270508, 0.004911104202270508, 0.0049725441932678225, 0.005000736236572266, 0.004926271915435791, 0.004904607772827149, 0.004933184146881104, 0.004931104183197021, 0.0049284157752990726, 0.004909023761749267, 0.004884064197540283, 0.004958655834197998, 0.004920479774475098, 0.004932447910308838, 0.004902912139892578, 0.004953440189361572, 0.005042431831359863, 0.004925856113433838, 0.0049621758460998535, 0.005000991821289063, 0.005000927925109863, 0.004928127765655517, 0.004937727928161621, 0.004986656188964844, 0.004978591918945312, 0.004933695793151855, 0.004900767803192139, 0.004911168098449707, 0.004986591815948486, 0.004927455902099609, 0.004942431926727295, 0.004925248146057129, 0.004997312068939209, 0.004957888126373291, 0.004935679912567138, 0.004933152198791504, 0.005063456058502197, 0.005000671863555908, 0.004907072067260742, 0.004891007900238037, 0.004984960079193115, 0.004939487934112549, 0.004910687923431397, 0.005075615882873535, 0.00497049617767334, 0.004955264091491699, 0.004903808116912842, 0.004880383968353271, 0.004888576030731201, 0.004976640224456787, 0.004904416084289551, 0.004928224086761475, 0.00490067195892334, 
0.005152607917785644, 0.004933536052703857, 0.004901120185852051, 0.004918655872344971, 0.005017792224884033, 0.004832672119140625, 0.004946752071380615, 0.004916192054748535, 0.0049160318374633786, 0.004917247772216797, 0.0049725441932678225, 0.004931583881378174, 0.004918816089630127, 0.004915840148925781, 0.005025023937225342, 0.005007775783538818, 0.004924736022949219, 0.00489353609085083, 0.004974656105041504, 0.005006400108337403, 0.0049081602096557615, 0.004899680137634277, 0.004942431926727295, 0.004933919906616211, 0.004905087947845459, 0.004896607875823975, 0.004905055999755859, 0.004967584133148194, 0.00491974401473999, 0.004903264045715332, 0.004900479793548584, 0.004979104042053223, 0.004949215888977051, 0.00491542387008667, 0.004913504123687744, 0.00508134412765503, 0.004952064037322998, 0.004927487850189209, 0.005005311965942383, 0.004960256099700928, 0.004959455966949463, 0.004913951873779297, 0.0049164161682128905, 0.004938560009002686, 0.005009088039398193, 0.00492575979232788, 0.004939519882202149, 0.004912928104400634, 0.004972671985626221, 0.004933568000793457, 0.004897024154663086, 0.004911263942718506, 0.004945216178894043, 0.004928192138671875, 0.0049210238456726075, 0.004892223834991455, 0.004897535800933838, 0.0049909758567810054, 0.004937727928161621, 0.0049285759925842285, 0.004905920028686523, 0.0049827837944030765, 0.004923391819000244, 0.004908927917480469, 0.004898367881774902, 0.005033728122711182, 0.004947999954223633, 0.004894527912139892, 0.004827231884002686, 0.004894847869873047, 0.004892096042633056, 0.004960671901702881, 0.004922783851623535, 0.004914944171905518, 0.004908127784729004, 0.004954048156738281, 0.004942048072814942, 0.0049006080627441405, 0.004907008171081543, 0.004935488224029541, 0.004982016086578369, 0.004944831848144531, 0.004978687763214112, 0.004944096088409424, 0.004994592189788818, 0.0049136638641357425, 0.004929120063781738, 0.004923232078552246, 0.005078368186950684, 0.005070047855377197, 0.005023136138916016, 0.005102047920227051, 0.0051274561882019044, 0.005151391983032227, 0.005082111835479736, 0.005128928184509277, 0.005075327873229981, 0.00504531192779541, 0.005036831855773926, 0.005058464050292969, 0.004991072177886963, 0.004964352130889893, 0.00495411205291748, 0.005027103900909424, 0.004967040061950684, 0.004925536155700684, 0.00491315221786499, 0.005023744106292725, 0.004964416027069092, 0.00496940803527832, 0.0049316477775573735, 0.004944767951965332, 0.00494598388671875, 0.004967455863952637, 0.005005695819854736, 0.00524073600769043, 0.004950975894927979, 0.004904704093933105, 0.004911327838897705, 0.0049287681579589845, 0.004968992233276367, 0.0049231362342834475, 0.004903456211090088, 0.004939487934112549, 0.005029888153076172, 0.004932928085327149, 0.004918975830078125, 0.004908415794372558, 0.004960959911346436, 0.0049304962158203125, 0.004906144142150879, 0.004806911945343018, 0.004887839794158935, 0.004902656078338623, 0.00496943998336792, 0.004928736209869384, 0.0048997759819030765, 0.004906847953796387, 0.005009024143218994, 0.004940159797668457, 0.0049229440689086915, 0.005048031806945801, 0.005133376121520996, 0.005221888065338135, 0.005219615936279297, 0.005217152118682861, 0.005197247982025146, 0.0051205759048461915, 0.005043424129486084, 0.005030687808990479, 0.004972415924072266, 0.004927584171295166, 0.004987167835235596, 0.00498252820968628, 0.004974592208862305, 0.004923488140106201, 0.004908991813659668, 0.004935520172119141, 0.004969791889190674, 0.004905663967132569, 0.004921088218688965, 
0.004890975952148438, 0.004962495803833008, 0.004923232078552246, 0.00490012788772583, 0.0048904638290405275, 0.005042848110198975, 0.004940000057220459, 0.004924799919128418, 0.004895359992980957, 0.004910272121429443, 0.004926271915435791, 0.004955967903137207, 0.004902239799499512, 0.004901535987854004, 0.004985023975372314, 0.00495411205291748, 0.004924960136413574, 0.004925920009613037, 0.005087232112884522, 0.004943679809570312, 0.0049276800155639644, 0.00491315221786499, 0.0049862079620361325, 0.004922016143798828, 0.004915200233459473, 0.004923456192016602, 0.004978240013122559, 0.004944255828857422, 0.004921343803405762, 0.004927487850189209, 0.004892096042633056, 0.004989823818206787, 0.0049202880859375, 0.004890848159790039, 0.004896895885467529, 0.004912543773651123, 0.004947328090667725, 0.0049344320297241215, 0.004889984130859375, 0.004879072189331055, 0.004900864124298096, 0.004975743770599365, 0.004915296077728271, 0.004877088069915772, 0.004889664173126221, 0.004992127895355225, 0.0049269118309021, 0.004934304237365723, 0.004921055793762207, 0.004968448162078858, 0.004997312068939209, 0.004929344177246094, 0.004907008171081543, 0.005462240219116211, 0.005614751815795898, 0.006055776119232178, 0.005397280216217041, 0.00496454381942749, 0.004939104080200195, 0.004997600078582764, 0.004936863899230957, 0.0049090561866760255, 0.004922207832336426, 0.004945504188537598, 0.004958752155303955, 0.004921120166778564, 0.0048919677734375, 0.004887328147888183, 0.004968448162078858, 0.005000671863555908, 0.0048932161331176754, 0.004882016181945801, 0.004941472053527832, 0.004913919925689697, 0.004895999908447266, 0.004877056121826172, 0.004914175987243652, 0.0049398717880249025, 0.004885536193847656, 0.004894527912139892, 0.004881631851196289, 0.004950655937194824, 0.004907392024993897, 0.004878176212310791, 0.004881984233856202, 0.0049647998809814456, 0.004911104202270508, 0.004909279823303222, 0.004888351917266846, 0.004896768093109131, 0.0050032639503479, 0.0049168958663940426, 0.004946271896362305, 0.004902463912963867, 0.0049853758811950685, 0.004935840129852295, 0.004874239921569825, 0.004917247772216797, 0.004902944087982177, 0.004892608165740967, 0.004976672172546387, 0.004905151844024658, 0.004893631935119629, 0.004963551998138428, 0.004996767997741699, 0.004941023826599121, 0.004917791843414307, 0.00491545581817627, 0.00497049617767334, 0.004925439834594727, 0.0049209918975830075, 0.004925695896148682, 0.004943967819213867, 0.00495411205291748, 0.004917247772216797, 0.004897024154663086, 0.004938784122467041, 0.0049731841087341305, 0.004943583965301514, 0.004929887771606445, 0.004886655807495117, 0.0050627517700195315, 0.004928959846496582, 0.004893152236938476, 0.004908959865570068, 0.004966400146484375, 0.004947135925292969, 0.004905792236328125, 0.004896224021911621, 0.0049129600524902345, 0.00497327995300293, 0.004920608043670655, 0.0048913278579711915, 0.004896800041198731, 0.004993311882019043, 0.0049286079406738285, 0.005003903865814209, 0.004895967960357666, 0.004973343849182129, 0.004930592060089111, 0.004881631851196289, 0.004919007778167724, 0.004923423767089844, 0.004924928188323975, 0.004892704010009766, 0.004876959800720215, 0.004873631954193115, 0.0050917439460754395, 0.0049909758567810054, 0.004898303985595703, 0.004936192035675049, 0.00498092794418335, 0.004923200130462647, 0.004889855861663819, 0.004868864059448242, 0.004904895782470703, 0.004919360160827637, 0.004933536052703857, 0.004888351917266846, 0.004906400203704834, 0.004890431880950927, 
0.004958144187927246, 0.004911968231201172, 0.004875743865966797, 0.004886208057403564, 0.004895584106445312, 0.004961696147918701, 0.004958816051483154, 0.0048798398971557615, 0.004907872200012207, 0.004973824024200439, 0.004932127952575684, 0.004922560214996338, 0.004944608211517334, 0.00499507188796997, 0.004957312107086181, 0.004907167911529541, 0.004930560111999512, 0.004936768054962158, 0.004952544212341308, 0.005071040153503418, 0.004918528079986572, 0.004938144207000733, 0.0050059518814086915, 0.004946879863739014, 0.004903711795806885, 0.004908063888549805, 0.004983551979064941, 0.004945312023162842, 0.004909471988677978, 0.004933856010437012, 0.005012735843658447, 0.004910016059875488, 0.00493123197555542, 0.0048990721702575684, 0.0050711679458618165, 0.004966176033020019, 0.005049824237823486, 0.004936031818389893, 0.005001408100128174, 0.005069888114929199, 0.004913856029510498, 0.004943168163299561, 0.004979584217071533, 0.004954016208648682, 0.0049214081764221195, 0.00490831995010376, 0.00495084810256958, 0.005081088066101074, 0.00493945598602295, 0.0049155521392822265, 0.004921088218688965, 0.0049686717987060544, 0.004923391819000244, 0.004925439834594727, 0.004906432151794433, 0.004976704120635987, 0.005050655841827393, 0.005214272022247314, 0.00598031997680664, 0.005379871845245361, 0.0054271998405456545, 0.00486240005493164, 0.004984640121459961, 0.005197792053222656, 0.004910399913787842, 0.005126495838165283, 0.004956416130065918, 0.004916607856750488, 0.004905983924865722, 0.0049600000381469726, 0.004955615997314453, 0.004921472072601319, 0.004901567935943604, 0.004904575824737549, 0.004948480129241943, 0.004963744163513184, 0.004903359889984131, 0.0050670399665832516, 0.004990687847137451, 0.00493171215057373, 0.004893887996673584, 0.004917024135589599, 0.0049836158752441405, 0.004945727825164795, 0.0048867521286010745, 0.004877471923828125, 0.004957312107086181, 0.004962016105651856, 0.004925439834594727, 0.004931583881378174, 0.004911104202270508, 0.005005504131317139, 0.004943679809570312, 0.004941504001617431, 0.004919616222381592, 0.004993279933929443, 0.004988287925720215, 0.0049313921928405766, 0.004923967838287353, 0.0049799041748046875, 0.005020480155944824, 0.0049296321868896485, 0.004906976222991943, 0.004975872039794922, 0.004915967941284179, 0.004898975849151612, 0.004902688026428223, 0.004883488178253174, 0.004956768035888672, 0.00492902421951294, 0.004879039764404297, 0.0048807039260864256, 0.0049409279823303225, 0.004928063869476318, 0.004903103828430176, 0.004889920234680176, 0.004934336185455322, 0.00501145601272583, 0.004930880069732666, 0.00490937614440918, 0.004872576236724853, 0.004959968090057373, 0.0048925762176513675, 0.004874207973480225, 0.004814655780792237, 0.00488640022277832, 0.004907519817352295, 0.0049721598625183105, 0.0049276800155639644, 0.004904607772827149, 0.00491487979888916, 0.00497926378250122, 0.004941279888153076, 0.0049136319160461425, 0.004882048130035401, 0.00497654390335083, 0.005003615856170655, 0.00490115213394165, 0.004896768093109131, 0.004882431983947754, 0.0050299839973449705, 0.0049294400215148925, 0.004900352001190185, 0.004905471801757813, 0.00495849609375, 0.004919072151184082, 0.004881824016571045, 0.0048891201019287105, 0.004952064037322998, 0.004922560214996338, 0.004895552158355713, 0.004903935909271241, 0.004955135822296143, 0.005002528190612793, 0.00491158390045166, 0.004905216217041016, 0.004933919906616211, 0.004977952003479004, 0.0049320321083068846, 0.004898816108703613, 0.004892831802368164, 
0.004970335960388183, 0.004939775943756103, 0.004907392024993897, 0.004901599884033203, 0.0049407038688659664, 0.004962016105651856, 0.0049169921875, 0.004931583881378174, 0.0048893442153930666, 0.004987711906433105, 0.004901855945587158, 0.004886271953582763, 0.004904352188110352, 0.005019584178924561, 0.004946080207824707, 0.0049237761497497555, 0.00492083215713501, 0.0049777917861938475, 0.004946815967559815, 0.00494271993637085, 0.004933023929595947, 0.004973152160644531, 0.0049903359413146975, 0.004931968212127685, 0.004941055774688721, 0.005190656185150146, 0.005370783805847168, 0.005498591899871826, 0.005083424091339111, 0.0050032639503479, 0.005531712055206299, 0.007346208095550537, 0.005023839950561523, 0.005019743919372558, 0.004979423999786377, 0.00510595178604126, 0.0049387521743774416, 0.004962016105651856, 0.005017600059509277, 0.004980480194091797, 0.004947679996490479, 0.0049507198333740236, 0.00500105619430542, 0.006049280166625977, 0.005109632015228271, 0.005034239768981934, 0.004962592124938965, 0.004960256099700928, 0.004912896156311035, 0.00495417594909668, 0.0049359679222106935, 0.0049213762283325194, 0.004894688129425049, 0.004888895988464355, 0.004947648048400879, 0.004910463809967041, 0.0048847999572753905, 0.0048807039260864256, 0.00499894380569458, 0.005361504077911377, 0.004901247978210449, 0.004900320053100586, 0.00495465612411499, 0.004941823959350586, 0.004888351917266846, 0.004899040222167968, 0.004959263801574707, 0.004926047801971436, 0.004929567813873291, 0.004911776065826416, 0.004881184101104736, 0.0049919037818908696, 0.004912896156311035, 0.004873983860015869, 0.004878911972045898, 0.004966335773468018, 0.004917247772216797, 0.004917247772216797, 0.004891808032989502, 0.004919871807098388, 0.005060895919799805, 0.004900288105010986, 0.004883008003234863, 0.004906816005706787, 0.0049823040962219236, 0.004917920112609863, 0.004888063907623291, 0.0048849921226501464, 0.004964352130889893, 0.004827328205108642, 0.004947775840759277, 0.0049192957878112795, 0.0049376640319824215, 0.004934783935546875, 0.005016831874847412, 0.0049714879989624025, 0.004960383892059326, 0.004952672004699707, 0.004994688034057618, 0.005103839874267578, 0.004943168163299561, 0.00492796802520752, 0.005042272090911865, 0.004956448078155517, 0.004939775943756103, 0.004912608146667481, 0.004963871955871582, 0.004944896221160889, 0.0049090561866760255, 0.004919360160827637, 0.004949952125549316, 0.004933631896972656, 0.004900191783905029, 0.004895391941070556, 0.004894720077514648, 0.0049437122344970705, 0.004933792114257813, 0.00493126392364502, 0.00493555212020874, 0.005330783843994141, 0.004946368217468262, 0.004925504207611084, 0.005036128044128418, 0.005056511878967285, 0.004952064037322998, 0.004937727928161621, 0.004927072048187256, 0.005015967845916748, 0.004974688053131103, 0.004929152011871338, 0.004934144020080566, 0.005146399974822998, 0.005107423782348633, 0.004954304218292236, 0.004916768074035644, 0.004983359813690185, 0.0049519681930541995, 0.004911200046539306, 0.004898816108703613, 0.005006879806518555, 0.004945727825164795, 0.004906752109527588, 0.0049222722053527835, 0.004996992111206054, 0.004970111846923828, 0.00497270393371582, 0.004949920177459717, 0.004972991943359375, 0.004978687763214112, 0.004941504001617431, 0.004939807891845703, 0.004943967819213867, 0.0048009281158447265, 0.004974815845489502, 0.004929279804229737, 0.004903264045715332, 0.004910496234893799, 0.004899199962615967, 0.004963520050048828, 0.004920000076293945, 0.004906847953796387, 
0.004890912055969239, 0.004966271877288818, 0.004928639888763428, 0.004889408111572266, 0.004907072067260742, 0.005000192165374756, 0.004924416065216064, 0.004894720077514648, 0.004911104202270508, 0.004931583881378174, 0.004936863899230957, 0.004931968212127685, 0.004887135982513427, 0.004924960136413574, 0.004995488166809082, 0.004986720085144043, 0.004929855823516845, 0.004908927917480469, 0.005014976024627685, 0.004952544212341308, 0.004911295890808106, 0.004915232181549072, 0.004966176033020019, 0.00501145601272583, 0.00491545581817627, 0.004896512031555176, 0.004941472053527832, 0.004947487831115723, 0.004911839962005615, 0.004898623943328857, 0.0048887357711791994, 0.005002848148345947, 0.004914847850799561, 0.004925792217254639, 0.004913504123687744, 0.004980671882629395, 0.004948224067687988, 0.004925439834594727, 0.004936863899230957, 0.0049836478233337405, 0.004995007991790771, 0.004892735958099365, 0.004918784141540527, 0.0049333758354187015, 0.0049301118850708005, 0.004929887771606445, 0.004890687942504883, 0.004890399932861328, 0.005023744106292725, 0.00493779182434082, 0.004925216197967529, 0.0049378881454467775, 0.005000895977020263, 0.004963647842407226, 0.004881184101104736, 0.0049147200584411625, 0.004911359786987305, 0.004883008003234863, 0.005113664150238037, 0.004913087844848633, 0.004898240089416504, 0.004897632122039795, 0.004973440170288086, 0.004946368217468262, 0.00490339183807373, 0.0049147200584411625, 0.004962783813476562, 0.004969632148742676, 0.00492575979232788, 0.0049025602340698245, 0.004909952163696289, 0.0049725441932678225, 0.004911104202270508, 0.004894368171691895, 0.00494646406173706, 0.0049764480590820314, 0.004923711776733399, 0.004886208057403564, 0.004902847766876221, 0.00495849609375, 0.0049201598167419435, 0.004888576030731201, 0.00491206407546997, 0.004897024154663086, 0.004980480194091797, 0.004921311855316162, 0.004915008068084717, 0.0048949441909790035, 0.004957759857177734, 0.004911456108093262, 0.004925024032592773, 0.004893184185028076, 0.004964352130889893, 0.004918528079986572, 0.0048893442153930666, 0.004900544166564941, 0.004886847972869873, 0.004978687763214112, 0.004914783954620362, 0.004896448135375976, 0.004905824184417725, 0.004999040126800537, 0.004924863815307617, 0.004925280094146728, 0.0048891201019287105, 0.004961631774902344, 0.004948832035064697, 0.004908512115478516, 0.004958752155303955, 0.004976640224456787, 0.004952064037322998, 0.004933631896972656, 0.0048865280151367185, 0.004896768093109131, 0.005003168106079102, 0.004943967819213867, 0.004988639831542969, 0.00487772798538208, 0.00489740800857544, 0.005010816097259521, 0.004950655937194824, 0.004952064037322998, 0.004907008171081543, 0.00491542387008667, 0.004997983932495117, 0.00492844820022583, 0.004914400100708008, 0.004905695915222168, 0.0049889922142028805, 0.004915264129638672, 0.004949471950531006, 0.00492796802520752, 0.005021183967590332, 0.004966464042663574, 0.004927103996276856, 0.004935488224029541, 0.004982016086578369, 0.005150400161743164, 0.004918687820434571, 0.005040544033050537, 0.005003520011901855, 0.0049498558044433595, 0.004898975849151612, 0.00490009593963623, 0.004926239967346192, 0.004941760063171386, 0.004900896072387696, 0.004890624046325683, 0.004906623840332031, 0.0049565439224243165, 0.004913375854492187, 0.004875328063964844, 0.004895296096801758, 0.004957888126373291, 0.004929056167602539, 0.004901343822479248, 0.004897471904754639, 0.004885312080383301, 0.004950463771820068, 0.004911776065826416, 0.004936704158782959, 
0.004889664173126221, 0.004953536033630371, 0.004991360187530517, 0.004935840129852295, 0.005012608051300048, 0.004979072093963623, 0.004921696186065674, 0.004892000198364258, 0.0048831038475036625, 0.004923391819000244, 0.004954271793365479, 0.004923232078552246, 0.004897984027862548, 0.004897600173950196, 0.004990848064422607, 0.004926688194274902, 0.004916128158569336, 0.0049292478561401365, 0.005003551959991455, 0.004872608184814453, 0.004930912017822265, 0.004925087928771972, 0.004891136169433594, 0.004880864143371582, 0.0049807682037353515, 0.004919007778167724, 0.004892928123474121, 0.004902912139892578, 0.005011104106903076, 0.0049153599739074705, 0.004899007797241211, 0.004888319969177246, 0.0049192638397216795, 0.004932191848754883, 0.0050028800964355465, 0.004948031902313232, 0.0048863358497619625, 0.004997312068939209, 0.004904511928558349, 0.0048800320625305175, 0.004915999889373779, 0.004984320163726807, 0.004929952144622803, 0.004892767906188965, 0.004908991813659668, 0.004933695793151855, 0.0049268159866333, 0.004911424160003662, 0.004882783889770508, 0.004910208225250244, 0.004977663993835449, 0.004933504104614258, 0.004888576030731201, 0.00493174409866333, 0.005061728000640869, 0.004958975791931153, 0.004904607772827149, 0.004907519817352295, 0.004954239845275879, 0.0049407358169555665, 0.004879136085510254, 0.0048925762176513675, 0.0049398717880249025, 0.004960256099700928, 0.004902976036071777, 0.004890048027038575, 0.004897280216217041, 0.004946239948272705, 0.004902592182159424, 0.004918655872344971, 0.004896639823913574, 0.004956319808959961, 0.0049030079841613766, 0.004881951808929443, 0.004899807929992676, 0.004917247772216797, 0.00497270393371582, 0.004908895969390869, 0.004901919841766357, 0.004888671875, 0.004963200092315673, 0.004937727928161621, 0.00486195182800293, 0.004899839878082276, 0.004894015789031983, 0.0048921918869018555, 0.004970304012298584, 0.004903264045715332, 0.0049582719802856445, 0.004905151844024658, 0.00500710391998291, 0.004958208084106445, 0.004898272037506104, 0.004886816024780274, 0.00493609619140625, 0.004929376125335693, 0.004943071842193603, 0.0049119038581848145, 0.004929535865783692, 0.00516096019744873, 0.004966176033020019, 0.004954432010650635, 0.004945151805877686, 0.00502236795425415, 0.004952064037322998, 0.004941472053527832, 0.005025311946868897, 0.004993855953216553, 0.004970848083496094, 0.004904704093933105, 0.004984000205993652, 0.0049663038253784176, 0.0049236159324646, 0.004973055839538575, 0.004911200046539306, 0.004952064037322998, 0.004929535865783692, 0.004892000198364258, 0.004908031940460205, 0.00492303991317749, 0.004964352130889893, 0.004924575805664063, 0.004926303863525391, 0.004918687820434571, 0.005077600002288818, 0.00496230411529541, 0.004937727928161621, 0.00491107177734375, 0.004945951938629151, 0.004904960155487061, 0.004909152030944824, 0.004894847869873047, 0.0049634242057800294, 0.004946623802185059, 0.0049311680793762205, 0.00494598388671875, 0.004962656021118164, 0.004990464210510254, 0.004944511890411377, 0.004986752033233643, 0.0049090561866760255, 0.004974431991577148, 0.004920959949493408, 0.004875103950500489, 0.004912735939025879, 0.004785632133483887, 0.00491100788116455, 0.004971424102783203, 0.004890624046325683, 0.004896768093109131, 0.004876543998718261, 0.004963391780853271, 0.004921792030334472, 0.004875936031341553, 0.004899424076080322, 0.004952095985412598, 0.004920864105224609, 0.004879968166351318, 0.004911200046539306, 0.00489731216430664, 0.004950399875640869, 
0.004910528182983398, 0.004877952098846436, 0.004881184101104736, 0.004966815948486328, 0.004919072151184082, 0.004879968166351318, 0.004870368003845215, 0.004931136131286621, 0.0049480957984924315, 0.0049073281288146975, 0.004904287815093994, 0.004897439956665039, 0.0049779839515686035, 0.00493228816986084, 0.005016895771026611, 0.004901567935943604, 0.005364992141723633, 0.005319424152374267, 0.004945824146270752, 0.005002943992614746, 0.0049647679328918455, 0.00491315221786499, 0.004917215824127197, 0.004924992084503174, 0.004950496196746826, 0.004903071880340576, 0.004902592182159424, 0.004907167911529541, 0.004980127811431884, 0.004927743911743164, 0.004904511928558349, 0.004948256015777588, 0.00497001600265503, 0.005057024002075195, 0.004931424140930176, 0.00489734411239624, 0.004976319789886475, 0.004924895763397216, 0.004903840065002441, 0.004894720077514648, 0.0049060797691345215, 0.0049705920219421384, 0.00499180793762207, 0.00495411205291748, 0.004918879985809326, 0.005007167816162109, 0.004960864067077637, 0.004906559944152832, 0.004958015918731689, 0.004916927814483643, 0.005090271949768067, 0.004929535865783692, 0.004924928188323975, 0.004905471801757813, 0.0049090561866760255, 0.004968448162078858, 0.004923391819000244, 0.004898816108703613, 0.004900352001190185, 0.004985119819641113, 0.004928895950317383, 0.004903647899627685, 0.004904255867004394, 0.004978911876678467, 0.004913887977600098, 0.004889472007751465, 0.0049342079162597655, 0.004864799976348877, 0.004964000225067139, 0.0048990721702575684, 0.004893695831298828, 0.004864863872528076, 0.004957183837890625, 0.004921599864959717, 0.004907680034637451, 0.004905983924865722, 0.004918272018432617, 0.004929696083068848, 0.004896607875823975, 0.004880383968353271, 0.0048724479675292965, 0.004943615913391113, 0.0049060478210449215, 0.004861983776092529, 0.004889503955841064, 0.004943295955657959, 0.005022272109985352, 0.00490831995010376, 0.00490399980545044, 0.004947616100311279, 0.005014592170715332, 0.004883296012878418, 0.004898496150970459, 0.004878528118133545, 0.004942048072814942, 0.004902912139892578, 0.0049090561866760255, 0.004876287937164306, 0.00495849609375, 0.004915264129638672, 0.004885983943939209, 0.004931776046752929, 0.004917024135589599, 0.005044447898864746, 0.004982207775115967, 0.0049567360877990725, 0.0049593281745910645, 0.004987808227539062, 0.004933631896972656, 0.004955840110778808, 0.004860288143157959, 0.00493833589553833, 0.00499507188796997, 0.004943871974945068, 0.004907008171081543, 0.0049192957878112795, 0.004923232078552246, 0.004946080207824707, 0.004911104202270508, 0.004933472156524658, 0.004894879817962647, 0.004970272064208984, 0.004912384033203125, 0.004911776065826416, 0.004913472175598145, 0.004998816013336182, 0.004940127849578858, 0.005001215934753418, 0.004908256053924561, 0.0049684162139892575, 0.004973120212554931, 0.004880640029907226, 0.004896768093109131, 0.005087232112884522, 0.004974592208862305, 0.004961535930633545, 0.00491590404510498, 0.004947999954223633, 0.0051528000831604, 0.004965504169464111, 0.0049509119987487795, 0.004904607772827149, 0.00498908805847168, 0.004929376125335693, 0.004915328025817871, 0.004908512115478516, 0.004956639766693115, 0.004947487831115723, 0.004883200168609619, 0.004894720077514648, 0.004964352130889893, 0.004933631896972656, 0.004902272224426269, 0.004909696102142334, 0.004884223937988281, 0.004958240032196045, 0.004904416084289551, 0.004882559776306152, 0.004877984046936035, 0.004946208000183106, 0.004973120212554931, 
0.004905087947845459, 0.0048925762176513675, 0.004968607902526855, 0.004937376022338867, 0.004880671977996826, 0.004874239921569825, 0.004900864124298096, 0.004972095966339111, 0.004904672145843506, 0.004893407821655273, 0.004902912139892578, 0.004976640224456787, 0.005228544235229492, 0.00609062385559082, 0.00565670394897461, 0.005205152034759522, 0.004921440124511719, 0.004914944171905518, 0.004919424057006836, 0.004959104061126709, 0.004925439834594727, 0.004906847953796387, 0.004931104183197021, 0.005060575962066651, 0.004936351776123047, 0.004929183959960937, 0.005044576168060303, 0.004999135971069336, 0.004964384078979492, 0.004935679912567138, 0.005027679920196533, 0.004984992027282715, 0.0049398717880249025, 0.004918687820434571, 0.004907519817352295, 0.004997119903564453, 0.004925439834594727, 0.004911104202270508, 0.004892672061920166, 0.004933119773864746, 0.004958655834197998, 0.004925504207611084, 0.004904960155487061, 0.004896063804626465, 0.0049600000381469726, 0.00490502405166626, 0.0048951997756958, 0.004909471988677978, 0.004986048221588135, 0.004993855953216553, 0.00494704008102417, 0.005143199920654297, 0.005004576206207275, 0.004935776233673096, 0.004920191764831543, 0.00491315221786499, 0.004968448162078858, 0.004929344177246094, 0.004894752025604248, 0.004894176006317139, 0.004902847766876221, 0.0049630718231201175, 0.004894720077514648, 0.0048837437629699705, 0.00488428783416748, 0.004967328071594238, 0.004925439834594727, 0.004894720077514648, 0.004902912139892578, 0.004959807872772217, 0.004917407989501953, 0.00490115213394165, 0.004873600006103516, 0.004906879901885986, 0.004997568130493164, 0.0048558077812194825, 0.0049192957878112795, 0.0049090561866760255, 0.00488592004776001, 0.00489737606048584, 0.004956160068511963, 0.00496566390991211, 0.005740992069244385, 0.0050005121231079105, 0.004942240238189698, 0.004901440143585205, 0.004910719871520996, 0.0049565439224243165, 0.004910848140716553, 0.004897024154663086, 0.004902912139892578, 0.004923391819000244, 0.004956160068511963, 0.004916287899017334, 0.004897696018218994, 0.004888607978820801, 0.0049576001167297365, 0.004932191848754883, 0.004910560131072998, 0.004913695812225342, 0.00496230411529541, 0.004943871974945068, 0.004898816108703613, 0.004933599948883057, 0.004974624156951904, 0.004978367805480957, 0.004947648048400879, 0.004909599781036377, 0.004900959968566895, 0.0049909758567810054, 0.00495411205291748, 0.0049192957878112795, 0.004904960155487061, 0.00497049617767334, 0.004920959949493408, 0.004894432067871094, 0.004902719974517822, 0.00494268798828125, 0.004907008171081543, 0.005056511878967285, 0.004921343803405762, 0.004894432067871094, 0.004963808059692383, 0.004958687782287597, 0.0049088640213012695, 0.004897056102752685, 0.004962560176849365, 0.004998303890228271, 0.005817183971405029, 0.005048319816589355, 0.004933279991149902, 0.004890975952148438, 0.004914463996887207, 0.004958943843841553, 0.00491315221786499, 0.004898272037506104, 0.004891168117523193, 0.004902016162872314, 0.004799071788787842, 0.005559391975402832, 0.004971424102783203, 0.005586944103240967, 0.005717984199523926, 0.00562179183959961, 0.004939775943756103, 0.005017055988311768, 0.004950560092926025, 0.0049192957878112795, 0.004912896156311035, 0.004995039939880371, 0.004945888042449951, 0.0049073281288146975, 0.0049231362342834475, 0.004993279933929443, 0.004964352130889893, 0.004918943881988525, 0.004904863834381104, 0.0049585280418396, 0.0049706239700317385, 0.004930848121643066, 0.004903103828430176, 
0.0050078401565551755, 0.004986944198608398, 0.004925439834594727, 0.004913407802581787, 0.004900735855102539, 0.004986752033233643, 0.004960256099700928, 0.005027040004730225, 0.00521830415725708, 0.005036255836486816, 0.00516870403289795, 0.004904287815093994, 0.005216991901397705, 0.004996032238006592, 0.005007359981536865, 0.004926623821258545, 0.004942207813262939, 0.004975071907043457, 0.004925439834594727, 0.0048865280151367185, 0.004933631896972656, 0.0049909758567810054, 0.004923391819000244, 0.004960256099700928, 0.0049862079620361325, 0.005001791954040527, 0.005130112171173096, 0.004925663948059082, 0.004921343803405762, 0.00499507188796997, 0.004918975830078125, 0.004922688007354736, 0.0048846077919006345, 0.004946720123291015, 0.004902624130249024, 0.004964735984802246, 0.004883584022521973, 0.004884384155273438, 0.004981279850006104, 0.0049054079055786135]",tokens/s,201.63556778220635,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1849.319424,2552.102912,0.0,2149.5808,2147.522048,s,1,8.9376376953125,8.9376376953125,0.0,8.9376376953125,8.9376376953125,8.9376376953125,8.9376376953125,[8.9376376953125],,kWh,4.5413617404161264e-05,5.002089600946453e-06,1.485751188600587e-05,6.527321889111359e-05,,MB,1958.514688,2621.308928,0.0,2204.106752,2190.132736,s,10,0.6495389785766602,0.064953897857666,0.00041509130689101745,0.06485004806518554,0.06547903366088868,0.06555778846740723,0.06562079231262206,"[0.06563654327392578, 0.06476588439941407, 0.06546153259277344, 0.06542412567138672, 0.0646209945678711, 0.06459053039550781, 0.06511309051513672, 0.06493421173095704, 0.06466182708740234, 0.06433023834228516]",tokens/s,3941.2569290448873,kWh,1.953870639777708e-06,2.1537560801401674e-07,1.3012825225066268e-06,3.4705287702983517e-06,tokens/kWh,73763975.73502679,MB,1962.668032,2621.308928,0.0,2204.106752,2190.135296,s,10,14.828645629882812,1.4828645629882813,0.019202764508675304,1.4756652221679687,1.5087432373046874,1.510645849609375,1.512167939453125,"[1.5125484619140626, 1.5083204345703125, 1.5064305419921875, 1.4908870849609375, 1.4831348876953125, 1.466040771484375, 1.4614727783203125, 1.468195556640625, 1.4664501953125, 1.4651649169921874]",tokens/s,42.48533653878805,kWh,4.313238608272145e-05,4.7572657995504e-06,2.4416488051693756e-05,7.23061399339656e-05,tokens/kWh,871295.3015820712,,s,630,14.826518163681026,0.023534155815366713,0.00045487003014112366,0.023433664321899417,0.024002842140197756,0.02421023416519165,0.025238732814788823,"[0.026103584289550782, 0.025550464630126953, 0.024367712020874024, 0.02411724853515625, 0.02426470375061035, 0.024006656646728516, 0.024024223327636717, 0.024015008926391603, 0.02400480079650879, 0.02380793571472168, 0.023937599182128906, 0.023791616439819335, 0.02402681541442871, 0.023599519729614257, 0.023406496047973634, 0.023564287185668945, 0.023747776031494142, 0.023661376953125, 0.023602752685546874, 
0.023560640335083007, 0.023633920669555664, 0.023666080474853517, 0.023616096496582032, 0.023718175888061525, 0.02385468864440918, 0.023768800735473633, 0.023892383575439453, 0.023740415573120118, 0.02406345558166504, 0.02390412712097168, 0.024033472061157225, 0.024005088806152344, 0.025151487350463866, 0.024102912902832032, 0.024221696853637696, 0.024526144027709963, 0.0240435848236084, 0.023925376892089845, 0.02397916793823242, 0.02398089599609375, 0.024579872131347658, 0.0241297607421875, 0.023783424377441405, 0.023900096893310546, 0.02396985626220703, 0.02376483154296875, 0.023957664489746094, 0.023844480514526367, 0.023888160705566406, 0.024029279708862306, 0.02390998458862305, 0.024017120361328127, 0.02423356819152832, 0.023990848541259765, 0.02384899139404297, 0.0238920955657959, 0.02400787162780762, 0.023958208084106446, 0.024055360794067383, 0.023969440460205077, 0.023935487747192383, 0.02376905632019043, 0.023877952575683595, 0.024723455429077147, 0.02432614326477051, 0.02422985649108887, 0.023969823837280274, 0.023828479766845705, 0.023826431274414063, 0.023907424926757813, 0.023800735473632813, 0.02411859130859375, 0.02387014389038086, 0.023828447341918944, 0.024157215118408203, 0.023938047409057618, 0.023885759353637695, 0.02400262451171875, 0.02391587257385254, 0.024154783248901367, 0.02379961585998535, 0.023947168350219726, 0.023884063720703126, 0.02371788787841797, 0.02370256042480469, 0.023827423095703126, 0.023834623336791993, 0.023867071151733397, 0.023783744812011717, 0.02375833511352539, 0.02368275260925293, 0.024260992050170897, 0.023832191467285158, 0.023720767974853514, 0.02371174430847168, 0.023760896682739258, 0.024023040771484375, 0.02378313636779785, 0.02377884864807129, 0.023807903289794922, 0.02381020736694336, 0.02384761619567871, 0.023866592407226564, 0.023728927612304686, 0.023781375885009767, 0.02389187240600586, 0.023784704208374023, 0.0241910400390625, 0.023771936416625977, 0.02380201530456543, 0.02391606330871582, 0.023889408111572266, 0.02372844886779785, 0.023851104736328125, 0.023771551132202147, 0.023801855087280274, 0.023992319107055664, 0.023875583648681642, 0.023795520782470703, 0.023895263671875, 0.0239400634765625, 0.02446233558654785, 0.024411136627197266, 0.023924543380737306, 0.02502003288269043, 0.0243507194519043, 0.024615455627441406, 0.02406012725830078, 0.023983264923095705, 0.02396451187133789, 0.02370159912109375, 0.023785375595092775, 0.023875072479248048, 0.024043392181396485, 0.024005216598510744, 0.023789535522460936, 0.02382758331298828, 0.023863264083862305, 0.02394620704650879, 0.023826431274414063, 0.02391606330871582, 0.024253087997436522, 0.02394246482849121, 0.023855615615844726, 0.023937023162841797, 0.023862688064575196, 0.024087135314941405, 0.027627519607543945, 0.024300928115844726, 0.024021280288696288, 0.023834976196289062, 0.023810047149658203, 0.023836671829223634, 0.02391593551635742, 0.023928640365600586, 0.023982784271240235, 0.023791711807250978, 0.023766464233398437, 0.023807775497436522, 0.0239071044921875, 0.02381532859802246, 0.023863807678222656, 0.024043167114257812, 0.023986591339111327, 0.023764768600463868, 0.023773120880126952, 0.023829055786132813, 0.023859199523925782, 0.023910400390625, 0.023847967147827148, 0.02373731231689453, 0.023760671615600585, 0.023791839599609375, 0.02371552085876465, 0.02370560073852539, 0.02374073600769043, 0.02375017547607422, 0.023767520904541015, 0.023738367080688477, 0.02369536018371582, 0.02353708839416504, 0.02361555290222168, 0.02367145538330078, 
0.023631423950195313, 0.023628063201904297, 0.023582592010498046, 0.023601280212402344, 0.023490335464477537, 0.023683040618896485, 0.02545552062988281, 0.02393289566040039, 0.02382441520690918, 0.023812095642089845, 0.023773183822631837, 0.023727264404296875, 0.02371459197998047, 0.023707616806030275, 0.02388796806335449, 0.023713151931762697, 0.02427903938293457, 0.02372368049621582, 0.02374345588684082, 0.023799808502197265, 0.02355200004577637, 0.024152063369750978, 0.023664384841918944, 0.023599359512329103, 0.023678335189819336, 0.023544095993041993, 0.023435615539550782, 0.02364563179016113, 0.025912960052490233, 0.02367584037780762, 0.023521280288696288, 0.02344905662536621, 0.023412832260131834, 0.02326291275024414, 0.0233002872467041, 0.023573055267333984, 0.02331443214416504, 0.023242591857910156, 0.023314592361450195, 0.02346953582763672, 0.023871776580810546, 0.023451904296875, 0.023397567749023438, 0.023368511199951172, 0.02344473648071289, 0.023454463958740235, 0.023347200393676756, 0.02349056053161621, 0.02330624008178711, 0.023340768814086914, 0.024570144653320313, 0.023355392456054686, 0.023382015228271484, 0.023202848434448243, 0.02316592025756836, 0.02315673637390137, 0.023313888549804686, 0.023204383850097657, 0.0260599365234375, 0.0235631046295166, 0.023432479858398438, 0.023537824630737305, 0.023508928298950196, 0.023648160934448242, 0.023439199447631835, 0.023434112548828125, 0.02352332878112793, 0.023375871658325196, 0.02351513671875, 0.024302751541137695, 0.02365715217590332, 0.023560352325439453, 0.023586816787719726, 0.023472127914428712, 0.023558143615722657, 0.023617088317871095, 0.02353923225402832, 0.023393184661865234, 0.023416831970214845, 0.023442720413208006, 0.02344940757751465, 0.023462656021118165, 0.023504735946655274, 0.023521600723266603, 0.02375823974609375, 0.023530080795288087, 0.023433216094970705, 0.023557472229003906, 0.023667360305786134, 0.023556095123291015, 0.02385513687133789, 0.023727296829223633, 0.02371798324584961, 0.023625856399536134, 0.02373484802246094, 0.02350694465637207, 0.023407936096191406, 0.023462112426757813, 0.02394745635986328, 0.023795936584472658, 0.023640127182006837, 0.023572416305541993, 0.02348409652709961, 0.023647615432739258, 0.023833152770996093, 0.02376156806945801, 0.023397695541381835, 0.023345632553100584, 0.023404224395751953, 0.023548223495483397, 0.023655744552612306, 0.02375734329223633, 0.02360745620727539, 0.023454912185668947, 0.023515520095825197, 0.02347257614135742, 0.02331180763244629, 0.023247360229492187, 0.023539264678955077, 0.02327350425720215, 0.02328828811645508, 0.023310335159301757, 0.023812095642089845, 0.023430816650390623, 0.02347792053222656, 0.023368352890014647, 0.023334623336791992, 0.023427007675170898, 0.023322399139404298, 0.023216224670410155, 0.02334771156311035, 0.023347200393676756, 0.024196224212646486, 0.02353856086730957, 0.023418399810791017, 0.02345212745666504, 0.02338934326171875, 0.02348640060424805, 0.023431936264038087, 0.023537824630737305, 0.02333286476135254, 0.023473472595214845, 0.023425472259521483, 0.023490240097045898, 0.02333139228820801, 0.02344960021972656, 0.02327356719970703, 0.02326665687561035, 0.02317158317565918, 0.023275583267211915, 0.023195583343505858, 0.02320185661315918, 0.02305971145629883, 0.023794624328613283, 0.023177024841308593, 0.023201791763305665, 0.023236608505249022, 0.02323683166503906, 0.023204736709594727, 0.02318943977355957, 0.023206783294677735, 0.02308515167236328, 0.023086816787719726, 0.02312835121154785, 
0.023093248367309572, 0.023240703582763672, 0.023097631454467773, 0.023244512557983397, 0.02325503921508789, 0.023129983901977538, 0.023145599365234373, 0.02308531188964844, 0.023167743682861328, 0.023078975677490236, 0.023117759704589843, 0.023242752075195314, 0.02332057571411133, 0.023187456130981447, 0.023222272872924804, 0.023289119720458985, 0.023212799072265623, 0.023078399658203123, 0.023271167755126953, 0.023266016006469728, 0.02349260711669922, 0.023232511520385742, 0.02326118469238281, 0.02305638313293457, 0.02329804801940918, 0.02309119987487793, 0.023003135681152344, 0.023166751861572264, 0.023199968338012696, 0.02312995147705078, 0.02322243118286133, 0.023480735778808593, 0.023198047637939454, 0.023282911300659178, 0.023075616836547852, 0.02315088081359863, 0.02307632064819336, 0.02308531188964844, 0.023218143463134767, 0.02319158363342285, 0.023144447326660156, 0.023116832733154298, 0.023067136764526368, 0.023175167083740233, 0.023095775604248046, 0.023119871139526366, 0.023117824554443358, 0.023262432098388672, 0.02312272071838379, 0.02305574417114258, 0.023097919464111327, 0.023042112350463866, 0.023170463562011717, 0.02321673583984375, 0.023216127395629883, 0.023216127395629883, 0.02317033576965332, 0.023091936111450197, 0.02348441505432129, 0.023177215576171875, 0.023191423416137696, 0.02313033676147461, 0.023113439559936524, 0.02312335968017578, 0.023149343490600587, 0.023248895645141602, 0.023185407638549805, 0.02304204750061035, 0.02308243179321289, 0.023112480163574218, 0.02324019241333008, 0.023335391998291016, 0.02318729591369629, 0.023144704818725586, 0.0231441593170166, 0.023084640502929688, 0.023171487808227538, 0.023205888748168944, 0.02309734344482422, 0.02315657615661621, 0.0231889591217041, 0.023121856689453126, 0.023091039657592773, 0.023145055770874022, 0.023161151885986327, 0.023149824142456053, 0.023142656326293944, 0.02330201530456543, 0.02314854431152344, 0.023287839889526367, 0.023159391403198244, 0.02371379280090332, 0.024290496826171876, 0.02330646324157715, 0.02366054344177246, 0.023535295486450194, 0.023462303161621095, 0.02363337516784668, 0.023350271224975586, 0.023267328262329103, 0.02324198341369629, 0.023326623916625978, 0.023115711212158205, 0.023323135375976564, 0.023098976135253906, 0.023378751754760743, 0.02308710479736328, 0.023143936157226562, 0.02300160026550293, 0.0232857608795166, 0.02326118469238281, 0.02315673637390137, 0.02312348747253418, 0.023082815170288085, 0.02312259292602539, 0.02323184013366699, 0.023146400451660155, 0.023118431091308594, 0.023117984771728516, 0.023226367950439454, 0.02313216018676758, 0.023590816497802734, 0.023236703872680665, 0.023207935333251953, 0.023131263732910155, 0.02325734329223633, 0.02316556739807129, 0.023369728088378908, 0.023072063446044924, 0.023126720428466797, 0.02316223907470703, 0.023237247467041016, 0.023402271270751954, 0.023297632217407226, 0.02330486488342285, 0.02332988739013672, 0.02332147216796875, 0.02323404884338379, 0.02335590362548828, 0.023365535736083985, 0.023408031463623045, 0.023423679351806642, 0.023322463989257813, 0.023381664276123048, 0.023325183868408202, 0.02347007942199707, 0.02344960021972656, 0.023377920150756838, 0.023729663848876953, 0.0235545597076416, 0.023377920150756838, 0.02347007942199707, 0.023371776580810546, 0.023416383743286133, 0.023384511947631834, 0.023390207290649414, 0.023321983337402343, 0.024062143325805665, 0.023519968032836912, 0.0234736328125, 0.023394784927368163, 0.023321983337402343, 0.023334943771362304, 0.023324575424194336, 
0.023128671646118162, 0.023179359436035156, 0.023170047760009766, 0.02375359916687012, 0.023066112518310547, 0.023014015197753906, 0.02308095932006836, 0.023062143325805664, 0.023126399993896485, 0.02311155128479004, 0.023132287979125976, 0.023355104446411132, 0.023228384017944335, 0.023085376739501954, 0.023312095642089845, 0.023195583343505858, 0.02325948715209961, 0.023334911346435547, 0.0232061767578125, 0.023611103057861328, 0.024266752243041992, 0.0244715518951416, 0.023305280685424805, 0.023323583602905273, 0.023572032928466796, 0.023235008239746092, 0.02326323127746582, 0.023177215576171875, 0.023181184768676758, 0.023144575119018556, 0.023179264068603517, 0.0232359676361084, 0.02317359924316406, 0.023234720230102538, 0.02303968048095703, 0.023090591430664064, 0.023147424697875976, 0.023058176040649414, 0.02309503936767578, 0.023093759536743166, 0.02372403144836426, 0.02307632064819336, 0.02307516860961914, 0.02311174392700195, 0.023138431549072264, 0.023116031646728517, 0.02336947250366211, 0.024030656814575196, 0.023056959152221678, 0.02307379150390625, 0.023093727111816405, 0.023140159606933594, 0.02300796890258789, 0.022989919662475586, 0.023107744216918944, 0.023014015197753906, 0.023342559814453125, 0.02313680076599121, 0.023175167083740233, 0.023193151473999023, 0.02312224006652832, 0.023066848754882813, 0.023078815460205078, 0.023114847183227538, 0.023321504592895507, 0.023176671981811524, 0.02313680076599121, 0.02297439956665039, 0.023225439071655272, 0.023071039199829103, 0.022981279373168944, 0.02305638313293457, 0.023087263107299805, 0.023175008773803712, 0.023114944458007814, 0.023040063858032228, 0.023612064361572267, 0.023201887130737304, 0.023154687881469727, 0.023228063583374023, 0.023112287521362306, 0.02312553596496582, 0.023050464630126954, 0.02309529685974121, 0.02306662368774414, 0.02305177688598633, 0.02318182373046875, 0.023189504623413085, 0.023072799682617186, 0.023125696182250976, 0.023146047592163085, 0.0230930233001709, 0.023122495651245117, 0.023263616561889647, 0.02305574417114258, 0.0230382080078125, 0.023114303588867187, 0.02313167953491211, 0.023341344833374023, 0.025274368286132814, 0.02398028755187988, 0.023428607940673828, 0.02309760093688965, 0.023170463562011717, 0.023345760345458984, 0.023310335159301757, 0.023218175888061524, 0.023222272872924804, 0.023127391815185548, 0.02347248077392578, 0.02315091133117676, 0.023162879943847657, 0.02457747268676758, 0.023971424102783204, 0.02336025619506836, 0.023194944381713867, 0.02344028854370117, 0.023150592803955077, 0.02314854431152344]",tokens/s,42.49143278583404,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in 
launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4762.640384,6194.855936,0.0,5792.333824,5786.358272,s,1,11.5642353515625,11.5642353515625,0.0,11.5642353515625,11.5642353515625,11.5642353515625,11.5642353515625,[11.5642353515625],,kWh,0.00012402634582917168,1.3673671878220729e-05,3.930919811399869e-05,0.00017700921582139109,,MB,1789.431808,6396.182528,0.0,5978.980352,5957.812736,s,10,2.279697006225586,0.22796970062255859,0.00027475791062724623,0.22799174499511718,0.22826287994384767,0.2282644660949707,0.22826573501586914,"[0.22776016235351562, 0.22813168334960937, 0.228225341796875, 0.22743142700195312, 0.22826605224609375, 0.22770851135253906, 0.22784701538085939, 0.22826252746582032, 0.227851806640625, 0.2282124786376953]",tokens/s,1122.9562494528614,kWh,6.679394760606032e-06,7.366171494563723e-07,4.422440406636329e-06,1.1838452316698733e-05,tokens/kWh,21624448.293709736,MB,1798.00064,6408.76544,0.0,5991.563264,5958.339584,s,10,21.698042480468754,2.169804248046875,0.013150681983193758,2.1658510742187502,2.1908701904296874,2.191632971191406,2.1922431958007813,"[2.177510986328125, 2.160566162109375, 2.19070068359375, 2.175353515625, 2.192395751953125, 2.165943359375, 2.162109619140625, 2.1519287109375, 2.15577490234375, 2.1657587890625]",tokens/s,29.034877250659243,kWh,6.349747557522737e-05,7.0036768499871455e-06,4.228173584556419e-05,0.00011278288827077873,tokens/kWh,558595.377064154,,s,630,21.69566422271727,0.03443756225828141,0.000508662654462346,0.03430465507507324,0.03489820747375488,0.03513135070800781,0.03606245227813721,"[0.03550649642944336, 0.034920352935791016, 0.035133438110351564, 0.03477664184570312, 0.03479926300048828, 0.035089279174804686, 0.03489503860473633, 0.035103649139404294, 0.03492240142822266, 0.03487539291381836, 0.03477503967285156, 0.03487948989868164, 0.03475046539306641, 0.0346638069152832, 0.03474649429321289, 0.03471721649169922, 0.034675041198730466, 0.03492262268066406, 0.034895519256591796, 0.03476883316040039, 0.034837406158447264, 0.034678783416748044, 0.03469107055664063, 0.03461734390258789, 0.03464150238037109, 0.03483894348144531, 0.03453488159179687, 0.03449401473999023, 0.03430089569091797, 0.03426268768310547, 0.03409955215454102, 0.03424051284790039, 0.03457001495361328, 0.03430627059936524, 0.03421750259399414, 0.03421750259399414, 0.03417593765258789, 0.03406972885131836, 0.034159393310546876, 0.0341517105102539, 0.03447625732421875, 0.034103809356689455, 0.03597312164306641, 0.03493283081054688, 0.0344370231628418, 0.03429785537719727, 0.03415654373168946, 0.03421494293212891, 0.034136894226074216, 0.03415017700195312, 0.034285823822021486, 0.03425702285766601, 0.03484262466430664, 0.03444326400756836, 
0.03420528030395508, 0.034310047149658206, 0.03473664093017578, 0.03466239929199219, 0.03406028747558594, 0.03414217758178711, 0.034187297821044925, 0.03400294494628906, 0.03433216094970703, 0.03483654403686524, 0.03425068664550781, 0.034369537353515625, 0.03414425659179687, 0.03466035079956055, 0.034299102783203125, 0.03416275024414062, 0.0341060791015625, 0.034160545349121094, 0.0340931510925293, 0.03419136047363281, 0.03426508712768555, 0.034342910766601564, 0.03424051284790039, 0.03449446487426758, 0.03423971176147461, 0.03409590530395508, 0.03403571319580078, 0.03397385787963867, 0.03410985565185547, 0.03402700805664063, 0.03405055999755859, 0.034103294372558594, 0.034508544921875, 0.0342817268371582, 0.03422208023071289, 0.034172321319580076, 0.03408092880249024, 0.034165054321289065, 0.03449049758911133, 0.034119678497314454, 0.03410739135742188, 0.03402751922607422, 0.03418873596191406, 0.03409910583496094, 0.03501855850219727, 0.03461203384399414, 0.035495967864990235, 0.034344959259033206, 0.03425843048095703, 0.03407699203491211, 0.03425209426879883, 0.03425779342651367, 0.034543617248535156, 0.03420979309082031, 0.0342529296875, 0.034788768768310545, 0.03412601470947266, 0.03391312026977539, 0.034035518646240236, 0.03393759918212891, 0.034099262237548826, 0.03393734359741211, 0.03440576171875, 0.0346071662902832, 0.03479609680175781, 0.03441459274291992, 0.034435073852539064, 0.034385921478271485, 0.03447356796264649, 0.034318302154541014, 0.03424505615234375, 0.034340320587158205, 0.03519420623779297, 0.034553791046142576, 0.03461215972900391, 0.03474227142333984, 0.03452057647705078, 0.03452345657348633, 0.03447785568237305, 0.03461775970458984, 0.034523136138916014, 0.03449862289428711, 0.034326465606689456, 0.034559967041015625, 0.03452726364135742, 0.034720993041992186, 0.03452380752563477, 0.03456422424316406, 0.0345272331237793, 0.0362000961303711, 0.03454364776611328, 0.03442105484008789, 0.034590721130371094, 0.03487539291381836, 0.03462876892089844, 0.03467542266845703, 0.03457788848876953, 0.034709438323974606, 0.034872032165527346, 0.0348037109375, 0.03463372802734375, 0.03480335998535156, 0.03483385467529297, 0.03483766555786133, 0.038177566528320314, 0.03504022216796875, 0.034746368408203124, 0.03469065475463867, 0.03524444961547851, 0.03489782333374023, 0.03485910415649414, 0.034618976593017575, 0.034479839324951175, 0.034534080505371094, 0.03462307357788086, 0.035240352630615236, 0.03461497497558594, 0.03449273681640625, 0.03448831939697266, 0.034412769317626955, 0.034409854888916014, 0.034609375, 0.034463550567626955, 0.03451238250732422, 0.035699455261230466, 0.034643585205078126, 0.03463129425048828, 0.03486918258666992, 0.03487452697753906, 0.03479939270019531, 0.03477641677856445, 0.03466908645629883, 0.03492057418823242, 0.03482009506225586, 0.03457024002075195, 0.035359039306640624, 0.034756607055664065, 0.03469875335693359, 0.0347960319519043, 0.03476838302612305, 0.03601039886474609, 0.034678592681884765, 0.03496169662475586, 0.03471100616455078, 0.0346071662902832, 0.034673118591308595, 0.03476070404052734, 0.034565567016601566, 0.03460153579711914, 0.034662559509277345, 0.03473990249633789, 0.03470556640625, 0.03469209671020508, 0.03464908981323242, 0.034576385498046876, 0.03464191818237305, 0.03452928161621094, 0.03457228851318359, 0.03467264175415039, 0.03457558441162109, 0.03453817749023438, 0.03466864013671875, 0.0344719352722168, 0.03442451095581055, 0.03446611022949219, 0.034328575134277346, 0.03417292785644531, 0.0340582389831543, 
0.038067424774169925, 0.035277599334716796, 0.03434883117675781, 0.03440867233276367, 0.034070526123046875, 0.03414182281494141, 0.034102848052978516, 0.034132801055908206, 0.03406217575073242, 0.03413641738891601, 0.03409408187866211, 0.034140384674072266, 0.03409366226196289, 0.03408259201049805, 0.03406870269775391, 0.03421094512939453, 0.03439091110229492, 0.03402137756347656, 0.034024993896484376, 0.03398617553710938, 0.03399945449829102, 0.033941761016845706, 0.03402262496948242, 0.033966270446777344, 0.03612944030761719, 0.03435513687133789, 0.034273216247558594, 0.034186622619628904, 0.03412963104248047, 0.034176097869873044, 0.035128257751464845, 0.034418689727783204, 0.03447545623779297, 0.03431795120239258, 0.034452415466308596, 0.03449651336669922, 0.034402305603027344, 0.03448147201538086, 0.034871166229248046, 0.03492691040039062, 0.03470796966552735, 0.03454508972167969, 0.03441638565063477, 0.03504620742797852, 0.03455331039428711, 0.03444073486328125, 0.03457449722290039, 0.03471446228027344, 0.03460300827026367, 0.03492816162109375, 0.03515011215209961, 0.03503532791137695, 0.03474959945678711, 0.03471036911010742, 0.03460300827026367, 0.03464342498779297, 0.03469776153564453, 0.03488143920898437, 0.03500236892700195, 0.03471984100341797, 0.03461097717285156, 0.03451884841918945, 0.03456016159057617, 0.03466064071655273, 0.03477081680297851, 0.03509667205810547, 0.035043327331542966, 0.03527679824829102, 0.035213153839111326, 0.034875553131103514, 0.03488153457641602, 0.03472943878173828, 0.03467728042602539, 0.03494851303100586, 0.03593072128295898, 0.03495296096801758, 0.03487958526611328, 0.03490166473388672, 0.034895614624023436, 0.03491097640991211, 0.034830337524414064, 0.035057342529296875, 0.035072193145751954, 0.035618942260742185, 0.03512879943847656, 0.03514831924438477, 0.034762271881103514, 0.03463420867919922, 0.0345676155090332, 0.03459539031982422, 0.03443302536010742, 0.03468265533447266, 0.034549983978271484, 0.03557183837890625, 0.03469868850708008, 0.0347534408569336, 0.03481190490722656, 0.03461510467529297, 0.03416902542114258, 0.034353153228759765, 0.034256256103515625, 0.03414662551879883, 0.03429817581176758, 0.034369537353515625, 0.03450262451171875, 0.03426512145996094, 0.034592769622802735, 0.034418174743652344, 0.034820606231689456, 0.03427135848999024, 0.034236286163330076, 0.034301952362060545, 0.03433801651000976, 0.03422828674316406, 0.034179424285888674, 0.03412416076660156, 0.034351104736328124, 0.03423017501831055, 0.03473132705688477, 0.034445152282714844, 0.03443193435668945, 0.0345022087097168, 0.03462582397460937, 0.034709438323974606, 0.03429951858520508, 0.034177631378173826, 0.034146305084228515, 0.03410739135742188, 0.034344959259033206, 0.034076385498046875, 0.03399900817871094, 0.033983871459960936, 0.034134784698486326, 0.03394268798828125, 0.034126686096191405, 0.03401062393188477, 0.03411199951171875, 0.03470915222167969, 0.034119678497314454, 0.034046302795410155, 0.03491398239135742, 0.035298881530761717, 0.03434307098388672, 0.03447049713134766, 0.03409305572509766, 0.03419750213623047, 0.03444108963012695, 0.034477569580078124, 0.03473590469360351, 0.03499708938598633, 0.034329761505126954, 0.03399078369140625, 0.034082561492919924, 0.034055137634277345, 0.0345863037109375, 0.03401871871948242, 0.035126686096191406, 0.03424111938476562, 0.03416883087158203, 0.034097152709960936, 0.034789119720458984, 0.03417113494873047, 0.034041343688964845, 0.03413043212890625, 0.03402956771850586, 0.03415244674682617, 
0.033963550567626954, 0.03396988677978516, 0.03419827270507812, 0.034033214569091794, 0.034259201049804684, 0.034267040252685545, 0.034181407928466793, 0.03425075149536133, 0.03406857681274414, 0.034575519561767576, 0.03404262542724609, 0.03424460983276367, 0.0343037109375, 0.03405852890014648, 0.03446752166748047, 0.034105663299560544, 0.03414252853393555, 0.03401696014404297, 0.03440409469604492, 0.03421820831298828, 0.034248062133789066, 0.03416924667358399, 0.03421209716796875, 0.034236415863037106, 0.034402305603027344, 0.0342999038696289, 0.03434700775146484, 0.03435129547119141, 0.03480559921264648, 0.0343570556640625, 0.03467689514160156, 0.034227615356445314, 0.03409888076782226, 0.03447312164306641, 0.03890969467163086, 0.034489662170410156, 0.03447817611694336, 0.0342083511352539, 0.034133121490478514, 0.03419020843505859, 0.03419468688964844, 0.03412009429931641, 0.034092479705810544, 0.034168865203857424, 0.034134174346923826, 0.0342248649597168, 0.034244384765625, 0.034171104431152344, 0.03415603256225586, 0.03390902328491211, 0.03396435165405273, 0.03398236846923828, 0.03449638366699219, 0.035115425109863284, 0.03424665451049805, 0.03423436737060547, 0.034131103515625, 0.034302433013916014, 0.03436991882324219, 0.034062110900878906, 0.03399497604370117, 0.03391897583007813, 0.03397411346435547, 0.03406380844116211, 0.03403401565551758, 0.033921184539794924, 0.033999073028564454, 0.033943134307861327, 0.03407494354248047, 0.034201984405517576, 0.03403062438964844, 0.03400364685058594, 0.034027294158935545, 0.03395369720458984, 0.03393964767456055, 0.03440243148803711, 0.0340398063659668, 0.033947647094726564, 0.034310142517089845, 0.034196990966796875, 0.034024993896484376, 0.033936351776123044, 0.034053310394287106, 0.03410412979125976, 0.03406028747558594, 0.03424431991577148, 0.0342367057800293, 0.03428694534301758, 0.034374305725097656, 0.03414425659179687, 0.034291614532470704, 0.0340931510925293, 0.034086910247802735, 0.03402278518676758, 0.03416060638427734, 0.03405481719970703, 0.03399884796142578, 0.03419295883178711, 0.03410374450683594, 0.03438796615600586, 0.03417212677001953, 0.0346448974609375, 0.0341605110168457, 0.03407462310791016, 0.0341605110168457, 0.0340766716003418, 0.03406246566772461, 0.03417494583129883, 0.03445148849487305, 0.03432598495483399, 0.03423900985717773, 0.03402342224121094, 0.03415039825439453, 0.0341319694519043, 0.034297439575195314, 0.034248512268066404, 0.035999744415283204, 0.03453952026367187, 0.0349409294128418, 0.03426233673095703, 0.03446780776977539, 0.03464799880981445, 0.03422496032714844, 0.034213855743408204, 0.03418316650390625, 0.03417446517944336, 0.03416438293457031, 0.03436399841308594, 0.03407692718505859, 0.03419686508178711, 0.03405270385742187, 0.034245792388916015, 0.03458137512207031, 0.03435027313232422, 0.034242977142333986, 0.03412009429931641, 0.034369022369384765, 0.0347960319519043, 0.0342210578918457, 0.03440332794189453, 0.03426889419555664, 0.034471614837646485, 0.034622047424316404, 0.03409660720825195, 0.03403532791137695, 0.0342127685546875, 0.03396198272705078, 0.03399292755126953, 0.033957664489746096, 0.03411558532714844, 0.033941505432128906, 0.03399679946899414, 0.03399404907226562, 0.033996833801269534, 0.033865985870361326, 0.034191200256347656, 0.03392979049682617, 0.03398179244995117, 0.033927841186523436, 0.0339944953918457, 0.03397574234008789, 0.03391551971435547, 0.034199745178222656, 0.03414137649536133, 0.03409552001953125, 0.03414672088623047, 0.03393264007568359, 0.03403171157836914, 
0.03411740875244141, 0.034380062103271485, 0.0342465934753418, 0.03422220611572266, 0.033988384246826174, 0.03400771331787109, 0.03404307174682617, 0.034065086364746096, 0.033965953826904295, 0.034057823181152344, 0.03462416076660156, 0.03530937576293945, 0.03433801651000976, 0.03417561721801758, 0.03412793731689453, 0.03418105697631836, 0.03414982223510742, 0.03412611389160156, 0.03436947250366211, 0.03407715225219726, 0.03513366317749023, 0.03463283157348633, 0.03405622482299805, 0.03413283157348633, 0.03420479965209961, 0.034226913452148434, 0.033935295104980466, 0.03402364730834961, 0.03400908660888672, 0.033972225189208984, 0.0340513916015625, 0.033944225311279295, 0.03387395095825195, 0.03392102432250976, 0.03383500671386719, 0.0340316162109375, 0.034119678497314454, 0.03405526351928711, 0.03395471954345703, 0.034523136138916014, 0.034105342864990236, 0.034477054595947264, 0.034083839416503905, 0.03406028747558594, 0.03413433456420899, 0.03429548645019531, 0.03419136047363281, 0.03439206314086914, 0.034342910766601564, 0.03430559921264648, 0.03450105667114258, 0.034096672058105466, 0.034138591766357425, 0.03409414291381836, 0.03407558441162109, 0.03420918273925781, 0.034026016235351564, 0.03402051162719726, 0.03406326293945312, 0.03407228851318359, 0.034060577392578124, 0.03884543991088867, 0.03568265533447266, 0.035456768035888674, 0.03417385482788086, 0.03412582397460937, 0.0342806396484375, 0.03440723037719726, 0.03408099365234375, 0.03392899322509765, 0.035076095581054685, 0.03597721481323242, 0.036083713531494144, 0.034179073333740234]",tokens/s,29.038060025852264,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3900, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,6410.006528,8461.877248,0.0,8059.355136,8042.68544,s,1,14.7347255859375,14.7347255859375,0.0,14.7347255859375,14.7347255859375,14.7347255859375,14.7347255859375,[14.7347255859375],,kWh,0.00021281475847919557,2.3467760550763567e-05,6.486755189398508e-05,0.0003011500709239442,,MB,2247.933952,8577.220608,0.0,8160.018432,8135.406592,s,10,3.0633481445312505,0.30633481445312494,0.00046614011423146953,0.3064408721923828,0.3067920135498047,0.3069927474975586,0.3071533346557617,"[0.30568038940429687, 0.3065343017578125, 0.3056856384277344, 0.30581021118164065, 0.3064333190917969, 0.30659112548828127, 0.30644842529296873, 0.3062238464355469, 0.30674740600585937, 0.3071934814453125]",tokens/s,835.6869279027796,kWh,9.024190836994145e-06,9.952170733613705e-07,5.958169749697395e-06,1.597757766005291e-05,tokens/kWh,16022453.806627423,MB,2252.259328,8608.677888,0.0,8191.475712,8135.409152,s,10,44.53597412109375,4.453597412109375,0.02245122303960619,4.45417822265625,4.48051455078125,4.482069775390625,4.483313955078125,"[4.4801689453125, 4.46616650390625, 4.44316650390625, 4.47725927734375, 4.42632421875, 4.483625, 4.46145068359375, 4.43472119140625, 4.44690576171875, 
4.41618603515625]",tokens/s,14.14586774922726,kWh,0.00012866990539967321,1.419274351983354e-05,7.919714837450106e-05,0.00022205979729400784,tokens/kWh,283707.3651678958,,s,630,44.53288542938234,0.07068711972917831,0.0010617360121943547,0.07080641555786132,0.07163849563598633,0.0722455192565918,0.07386451042175293,"[0.07119257354736327, 0.07008624267578124, 0.07043318176269531, 0.07078022766113282, 0.07100252532958984, 0.07091212463378906, 0.07085424041748047, 0.07067890930175781, 0.07050035095214843, 0.07126345825195313, 0.07135485076904297, 0.07164569854736329, 0.07127654266357422, 0.0713616943359375, 0.0716685791015625, 0.07153865814208984, 0.07112297821044922, 0.0712921600341797, 0.07129756927490234, 0.07113750457763672, 0.07089971160888672, 0.07049625396728515, 0.07246438598632812, 0.0713127670288086, 0.070766845703125, 0.07082803344726563, 0.07070089721679687, 0.07145731353759766, 0.07119667053222656, 0.0711409912109375, 0.0711335678100586, 0.07130111694335937, 0.07097958374023437, 0.07242546844482421, 0.07175939178466798, 0.07151663970947265, 0.07087718200683593, 0.07114054107666015, 0.07111968231201173, 0.07097468566894531, 0.07153939056396484, 0.07159324645996094, 0.07147821044921875, 0.07238028717041016, 0.07148134613037109, 0.07116614532470703, 0.07085596466064453, 0.0712320327758789, 0.07041580963134765, 0.07054099273681641, 0.07038438415527344, 0.07023366546630859, 0.07085465240478515, 0.07062790679931641, 0.07018243408203124, 0.07092412567138671, 0.07078975677490235, 0.07091200256347656, 0.07119602966308594, 0.07098445129394532, 0.07135955047607422, 0.07130809783935547, 0.07150096130371093, 0.07121539306640624, 0.0707548828125, 0.07053926086425781, 0.07055763244628906, 0.07122354888916016, 0.07132527923583984, 0.07133213043212891, 0.07233734130859375, 0.07147830200195313, 0.07126525115966797, 0.07150796508789062, 0.0716959686279297, 0.07389225769042969, 0.07148134613037109, 0.0718333740234375, 0.07201634979248046, 0.07188761901855469, 0.07149366760253906, 0.07151641845703124, 0.07157833862304687, 0.07085049438476562, 0.07117135620117188, 0.07001487731933594, 0.07003366088867187, 0.07008726501464843, 0.06946205139160157, 0.06989823913574218, 0.06956985473632812, 0.07062598419189453, 0.07281664276123047, 0.06932022094726563, 0.06900169372558594, 0.06930841827392578, 0.07025049591064453, 0.06960297393798828, 0.06953404998779297, 0.07131651306152344, 0.07009311676025391, 0.07022454071044922, 0.06969344329833985, 0.06906185913085937, 0.06998710632324219, 0.06969158172607422, 0.06990444946289062, 0.07089945220947266, 0.07083417510986328, 0.07314835357666015, 0.0711412124633789, 0.07081961822509765, 0.07223161315917968, 0.07148659515380859, 0.07086860656738281, 0.07115878295898438, 0.07122025299072265, 0.07077337646484375, 0.07058380889892578, 0.07090493011474609, 0.07087427520751953, 0.07100466918945313, 0.07136793518066406, 0.07161542510986328, 0.07138054656982422, 0.07108438110351563, 0.0743198699951172, 0.0715060806274414, 0.07134397125244141, 0.07110655975341797, 0.07080697631835937, 0.07099798583984375, 0.0703616943359375, 0.07037337493896484, 0.06986956787109375, 0.06938623809814454, 0.06964749145507812, 0.06993190765380859, 0.06972825622558594, 0.0700103988647461, 0.07002877044677734, 0.07044191741943359, 0.070168701171875, 0.0696069107055664, 0.0702462387084961, 0.07021014404296876, 0.07006221008300781, 0.06971379089355469, 0.06950048065185546, 0.06921379089355469, 0.06982259368896485, 0.07022665405273437, 0.06976703643798828, 0.06955840301513672, 0.06985641479492187, 
0.07162703704833985, 0.06968406677246093, 0.06941673278808594, 0.07019308471679687, 0.07057202911376953, 0.07043276977539062, 0.07056793975830078, 0.07076659393310547, 0.07010249328613281, 0.07078253173828125, 0.07146141052246094, 0.07332498931884765, 0.07044915008544922, 0.07093657684326173, 0.07072310638427734, 0.07067286682128907, 0.07069286346435547, 0.07088105773925782, 0.07112111663818359, 0.07133798217773438, 0.07139328002929687, 0.07192371368408203, 0.07153052520751953, 0.071057373046875, 0.07161856079101563, 0.07140351867675782, 0.07123961639404297, 0.07084979248046876, 0.07058428955078125, 0.07024845123291015, 0.07012624359130859, 0.06947859191894531, 0.06892953491210937, 0.06897049713134766, 0.07045158386230468, 0.0698757095336914, 0.06978057861328125, 0.06989507293701172, 0.07027507019042968, 0.07005990600585937, 0.07028749084472656, 0.07612745666503906, 0.07018160247802735, 0.07039801788330079, 0.0700579833984375, 0.07074610900878907, 0.07086067199707032, 0.07102630615234375, 0.07096371459960937, 0.07067017364501953, 0.07065206146240234, 0.07093043518066407, 0.07127030181884765, 0.07208150482177735, 0.07225689697265625, 0.07104940795898437, 0.07108243560791015, 0.0713667221069336, 0.07141136169433594, 0.07104745483398438, 0.07199456024169922, 0.07090668487548828, 0.07112499237060547, 0.07061094665527344, 0.07029698944091797, 0.07039036560058594, 0.07093193817138672, 0.07059724426269531, 0.0708535385131836, 0.07180342102050781, 0.07138582611083985, 0.07091993713378907, 0.0707682876586914, 0.07050685119628906, 0.07082412719726562, 0.0708602523803711, 0.07093897247314453, 0.07105535888671875, 0.07129293060302734, 0.07283916473388671, 0.07144652557373046, 0.07143612670898437, 0.07097545623779297, 0.07136070251464843, 0.0713707504272461, 0.07122054290771485, 0.07176057434082031, 0.07243366241455078, 0.07161443328857423, 0.07158191680908203, 0.07154185485839844, 0.07097007751464844, 0.07109439849853516, 0.07118377685546876, 0.07081622314453125, 0.07037529754638672, 0.0700847396850586, 0.07148944091796874, 0.07177721405029297, 0.07088703918457032, 0.07229593658447266, 0.07077772521972656, 0.07015628814697265, 0.06978108978271484, 0.06956233978271484, 0.06917782592773437, 0.06851376342773438, 0.06874931335449219, 0.06878323364257813, 0.06893824005126953, 0.06873535919189454, 0.06856703948974609, 0.06843309020996094, 0.06876153564453125, 0.06940557098388672, 0.06917836761474609, 0.0692889633178711, 0.06827622222900391, 0.06881868743896484, 0.0686219482421875, 0.06857698822021484, 0.06833449554443359, 0.06954422760009765, 0.07067804718017578, 0.07091014099121094, 0.07083586883544922, 0.07059248352050781, 0.071327392578125, 0.07556575775146485, 0.07269923400878907, 0.07110918426513672, 0.07125004577636719, 0.07178240203857422, 0.07716659545898437, 0.07185408020019532, 0.07141990661621093, 0.07128809356689453, 0.0709967041015625, 0.07106150054931641, 0.07047277069091797, 0.06975788879394532, 0.06962566375732422, 0.07001299285888672, 0.07190089416503906, 0.07022415924072266, 0.06941094207763672, 0.06910361480712891, 0.06903807830810547, 0.06840435028076172, 0.06900553894042968, 0.06920873260498046, 0.06930194854736328, 0.06946336364746093, 0.07046041870117188, 0.07017852783203125, 0.0703217315673828, 0.07132848358154296, 0.07097548675537109, 0.07095017242431641, 0.07088201904296874, 0.07191961669921874, 0.07144857788085937, 0.07160028839111328, 0.07191126251220703, 0.07078409576416016, 0.0709715805053711, 0.07145549011230469, 0.07067235565185546, 0.07076659393310547, 0.070582275390625, 
0.07114457702636719, 0.07083468627929687, 0.0710618896484375, 0.07135641479492187, 0.0711577606201172, 0.07237427520751953, 0.07096934509277343, 0.07094857788085937, 0.07064809417724609, 0.07084236907958984, 0.07124787139892579, 0.07375462341308593, 0.07056301116943359, 0.06972089385986328, 0.06976306915283204, 0.07004512023925781, 0.07055827331542969, 0.07509222412109375, 0.07019904327392579, 0.07038566589355469, 0.07051878356933594, 0.0708399658203125, 0.07003171539306641, 0.0699097900390625, 0.07176675415039062, 0.07084031677246094, 0.070940673828125, 0.07107788848876953, 0.07190697479248047, 0.07080585479736329, 0.07350476837158203, 0.07155712127685547, 0.07155846405029297, 0.07058403015136719, 0.07096969604492187, 0.07071353912353516, 0.07036752319335937, 0.07017897796630859, 0.07138098907470704, 0.07088976287841797, 0.07150768280029297, 0.07076815795898438, 0.07133219146728516, 0.07084809875488281, 0.07118598175048828, 0.07130825805664062, 0.0713359375, 0.07131136322021485, 0.0718766098022461, 0.07318748474121094, 0.07147071838378906, 0.07120508575439453, 0.07086265563964844, 0.07189491271972656, 0.07130381011962891, 0.07104303741455079, 0.07113526153564453, 0.07086009979248047, 0.07144226837158203, 0.07110527801513672, 0.0704512939453125, 0.07052041625976563, 0.0705498275756836, 0.0709521255493164, 0.0702508773803711, 0.0701591339111328, 0.07006387329101563, 0.0709222412109375, 0.07000387573242188, 0.07011414337158203, 0.070002685546875, 0.07077900695800782, 0.0714889907836914, 0.07035472106933593, 0.07077292633056641, 0.07091836547851563, 0.07101264190673828, 0.0716635513305664, 0.07117574310302735, 0.07095136260986327, 0.07105110168457031, 0.07084047698974609, 0.0709197769165039, 0.07015859222412109, 0.07085657501220703, 0.07083660888671875, 0.07115484619140625, 0.07106998443603516, 0.07115155029296875, 0.07098992156982421, 0.07072207641601562, 0.07118633270263672, 0.07036255645751953, 0.07046195220947266, 0.07076649475097656, 0.07290249633789063, 0.07144000244140625, 0.0716376953125, 0.07128883361816406, 0.07108975982666016, 0.07149167633056641, 0.07079763031005859, 0.07018905639648437, 0.06938784027099609, 0.06996947479248047, 0.06988070678710938, 0.07166902160644531, 0.0703331527709961, 0.07023359680175781, 0.07004624176025391, 0.07046141052246094, 0.07067996978759766, 0.0706176986694336, 0.07070310211181641, 0.07124358367919922, 0.0706723861694336, 0.07222799682617187, 0.07193276977539062, 0.07103302764892579, 0.07116976165771484, 0.07068876647949218, 0.07027471923828126, 0.06996141052246094, 0.06927986907958984, 0.07094092559814454, 0.07098118591308594, 0.06985196685791016, 0.0700999984741211, 0.07020438385009765, 0.06979174041748047, 0.06951116943359376, 0.06927999877929687, 0.06853750610351562, 0.06844432067871094, 0.06868831634521484, 0.06918547058105469, 0.06913139343261719, 0.06918608093261719, 0.06889859008789062, 0.06895171356201171, 0.06926563262939453, 0.06949350738525391, 0.06898009490966797, 0.06936640167236328, 0.06960332489013672, 0.06922569274902343, 0.06875017547607422, 0.06861933135986328, 0.06947119903564453, 0.07038966369628906, 0.07307263946533203, 0.070830078125, 0.0703506851196289, 0.07074214172363281, 0.07032556915283203, 0.069757568359375, 0.07005567932128906, 0.07054732513427735, 0.07108246612548828, 0.07038694763183594, 0.07043145751953125, 0.0711618881225586, 0.07087712097167968, 0.0710042266845703, 0.07100147247314453, 0.07267596435546875, 0.07093762969970703, 0.07379657745361329, 0.07098947143554687, 0.07027542114257812, 0.07069900512695312, 
0.07089766693115235, 0.07266508483886719, 0.07141107177734375, 0.07161714935302735, 0.07136460876464844, 0.0714240951538086, 0.07127849578857422, 0.07132978820800781, 0.07102140808105468, 0.06977942657470704, 0.06942720031738281, 0.07044915008544922, 0.06988169860839843, 0.07053638458251953, 0.07017945861816406, 0.06982281494140626, 0.06960128021240235, 0.06972415924072266, 0.07012882995605468, 0.07033888244628907, 0.07094528198242188, 0.07100425720214844, 0.07118019104003906, 0.07060415649414062, 0.06940850830078125, 0.06941577911376953, 0.06945964813232422, 0.06955862426757813, 0.07148544311523437, 0.07090790557861328, 0.07012515258789062, 0.07040998077392578, 0.07002178955078125, 0.07021481323242187, 0.0719163818359375, 0.07121046447753906, 0.07100675201416015, 0.07168994903564453, 0.07213494110107421, 0.07174553680419922, 0.07138832092285156, 0.07113814544677734, 0.07114546966552734, 0.07042390441894532, 0.07002082824707032, 0.07009334564208984, 0.06949110412597656, 0.06889849853515626, 0.06886201477050781, 0.06929395294189453, 0.06955648040771484, 0.06961984252929687, 0.07036313629150391, 0.07115721893310546, 0.07051522827148438, 0.07082803344726563, 0.07063516998291015, 0.07436323547363281, 0.070582275390625, 0.07119894409179688, 0.0705738525390625, 0.06984883117675782, 0.07084671783447266, 0.07078284454345703, 0.07100399780273438, 0.07055388641357421, 0.0707747802734375, 0.06997401428222656, 0.07317708587646485, 0.0711352310180664, 0.07299209594726562, 0.07040499114990234, 0.07006764984130859, 0.06962374114990234, 0.07037155151367187, 0.0695723876953125, 0.06923900604248047, 0.06926992034912109, 0.06895977783203125, 0.06866896057128906, 0.06884857940673828, 0.0686776351928711, 0.06855680084228516, 0.06848115539550781, 0.06851757049560547, 0.06832726287841796, 0.068393310546875, 0.0685047378540039, 0.06892835235595703, 0.0699310073852539, 0.06995378875732422, 0.06959606170654296, 0.06938050842285157, 0.06888492584228516, 0.06870658874511719, 0.06932422637939453, 0.06860559844970703, 0.06846854400634765, 0.06949356842041016, 0.07020527648925781, 0.070098876953125, 0.07077667236328125, 0.0708565444946289, 0.07072207641601562, 0.07070121765136719, 0.0701069107055664, 0.07088706970214843, 0.07035330963134766, 0.07084441375732421, 0.07072367858886719, 0.06967286682128906, 0.07029952239990235, 0.07018099212646485, 0.07324610900878906, 0.07040995025634765, 0.07083712005615235, 0.07021363067626953, 0.07086857604980469, 0.07105123138427734, 0.07128518676757813, 0.0714772491455078, 0.07155059051513672, 0.07129920196533203, 0.0710445785522461, 0.07088349151611328, 0.07091571044921875, 0.07048678588867187, 0.07250300598144531, 0.0702633285522461, 0.07058399963378906, 0.0709082260131836, 0.0711200942993164, 0.0712220458984375, 0.07149321746826172]",tokens/s,14.146848871919998,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,7435.067392,8041.463808,0.0,7646.216192,7627.584,s,1,12.991546875,12.991546875,0.0,12.991546875,12.991546875,12.991546875,12.991546875,[12.991546875],,kWh,0.00017130919977500125,1.8889409411852197e-05,5.410198772600079e-05,0.00024430059691285424,,MB,1762.79552,8725.13536,0.0,8315.20768,8191.863296,s,10,3.3267133789062506,0.33267133789062503,0.00033820709782437364,0.3326336975097656,0.3330281829833984,0.3331999130249023,0.3333372970581055,"[0.3326554870605469, 0.3323913879394531, 0.3329900207519531, 0.3326119079589844, 0.33216793823242186, 0.33296771240234374, 0.3325362243652344, 0.3326815185546875, 0.33337164306640626, 0.3323395385742188]",tokens/s,769.5282726285459,kWh,9.738502531527805e-06,1.0739790810460098e-06,6.445282934000018e-06,1.7257764546573834e-05,tokens/kWh,14833902.694009311,MB,1768.218624,9039.70816,0.0,8629.78048,8480.067584,s,10,26.721745361328125,2.6721745361328124,0.005471856679455445,2.672157958984375,2.678659765625,2.6799368164062503,2.68095845703125,"[2.662966552734375, 2.66588330078125, 2.66883642578125, 2.668659912109375, 2.671700927734375, 2.672614990234375, 2.674096923828125, 2.677396484375, 2.6783759765625, 2.6812138671875]",tokens/s,23.576304297538137,kWh,7.822668066305538e-05,8.628155067764689e-06,5.198445825419998e-05,0.0001388392939850201,tokens/kWh,453762.030846954,,s,630,26.71208351898191,0.042400132569812586,0.00038199873817920996,0.042404895782470704,0.042909495162963864,0.0430070894241333,0.04318935146331787,"[0.041968673706054685, 0.041872352600097654, 0.04167382431030273, 0.041505695343017575, 0.04162355041503906, 0.04158464050292969, 0.04158464050292969, 0.041723358154296876, 0.04179532623291016, 0.041729854583740233, 0.04183324813842773, 0.0419898567199707, 0.042024608612060546, 0.04181238555908203, 0.04197587203979492, 0.042237632751464846, 0.04200515365600586, 0.04196768188476562, 0.041816001892089845, 0.0421409912109375, 0.04211977767944336, 0.041957374572753905, 0.04215135955810547, 0.04218537521362305, 0.04213324737548828, 0.042106880187988284, 0.04203494262695313, 0.041931262969970705, 0.04193830490112305, 0.04223040008544922, 0.04237324905395508, 0.04228099060058594, 0.042094432830810546, 0.04206099319458008, 0.04231865692138672, 0.042377120971679685, 0.04252880096435547, 0.042377281188964847, 0.042254337310791014, 0.04240137481689453, 0.04259267044067383, 0.042563552856445315, 0.04243833541870117, 0.04298976135253906, 0.04271635055541992, 0.042678817749023434, 0.04269308853149414, 0.04247343826293945, 0.04242956924438476, 0.042584705352783206, 0.04251855850219727, 0.04254537582397461, 0.042635265350341796, 0.042759552001953124, 0.04259916687011719, 0.04263248062133789, 0.042711360931396485, 0.042708255767822265, 0.04276348876953125, 0.042938144683837894, 0.04294246292114258, 0.042850303649902347, 0.04279296112060547, 0.04178739166259766, 0.041696990966796875, 0.04185756683349609, 0.041946430206298825, 0.04193734359741211, 0.04174233627319336, 0.04173619079589844, 0.041646080017089845, 0.041586593627929686, 0.041707775115966794, 0.04190419387817383, 0.042065345764160156, 0.04214204788208008, 0.04193075180053711, 0.042446849822998046, 0.04219811248779297, 0.042166591644287106, 0.0420931510925293, 0.042008575439453126, 0.04215398406982422, 0.04222886276245117, 0.042087295532226565, 0.04201846313476563, 0.0420621452331543, 0.0422973747253418, 0.042231807708740236, 0.042162174224853514, 0.042356929779052734, 0.042167713165283206, 0.0421393928527832, 0.04213766479492188, 
0.04205369567871094, 0.042154529571533206, 0.042468894958496095, 0.04252463912963867, 0.042418113708496095, 0.04243513488769531, 0.04259417724609375, 0.04256371307373047, 0.04252684783935547, 0.04258598327636719, 0.0425206413269043, 0.042794944763183594, 0.04275830459594727, 0.04268592071533203, 0.042567550659179686, 0.04252262496948242, 0.042496192932128904, 0.04250246429443359, 0.042562782287597654, 0.04244969558715821, 0.04287030410766601, 0.04279539108276367, 0.04247283172607422, 0.04252131271362305, 0.04260611343383789, 0.042525150299072265, 0.0425082893371582, 0.042469024658203125, 0.042562911987304684, 0.04277657699584961, 0.04295977783203125, 0.04297942352294922, 0.04216144180297852, 0.042003360748291016, 0.041912033081054685, 0.04245455932617188, 0.04202473449707031, 0.04196246337890625, 0.0417786865234375, 0.04169705581665039, 0.041800064086914064, 0.041976158142089846, 0.041816062927246093, 0.04155801773071289, 0.041645919799804684, 0.04207222366333008, 0.04208025741577148, 0.04196352005004883, 0.04217804718017578, 0.04217292785644531, 0.04215135955810547, 0.04224208068847656, 0.0422630729675293, 0.04205977630615235, 0.04193667221069336, 0.04215343856811524, 0.0420032958984375, 0.042280223846435545, 0.04224985504150391, 0.04216320037841797, 0.04211097717285156, 0.042395648956298826, 0.04213564682006836, 0.042366081237792966, 0.04232886505126953, 0.042190113067626954, 0.042672863006591795, 0.04247500610351562, 0.04249856185913086, 0.04287088012695312, 0.04260444641113281, 0.04260416030883789, 0.04253084945678711, 0.042546878814697264, 0.04235862350463867, 0.04241900634765625, 0.0425984001159668, 0.04271820831298828, 0.0426690559387207, 0.04253619384765625, 0.042619552612304684, 0.04289750289916992, 0.04292822265625, 0.042563518524169924, 0.04251651382446289, 0.04278879928588867, 0.0428359375, 0.042775806427001954, 0.04266387176513672, 0.04277936172485351, 0.0427369270324707, 0.04270095825195312, 0.042728096008300784, 0.043040702819824216, 0.04320060729980469, 0.04206374359130859, 0.041754463195800784, 0.041728832244873046, 0.04168201446533203, 0.04189276885986328, 0.04190329742431641, 0.04199488067626953, 0.04189734268188477, 0.04202969741821289, 0.041940193176269534, 0.04178409576416016, 0.041918464660644535, 0.04188774490356445, 0.04182220840454102, 0.04175811386108398, 0.04192111968994141, 0.042238079071044925, 0.042210559844970706, 0.04216419219970703, 0.042207294464111325, 0.04207062530517578, 0.04236876678466797, 0.042157791137695313, 0.04222201538085937, 0.042057281494140626, 0.042080863952636716, 0.04211727905273437, 0.04219062423706055, 0.04226399993896485, 0.042204830169677736, 0.04218921661376953, 0.04230604934692383, 0.04241628646850586, 0.042347423553466795, 0.04243552017211914, 0.04227635192871094, 0.04225075149536133, 0.04237865447998047, 0.04232252883911133, 0.04254294586181641, 0.042547359466552734, 0.04249615859985351, 0.04274467086791992, 0.04273660659790039, 0.042704704284667966, 0.042676448822021484, 0.04251443099975586, 0.043003902435302735, 0.04288918304443359, 0.042876510620117186, 0.04285472106933594, 0.04268239974975586, 0.042579551696777344, 0.04261312103271484, 0.042665313720703125, 0.04301107025146484, 0.042907424926757816, 0.042790912628173826, 0.04281532669067383, 0.04297439956665039, 0.04281238555908203, 0.042742977142333986, 0.042990623474121095, 0.04211113739013672, 0.041942432403564454, 0.04183100891113281, 0.04189593505859375, 0.041957374572753905, 0.04180377578735352, 0.04193484878540039, 0.042261825561523435, 0.04219359970092774, 
0.04192870330810547, 0.04189583969116211, 0.041992286682128906, 0.041998207092285154, 0.04206713485717774, 0.04198441696166992, 0.04212310409545898, 0.042183361053466796, 0.0420203857421875, 0.042240478515625, 0.04227260971069336, 0.04217647933959961, 0.04210502243041992, 0.0420208625793457, 0.04226371383666992, 0.042039329528808594, 0.04203193664550781, 0.04230348968505859, 0.042419967651367185, 0.04236838531494141, 0.04233715057373047, 0.042518081665039065, 0.042326465606689456, 0.04232134246826172, 0.042404415130615235, 0.04239116668701172, 0.04248175811767578, 0.04251871871948242, 0.04240188980102539, 0.04272742462158203, 0.042557342529296875, 0.04254316711425781, 0.04253084945678711, 0.042518527984619144, 0.042687839508056644, 0.04272528076171875, 0.042603233337402346, 0.042462974548339846, 0.04291401672363281, 0.042772544860839846, 0.04274176025390625, 0.042580032348632814, 0.04242995071411133, 0.04265619277954102, 0.04300339126586914, 0.042871295928955076, 0.042661727905273436, 0.042868896484375, 0.04310835266113281, 0.04299161529541016, 0.04272537612915039, 0.04292403030395508, 0.042967041015625, 0.042989791870117186, 0.04278681564331055, 0.042059135437011716, 0.04200716781616211, 0.042003456115722655, 0.0419420166015625, 0.04177644729614258, 0.04206809616088867, 0.041968193054199215, 0.041893184661865236, 0.041816318511962894, 0.04177123260498047, 0.04177657699584961, 0.042097152709960936, 0.04223004913330078, 0.042218494415283206, 0.0421629753112793, 0.042109153747558595, 0.04205507278442383, 0.042148448944091796, 0.04204748916625976, 0.0421580810546875, 0.042041343688964845, 0.04194655990600586, 0.04236111831665039, 0.042374881744384765, 0.042266334533691406, 0.0422632942199707, 0.04224726486206055, 0.04248166275024414, 0.04233267211914062, 0.04234086227416992, 0.042229759216308595, 0.04212940979003906, 0.042240001678466796, 0.04250419235229492, 0.04252057647705078, 0.0423807373046875, 0.04235638427734375, 0.042557823181152345, 0.042496543884277344, 0.04254105758666992, 0.0423768310546875, 0.0425432014465332, 0.042670398712158206, 0.04269862365722656, 0.042504287719726565, 0.0430263671875, 0.042820926666259765, 0.04275276947021484, 0.04271513748168945, 0.04269055938720703, 0.04293804931640625, 0.04295916748046875, 0.04318207931518555, 0.042866687774658206, 0.04305100631713867, 0.042928031921386715, 0.04272137451171875, 0.042625022888183595, 0.04278208160400391, 0.04301465606689453, 0.04302979278564453, 0.04289827346801758, 0.042319072723388675, 0.04217081451416015, 0.04205097579956055, 0.04193273544311524, 0.041882625579833986, 0.04186111831665039, 0.04184473419189453, 0.04212531280517578, 0.042039295196533204, 0.041885696411132815, 0.04183049774169922, 0.041825439453125, 0.04204787063598633, 0.04245663833618164, 0.042195327758789064, 0.04216262435913086, 0.042102783203125, 0.042123264312744144, 0.04194918441772461, 0.04195734405517578, 0.04187526321411133, 0.041973983764648434, 0.04219084930419922, 0.04227686309814453, 0.04215091323852539, 0.04234902572631836, 0.042359329223632815, 0.04244188690185547, 0.0422657585144043, 0.042407615661621094, 0.0425382080078125, 0.042611488342285155, 0.042543102264404296, 0.042568737030029294, 0.04241302490234375, 0.042788063049316406, 0.04273846435546875, 0.043053054809570314, 0.042291263580322265, 0.04244473648071289, 0.04272889709472656, 0.042748287200927736, 0.04265798568725586, 0.04260646438598633, 0.04271321487426758, 0.04249401473999023, 0.04266364669799805, 0.042923904418945315, 0.0427454719543457, 0.04275225448608398, 
0.04270947265625, 0.043030529022216796, 0.04290899276733398, 0.042687007904052734, 0.042614944458007814, 0.04260236740112305, 0.042692737579345705, 0.04306867218017578, 0.043394912719726564, 0.0432116813659668, 0.042971134185791016, 0.04279305648803711, 0.04274959945678711, 0.04220713424682617, 0.041984001159667966, 0.04183017730712891, 0.04178351974487305, 0.041869152069091795, 0.041716960906982424, 0.041842689514160154, 0.04210374450683594, 0.04216153717041016, 0.042116832733154294, 0.0425335693359375, 0.042216960906982424, 0.04222563171386719, 0.04232268905639648, 0.04226867294311523, 0.04198604965209961, 0.041818111419677735, 0.04197817611694336, 0.04240351867675781, 0.042405376434326174, 0.04229785537719727, 0.04246092987060547, 0.04250812911987305, 0.04243497467041016, 0.04244844818115234, 0.04232032012939453, 0.042180126190185546, 0.042181087493896485, 0.04245094299316406, 0.04286806488037109, 0.04247747039794922, 0.04236569595336914, 0.04277657699584961, 0.04272537612915039, 0.04258127975463867, 0.04260054397583008, 0.042528385162353514, 0.0424637451171875, 0.042676097869873045, 0.042722049713134765, 0.04257369613647461, 0.042559486389160156, 0.042848255157470705, 0.04271279907226563, 0.04250019073486328, 0.04270870590209961, 0.04278076934814453, 0.0429029426574707, 0.04282467269897461, 0.04285440063476562, 0.042729503631591795, 0.042635231018066405, 0.04258390426635742, 0.042812896728515626, 0.04300051116943359, 0.04295043182373047, 0.042782943725585935, 0.04308361434936524, 0.04296828842163086, 0.042898368835449216, 0.04282905578613281, 0.04293241500854492, 0.04297580718994141, 0.041940990447998046, 0.04180307388305664, 0.04196217727661133, 0.04189401626586914, 0.04168454360961914, 0.04203948974609375, 0.04205110549926758, 0.041941600799560545, 0.04181196975708008, 0.04196761703491211, 0.041971710205078124, 0.042017951965332034, 0.041785568237304685, 0.041964160919189454, 0.042171806335449216, 0.04224060821533203, 0.042302913665771484, 0.04227743911743164, 0.04230963134765625, 0.04228851318359375, 0.04256335830688476, 0.04237395095825195, 0.042355838775634765, 0.0423223991394043, 0.04221177673339844, 0.042358814239501955, 0.04244617462158203, 0.042471710205078124, 0.042430686950683597, 0.042698879241943356, 0.042602718353271486, 0.04238016128540039, 0.04223017501831055, 0.042608543395996096, 0.042584545135498045, 0.04250361633300781, 0.042754302978515624, 0.042736064910888674, 0.04266300964355469, 0.042619583129882815, 0.04256175994873047, 0.04259337615966797, 0.04254608154296875, 0.042780670166015625, 0.04289708709716797, 0.04281171035766602, 0.04300969696044922, 0.042858463287353515, 0.04274214553833008, 0.04264348983764649, 0.043014110565185545, 0.042968929290771486, 0.04304873657226563, 0.0430145263671875, 0.04290351867675781, 0.04289932632446289, 0.04286495971679687, 0.04288905715942383, 0.043126911163330076, 0.043159423828125, 0.043026462554931644, 0.04336022567749023, 0.04314316940307617, 0.04210038375854492, 0.04226108932495117, 0.04212892913818359, 0.0418554573059082, 0.041750560760498046, 0.041955135345458985, 0.04216233444213867, 0.04228227233886719, 0.0419376335144043, 0.042176513671875, 0.042228832244873046, 0.042259361267089846, 0.04250032043457031, 0.042434337615966794, 0.04232499313354492, 0.04209561538696289, 0.04191027069091797, 0.04191372680664063, 0.04228505706787109, 0.0422795524597168, 0.042215614318847655, 0.04240364837646484, 0.04239155197143555, 0.042246143341064454, 0.0421550407409668, 0.04211606216430664, 0.04208639907836914, 0.04215580749511719, 
0.04240982437133789, 0.042546817779541016, 0.042614688873291014, 0.04257059097290039, 0.042516288757324217, 0.042743297576904295, 0.04266668701171875, 0.04273561477661133, 0.042790912628173826, 0.042823585510253906, 0.042937664031982424, 0.04291584014892578, 0.04274460983276367, 0.042796222686767575, 0.04262380981445312, 0.042635265350341796, 0.04252057647705078, 0.04279216003417969, 0.04289007949829102, 0.04276380920410156, 0.04321116638183594, 0.04293769454956055, 0.04288070297241211, 0.04278992080688476, 0.04271507263183594, 0.04303257751464844, 0.042974624633789066, 0.04287548828125, 0.04326604843139648, 0.04319232177734375, 0.04315692901611328, 0.04296345520019531, 0.04312649536132813, 0.043180000305175784, 0.043028865814208984]",tokens/s,23.584831918944637,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,828.616704,551.419904,0.0,148.897792,141.633024,s,1,7.78901318359375,7.78901318359375,0.0,7.78901318359375,7.78901318359375,7.78901318359375,7.78901318359375,[7.78901318359375],,kWh,1.3088574887494057e-05,1.4365149989689988e-06,4.32778124000166e-06,1.8852871126464717e-05,,MB,1298.546688,666.763264,0.0,249.561088,216.900608,s,24,0.22888515377044677,0.009536881407101948,8.85650268973128e-05,0.009515711784362793,0.009681363010406493,0.009725766468048095,0.009747128343582153,"[0.00971292781829834, 0.009426624298095702, 0.00945417594909668, 0.009550016403198242, 0.00948902416229248, 0.009593312263488769, 0.009752832412719726, 0.009455103874206543, 0.009728032112121582, 0.009433888435363769, 0.009560128211975098, 0.00948624038696289, 0.009483327865600586, 0.009501407623291016, 0.009496416091918945, 0.009434592247009278, 0.009476032257080078, 0.00950233554840088, 0.009529151916503907, 0.009607711791992187, 0.009577343940734864, 0.009534815788269042, 0.009570624351501465, 0.009529088020324707]",tokens/s,26843.15648607744,kWh,2.7640784228218323e-07,3.0482945692502316e-08,1.5584397989394166e-07,4.627347678686272e-07,tokens/kWh,553232689.1691003,MB,1312.391168,691.929088,0.0,274.726912,216.903168,s,24,9.988657409667969,0.4161940587361654,0.002557537829658557,0.4157039642333984,0.42025921020507817,0.4216589111328125,0.42225664123535156,"[0.41687808227539064, 0.4166531982421875, 0.4148886413574219, 0.41281082153320314, 0.417097412109375, 0.4217606811523438, 0.42240478515625, 0.4210822143554688, 0.41747030639648436, 0.4143987121582031, 0.4136266479492188, 0.41328915405273436, 0.41478518676757814, 0.41532305908203127, 0.4131085510253906, 0.41540301513671873, 0.4151427001953125, 0.413345458984375, 0.4163067932128906, 0.41519143676757814, 0.4183388671875, 0.4160491943359375, 0.41729757690429686, 
0.4160049133300781]",tokens/s,151.37169471209845,kWh,1.1998331026641378e-05,1.3231120146797434e-06,4.5334537530229336e-06,1.7854896794344062e-05,tokens/kWh,3528443.805956731,,s,1512,9.977102428436286,0.006598612717219761,0.00012600581759966573,0.006574687957763671,0.006702179193496704,0.00676454713344574,0.007167503261566165,"[0.006596511840820313, 0.006710783958435058, 0.0070594558715820314, 0.006755199909210205, 0.0066780800819396975, 0.0067199358940124515, 0.006645760059356689, 0.006608672142028808, 0.00674019193649292, 0.00666758394241333, 0.006590400218963623, 0.006568704128265381, 0.0065781760215759275, 0.006516736030578613, 0.00653107213973999, 0.006617087841033936, 0.006651904106140137, 0.006563199996948242, 0.006580863952636719, 0.006680607795715332, 0.006623199939727783, 0.006575712203979492, 0.006526656150817871, 0.006565887928009034, 0.006585055828094482, 0.006523168087005616, 0.006665472030639648, 0.00653769588470459, 0.006520832061767578, 0.006618912220001221, 0.00659603214263916, 0.0066447358131408694, 0.006588287830352783, 0.006544832229614258, 0.006572415828704834, 0.006596704006195068, 0.006518688201904297, 0.006623360157012939, 0.006850175857543946, 0.00670470380783081, 0.006641759872436524, 0.0066312642097473146, 0.006598559856414795, 0.006603136062622071, 0.006575839996337891, 0.006514560222625732, 0.0065771198272705075, 0.006515935897827148, 0.006504576206207275, 0.0065831680297851565, 0.006551199913024902, 0.006631552219390869, 0.00658022403717041, 0.006571104049682617, 0.00656217622756958, 0.0066044158935546875, 0.006644383907318115, 0.006631231784820556, 0.006601151943206787, 0.006512639999389648, 0.006596127986907959, 0.006576128005981445, 0.0065552000999450685, 0.006290143966674805, 0.006576320171356201, 0.0066984319686889645, 0.006664127826690674, 0.0066317119598388674, 0.006551680088043213, 0.006567808151245117, 0.006635519981384277, 0.006582272052764892, 0.006587999820709229, 0.006576767921447754, 0.006582047939300537, 0.00652288007736206, 0.006635488033294678, 0.007206208229064941, 0.007883615970611572, 0.00807209587097168, 0.006631840229034424, 0.006577824115753174, 0.006581151962280273, 0.006563327789306641, 0.006619808197021485, 0.00651043176651001, 0.006585504055023193, 0.006556704044342041, 0.006502208232879639, 0.006562848091125489, 0.006517183780670166, 0.006527840137481689, 0.0065532798767089845, 0.006520832061767578, 0.006520832061767578, 0.0066613759994506834, 0.006523647785186768, 0.006535168170928955, 0.006523007869720459, 0.006492032051086426, 0.006529248237609863, 0.006486015796661377, 0.006514463901519775, 0.006543007850646973, 0.00647811222076416, 0.006518752098083496, 0.006530399799346924, 0.006477695941925049, 0.006511648178100586, 0.006502240180969238, 0.006616864204406738, 0.006576320171356201, 0.006498367786407471, 0.006516575813293457, 0.006577792167663574, 0.006492671966552735, 0.0065615358352661135, 0.006551167964935303, 0.00649894380569458, 0.006594719886779785, 0.006557759761810303, 0.00651855993270874, 0.006547008037567139, 0.006512351989746094, 0.006572095870971679, 0.006563936233520508, 0.006322175979614258, 0.0065270719528198245, 0.006510496139526367, 0.006546847820281983, 0.006521279811859131, 0.006572192192077637, 0.006563776016235352, 0.006458943843841553, 0.006567488193511963, 0.006538176059722901, 0.006563072204589844, 0.006587232112884521, 0.0065280961990356445, 0.006623392105102539, 0.006582880020141601, 0.006522816181182861, 0.006571807861328125, 0.006562143802642822, 0.006528223991394043, 0.006559648036956787, 
0.0065465598106384274, 0.006545375823974609, 0.006584352016448975, 0.006577919960021972, 0.0066269121170043944, 0.006674367904663086, 0.0067816638946533205, 0.006602496147155762, 0.0065682239532470705, 0.006549215793609619, 0.006557695865631104, 0.006576288223266602, 0.00662716817855835, 0.006567935943603515, 0.006582272052764892, 0.006509952068328858, 0.006578335762023926, 0.006508031845092774, 0.006671328067779541, 0.006555647850036621, 0.006500351905822754, 0.006555808067321777, 0.006592127799987793, 0.006510816097259522, 0.006545631885528564, 0.006577951908111572, 0.0065146880149841305, 0.006570015907287598, 0.006561439990997314, 0.006564159870147705, 0.006731520175933838, 0.0067852802276611324, 0.006873087882995605, 0.006592512130737305, 0.0066109437942504885, 0.006555647850036621, 0.006615200042724609, 0.006592351913452149, 0.006541183948516846, 0.006602880001068115, 0.006559743881225586, 0.006686336040496827, 0.006617216110229493, 0.006346464157104492, 0.00657203197479248, 0.006516736030578613, 0.006598656177520752, 0.0066109437942504885, 0.006555583953857422, 0.006580287933349609, 0.006545407772064209, 0.006541088104248047, 0.006545631885528564, 0.006581600189208984, 0.006547872066497803, 0.006528416156768799, 0.00651580810546875, 0.006561759948730469, 0.006541088104248047, 0.0065474557876586915, 0.006598624229431152, 0.006545279979705811, 0.006518943786621094, 0.00665177583694458, 0.006500480175018311, 0.006558815956115723, 0.006566688060760498, 0.006508736133575439, 0.006526912212371826, 0.006487199783325195, 0.006511680126190186, 0.0065062718391418455, 0.0064898238182067875, 0.006592127799987793, 0.006533631801605225, 0.006479551792144775, 0.0065491838455200195, 0.006556608200073242, 0.006518144130706787, 0.00655347204208374, 0.006500319957733154, 0.0065071358680725095, 0.0065985918045043945, 0.006448192119598389, 0.006537439823150634, 0.006531871795654297, 0.006467584133148193, 0.006627327919006347, 0.006557695865631104, 0.006606847763061524, 0.0065773119926452635, 0.0066118078231811525, 0.006569983959197998, 0.006585631847381592, 0.0065272641181945805, 0.0065552000999450685, 0.006556863784790039, 0.006540832042694092, 0.006555744171142578, 0.006657663822174072, 0.00653763198852539, 0.00658844804763794, 0.0064934401512145995, 0.006533279895782471, 0.00655347204208374, 0.006515423774719239, 0.0062638077735900875, 0.00657203197479248, 0.006555935859680176, 0.00653385591506958, 0.006551487922668457, 0.006545472145080567, 0.0065120959281921385, 0.006576543807983399, 0.0065578241348266605, 0.006534463882446289, 0.006579999923706055, 0.006519711971282959, 0.0065608639717102055, 0.006575007915496826, 0.0065426878929138186, 0.006570240020751953, 0.006573887825012207, 0.007342688083648682, 0.00699622392654419, 0.006598400115966797, 0.006577727794647217, 0.0065352959632873535, 0.006562111854553222, 0.006524928092956543, 0.0065322241783142086, 0.006529344081878662, 0.006478400230407715, 0.006503488063812256, 0.006645887851715088, 0.00651142406463623, 0.006555967807769776, 0.006594240188598633, 0.006492512226104736, 0.006560959815979004, 0.006619520187377929, 0.006535583972930908, 0.0065083842277526855, 0.006479135990142822, 0.006535583972930908, 0.006525279998779297, 0.006500095844268799, 0.006524991989135742, 0.006472832202911377, 0.006603263854980469, 0.006565919876098633, 0.006522848129272461, 0.006549632072448731, 0.006551231861114502, 0.006566112041473389, 0.006537407875061035, 0.006666528224945068, 0.0065147199630737306, 0.0065445761680603025, 0.006605216026306152, 
0.006507936000823974, 0.006547584056854248, 0.007066336154937744, 0.00659660816192627, 0.007706624031066894, 0.006636608123779297, 0.006511551856994629, 0.007235072135925293, 0.007213183879852295, 0.006326399803161621, 0.0066500802040100095, 0.006547711849212647, 0.00655072021484375, 0.006636096000671387, 0.006567808151245117, 0.0065847039222717285, 0.006643551826477051, 0.006546592235565186, 0.007628543853759766, 0.006685696125030518, 0.006615039825439453, 0.006597792148590088, 0.0066070079803466795, 0.00680620813369751, 0.00660038423538208, 0.006623551845550537, 0.006606847763061524, 0.00658841609954834, 0.00659660816192627, 0.0065474557876586915, 0.006627327919006347, 0.006615039825439453, 0.006600863933563232, 0.006632480144500732, 0.006689599990844727, 0.006649856090545654, 0.006732895851135254, 0.006863903999328613, 0.0066910400390625, 0.0066351361274719235, 0.006713535785675049, 0.0066415038108825685, 0.0066724162101745605, 0.006668255805969238, 0.006661503791809082, 0.006654335975646973, 0.006637824058532715, 0.0066109437942504885, 0.006633471965789795, 0.006618271827697754, 0.00675107192993164, 0.006619135856628418, 0.006688767910003662, 0.0066427521705627445, 0.006627903938293457, 0.0066891517639160155, 0.006709248065948486, 0.007270463943481445, 0.006859936237335205, 0.0068427839279174806, 0.00675878381729126, 0.006784351825714111, 0.006703775882720947, 0.006850560188293457, 0.0067420158386230465, 0.006754271984100342, 0.0066910400390625, 0.006719295978546143, 0.006690368175506591, 0.006723264217376709, 0.00661897611618042, 0.006708064079284668, 0.006392320156097412, 0.006726943969726563, 0.00667855978012085, 0.006687456130981445, 0.006719103813171387, 0.006697279930114746, 0.006674592018127441, 0.006692704200744629, 0.00664089584350586, 0.006695680141448975, 0.006719488143920899, 0.006844768047332764, 0.006709184169769287, 0.00667414379119873, 0.00658022403717041, 0.0066109437942504885, 0.006631584167480469, 0.006645823955535889, 0.006631199836730957, 0.006684864044189453, 0.006553567886352539, 0.0066495680809021, 0.00678275203704834, 0.006801919937133789, 0.006688352108001709, 0.006717696189880371, 0.006633471965789795, 0.006576064109802246, 0.006628928184509278, 0.006580639839172364, 0.006546688079833984, 0.006574944019317627, 0.0065569281578063965, 0.006714111804962159, 0.0067358717918396, 0.007288832187652588, 0.006557695865631104, 0.006719136238098144, 0.006545760154724121, 0.006599711894989014, 0.006677472114562989, 0.007393343925476074, 0.007146912097930908, 0.0068793601989746095, 0.007344319820404053, 0.0070330557823181155, 0.006627327919006347, 0.006567039966583252, 0.006638304233551025, 0.006688479900360107, 0.0065905280113220215, 0.006578559875488281, 0.00688972806930542, 0.006668384075164795, 0.00662284803390503, 0.006552832126617432, 0.006589151859283447, 0.006578239917755127, 0.006667359828948975, 0.006585247993469238, 0.006670335769653321, 0.006507808208465576, 0.006638304233551025, 0.00640121603012085, 0.006558527946472168, 0.006548927783966064, 0.006584640026092529, 0.006623487949371338, 0.0065342397689819335, 0.0066282558441162105, 0.006575839996337891, 0.006560351848602295, 0.006600063800811767, 0.006566207885742187, 0.006584320068359375, 0.006616864204406738, 0.006562016010284424, 0.006586304187774658, 0.006768703937530518, 0.00658022403717041, 0.006702239990234375, 0.006828256130218506, 0.006693503856658936, 0.006827424049377442, 0.006826591968536377, 0.006805439949035644, 0.006727200031280517, 0.00676697587966919, 0.006725791931152344, 
0.006701151847839356, 0.006713247776031494, 0.0067868480682373045, 0.006648032188415527, 0.006715263843536377, 0.006647456169128418, 0.006619487762451172, 0.006670271873474121, 0.006661759853363037, 0.006627903938293457, 0.006911327838897705, 0.0067918400764465335, 0.006677760124206543, 0.006680480003356934, 0.006730591773986817, 0.0066575040817260745, 0.0066070079803466795, 0.006713727951049805, 0.0066375679969787596, 0.006676095962524414, 0.006643936157226562, 0.006662303924560547, 0.006680704116821289, 0.006670368194580078, 0.0066471037864685055, 0.006662559986114502, 0.006670464038848877, 0.006858751773834228, 0.006752352237701416, 0.006715295791625976, 0.006661439895629883, 0.006690783977508545, 0.006671135902404785, 0.006624447822570801, 0.006888192176818847, 0.006700032234191895, 0.006712704181671142, 0.006437280178070068, 0.006704031944274902, 0.006693727970123291, 0.006716896057128907, 0.006642176151275635, 0.006817215919494629, 0.007025279998779297, 0.006681951999664306, 0.006588096141815185, 0.006631584167480469, 0.006599487781524659, 0.006602655887603759, 0.006707104206085205, 0.006590240001678467, 0.006625792026519775, 0.006893472194671631, 0.006699007987976074, 0.006626431941986084, 0.006657120227813721, 0.00663097620010376, 0.0065784001350402835, 0.006608895778656006, 0.0065630397796630855, 0.006564640045166016, 0.006612991809844971, 0.006602752208709717, 0.00659660816192627, 0.006560991764068604, 0.006514944076538086, 0.006582560062408448, 0.006735648155212402, 0.006556191921234131, 0.006579391956329346, 0.006616928100585937, 0.006564000129699707, 0.006603104114532471, 0.006658207893371582, 0.006611455917358398, 0.006570943832397461, 0.006523327827453613, 0.006519328117370606, 0.006655839920043946, 0.006528063774108887, 0.006613088130950928, 0.006906015872955322, 0.006578144073486328, 0.006545983791351319, 0.006567103862762451, 0.006525919914245605, 0.00658128023147583, 0.006581215858459472, 0.006606624126434326, 0.006589759826660156, 0.0065913920402526855, 0.006505472183227539, 0.006572768211364746, 0.006525216102600098, 0.00663046407699585, 0.0065627841949462895, 0.006551519870758056, 0.006710271835327148, 0.006585311889648437, 0.006584320068359375, 0.006271135807037353, 0.006560736179351807, 0.006588895797729492, 0.00653763198852539, 0.006571839809417725, 0.00655350399017334, 0.006578783988952636, 0.006600351810455323, 0.0065773119926452635, 0.006507391929626465, 0.00661030387878418, 0.006595200061798096, 0.006712672233581543, 0.006596479892730713, 0.006541344165802002, 0.006567903995513916, 0.006577023983001709, 0.006493728160858154, 0.0065428481101989745, 0.00653769588470459, 0.006510303974151611, 0.006588575839996338, 0.006519328117370606, 0.0064778242111206055, 0.0066744318008422855, 0.0065120959281921385, 0.006567679882049561, 0.006586751937866211, 0.006541952133178711, 0.006623007774353027, 0.006944992065429687, 0.006629151821136475, 0.006627327919006347, 0.00659449577331543, 0.006546815872192383, 0.006675136089324951, 0.006606592178344727, 0.006502655982971192, 0.006589920043945313, 0.006548351764678955, 0.006536863803863526, 0.006535391807556152, 0.006491104125976563, 0.006525760173797608, 0.006571775913238526, 0.006488319873809815, 0.006635519981384277, 0.006545631885528564, 0.006491871833801269, 0.006569183826446533, 0.006572896003723144, 0.006516511917114258, 0.006572159767150879, 0.006531167984008789, 0.006562079906463623, 0.006603839874267578, 0.006636127948760986, 0.006592031955718994, 0.006558303833007813, 0.006600639820098877, 0.0066007041931152345, 
0.0065977277755737305, 0.006588704109191894, 0.00628985595703125, 0.006552480220794678, 0.006577087879180908, 0.006557695865631104, 0.0065146880149841305, 0.006553599834442139, 0.006509888172149658, 0.006582784175872803, 0.006533408164978027, 0.0065771198272705075, 0.006562975883483887, 0.006570847988128662, 0.006553696155548096, 0.006591072082519531, 0.006574336051940918, 0.006616127967834473, 0.006618048191070557, 0.006514143943786621, 0.006566431999206543, 0.006731776237487793, 0.006506432056427002, 0.0065363521575927735, 0.006564383983612061, 0.006515071868896484, 0.006567935943603515, 0.006541696071624756, 0.006835840225219727, 0.006596415996551514, 0.006539616107940674, 0.006544415950775147, 0.006550335884094238, 0.006504288196563721, 0.006555456161499024, 0.006581920146942139, 0.006487775802612305, 0.00654636812210083, 0.006500383853912353, 0.006620960235595703, 0.006581696033477783, 0.006529823780059814, 0.006571360111236572, 0.00658460807800293, 0.006498591899871826, 0.006549600124359131, 0.006549471855163574, 0.006492224216461182, 0.006537439823150634, 0.006490911960601807, 0.00653001594543457, 0.006559743881225586, 0.006545599937438965, 0.006543231964111328, 0.006539103984832764, 0.006494368076324463, 0.006556704044342041, 0.006689695835113525, 0.006516672134399414, 0.006538815975189209, 0.006571616172790527, 0.006529952049255371, 0.006719488143920899, 0.006559296131134033, 0.00662390422821045, 0.0062614078521728515, 0.006555615901947021, 0.006501760005950928, 0.006659743785858155, 0.006575104236602783, 0.006510047912597656, 0.006542175769805908, 0.00654099178314209, 0.00647811222076416, 0.006553567886352539, 0.0065569281578063965, 0.0065066561698913575, 0.006561984062194824, 0.006498144149780273, 0.006514143943786621, 0.006669151782989502, 0.006514111995697022, 0.0064988799095153805, 0.006557695865631104, 0.0065146880149841305, 0.006590464115142822, 0.006544511795043945, 0.0064860482215881345, 0.00653001594543457, 0.006520287990570068, 0.006509024143218994, 0.006565216064453125, 0.006516479969024659, 0.006560351848602295, 0.0065428481101989745, 0.006504640102386474, 0.006661824226379395, 0.006626399993896484, 0.006522143840789795, 0.0065933442115783695, 0.006526336193084716, 0.006545087814331055, 0.006561471939086914, 0.0065487041473388675, 0.006504159927368164, 0.006557695865631104, 0.006486239910125732, 0.006522655963897705, 0.0065474557876586915, 0.006471680164337158, 0.006545023918151855, 0.006631807804107666, 0.006540607929229736, 0.006525951862335205, 0.006505280017852783, 0.006595263957977295, 0.006592319965362549, 0.006570367813110351, 0.006666016101837158, 0.006629600048065186, 0.006511744022369385, 0.006558591842651367, 0.006543360233306885, 0.0065454401969909665, 0.006588704109191894, 0.006622432231903076, 0.006729343891143799, 0.006786208152770996, 0.006280928134918213, 0.006590752124786377, 0.006543360233306885, 0.006598688125610351, 0.006506720066070557, 0.006535999774932862, 0.006665152072906494, 0.006589824199676513, 0.006572256088256836, 0.006609312057495117, 0.006559743881225586, 0.006578112125396728, 0.006774144172668457, 0.006565663814544678, 0.006564767837524414, 0.00659660816192627, 0.006533120155334473, 0.006551231861114502, 0.006564159870147705, 0.0065391998291015625, 0.006593952178955078, 0.006539711952209473, 0.006567455768585205, 0.0065829758644104005, 0.006537055969238282, 0.006631552219390869, 0.0066089282035827635, 0.006479872226715088, 0.0066744318008422855, 0.006606847763061524, 0.006533120155334473, 0.006598527908325195, 0.006572415828704834, 
0.006523712158203125, 0.006572832107543945, 0.006525087833404541, 0.006535168170928955, 0.006680448055267334, 0.006627295970916748, 0.006531040191650391, 0.006570176124572754, 0.0064880638122558594, 0.006555647850036621, 0.006737919807434082, 0.006511807918548584, 0.006603583812713623, 0.006586080074310303, 0.006473919868469238, 0.006617184162139893, 0.0065734081268310545, 0.0065420160293579105, 0.006583903789520264, 0.00655625581741333, 0.006522655963897705, 0.006563168048858642, 0.006509471893310547, 0.006645023822784424, 0.006601215839385987, 0.006705120086669922, 0.006658048152923584, 0.006661791801452637, 0.006589951992034912, 0.006605663776397705, 0.006286848068237305, 0.006554111957550048, 0.006553823947906494, 0.006566783905029297, 0.006546592235565186, 0.006491903781890869, 0.006573952198028564, 0.006541440010070801, 0.006525023937225342, 0.006704927921295166, 0.006581984043121338, 0.006531487941741943, 0.006690303802490235, 0.006545504093170166, 0.006562272071838379, 0.006598400115966797, 0.006559936046600342, 0.006551583766937256, 0.006572000026702881, 0.00669484806060791, 0.006649951934814453, 0.0066109118461608884, 0.00658841609954834, 0.006594560146331787, 0.006643839836120605, 0.006592383861541748, 0.006915679931640625, 0.006580639839172364, 0.006633471965789795, 0.006612991809844971, 0.006535168170928955, 0.006731776237487793, 0.006639776229858399, 0.00656163215637207, 0.00659660816192627, 0.006592512130737305, 0.006535168170928955, 0.006617184162139893, 0.006592127799987793, 0.006471168041229248, 0.006619103908538818, 0.006749152183532715, 0.0064737281799316405, 0.006575967788696289, 0.006576064109802246, 0.006502399921417237, 0.006580095767974853, 0.006543551921844483, 0.006573631763458252, 0.0066096000671386716, 0.006576992034912109, 0.006599584102630615, 0.00662284803390503, 0.006523359775543213, 0.006588223934173584, 0.006600800037384033, 0.0065414719581604, 0.006573279857635498, 0.006692512035369873, 0.006535840034484863, 0.006528416156768799, 0.00655452823638916, 0.006574079990386963, 0.006242239952087402, 0.006578783988952636, 0.00652623987197876, 0.006566624164581299, 0.006793312072753906, 0.006538911819458008, 0.006544864177703858, 0.006559999942779541, 0.006502943992614746, 0.006518943786621094, 0.00656163215637207, 0.00656387186050415, 0.006565055847167969, 0.0065359678268432616, 0.006500127792358398, 0.006555583953857422, 0.006644000053405761, 0.006518784046173095, 0.006537375926971435, 0.0065411520004272465, 0.006545567989349365, 0.006559743881225586, 0.006535007953643799, 0.006543360233306885, 0.0065474557876586915, 0.006495391845703125, 0.006523647785186768, 0.006494463920593262, 0.0065057921409606935, 0.006539807796478271, 0.00653107213973999, 0.006651360034942627, 0.006646560192108154, 0.006510335922241211, 0.00653926420211792, 0.006551551818847656, 0.006549503803253174, 0.006535327911376953, 0.006512479782104492, 0.00655679988861084, 0.006533984184265137, 0.006500383853912353, 0.006520832061767578, 0.006553567886352539, 0.006463520050048828, 0.006555776119232178, 0.006510208129882813, 0.00667471981048584, 0.006597824096679687, 0.006533919811248779, 0.0065576000213623045, 0.0066126399040222165, 0.006552000045776368, 0.006572000026702881, 0.00659171199798584, 0.006517280101776123, 0.00658460807800293, 0.006624576091766358, 0.006495200157165528, 0.006528736114501953, 0.00658841609954834, 0.006608160018920899, 0.006584383964538574, 0.006341343879699707, 0.006527167797088623, 0.006582272052764892, 0.006567935943603515, 0.0065084161758422856, 
0.006525055885314941, 0.006569727897644043, 0.006557504177093506, 0.006789663791656494, 0.006608287811279297, 0.0065270400047302245, 0.006584767818450928, 0.006586527824401856, 0.006649695873260498, 0.006653952121734619, 0.006606847763061524, 0.006511680126190186, 0.006589375972747803, 0.0065413122177124024, 0.006574079990386963, 0.006584320068359375, 0.006520224094390869, 0.006533728122711181, 0.0066109437942504885, 0.006673984050750733, 0.006552000045776368, 0.006739391803741455, 0.006533696174621582, 0.006545407772064209, 0.006559743881225586, 0.0065443840026855465, 0.006538112163543702, 0.006541440010070801, 0.0065372161865234375, 0.006641280174255371, 0.006544032096862793, 0.006562719821929932, 0.006589119911193848, 0.006684703826904297, 0.006567039966583252, 0.006628320217132568, 0.006545407772064209, 0.006565216064453125, 0.006609119892120361, 0.006504896163940429, 0.006563839912414551, 0.006551680088043213, 0.006550848007202148, 0.006569568157196045, 0.006693855762481689, 0.00691212797164917, 0.006606719970703125, 0.006565887928009034, 0.006567423820495606, 0.00662988805770874, 0.006544447898864746, 0.006563936233520508, 0.006591328144073486, 0.006533279895782471, 0.006549600124359131, 0.006786816120147705, 0.006604191780090332, 0.006677087783813477, 0.006252384185791016, 0.0065660481452941896, 0.00651852798461914, 0.006557695865631104, 0.006551807880401612, 0.006496575832366944, 0.006559391975402832, 0.006552671909332276, 0.006508800029754638, 0.006625984191894531, 0.006528672218322754, 0.006539616107940674, 0.006574079990386963, 0.00652288007736206, 0.006535327911376953, 0.006567776203155518, 0.0066826238632202144, 0.006567327976226806, 0.006571936130523682, 0.006606847763061524, 0.006570271968841553, 0.006651360034942627, 0.006529983997344971, 0.006575104236602783, 0.006521247863769532, 0.006531487941741943, 0.006553311824798584, 0.006495808124542236, 0.006613279819488525, 0.0065848641395568844, 0.006498400211334228, 0.006567967891693115, 0.0065742721557617186, 0.0065363202095031735, 0.006552576065063476, 0.0065874881744384765, 0.006527391910552979, 0.006656159877777099, 0.006570176124572754, 0.006550591945648193, 0.006550111770629883, 0.0064956479072570805, 0.006933248043060303, 0.0075420160293579105, 0.006583040237426758, 0.006577824115753174, 0.00659065580368042, 0.006506591796875, 0.006534560203552246, 0.006609568119049072, 0.006563839912414551, 0.006590464115142822, 0.006587584018707276, 0.006618080139160157, 0.006576000213623047, 0.00665334415435791, 0.006582848072052002, 0.006576128005981445, 0.006557151794433593, 0.006512256145477295, 0.006626272201538086, 0.006646751880645752, 0.006522848129272461, 0.006259039878845215, 0.006574207782745361, 0.00656166410446167, 0.006493760108947754, 0.006552031993865967, 0.006555456161499024, 0.006559487819671631, 0.006619840145111084, 0.006541088104248047, 0.006551616191864014, 0.006582015991210937, 0.00649616003036499, 0.006581632137298584, 0.00655238389968872, 0.006504479885101318, 0.006569568157196045, 0.0065970239639282224, 0.006625279903411865, 0.0065435199737548825, 0.006565728187561035, 0.006586368083953857, 0.00657532787322998, 0.006488255977630615, 0.0065296320915222165, 0.006559743881225586, 0.006486015796661377, 0.006546847820281983, 0.006520959854125976, 0.006527167797088623, 0.006521120071411133, 0.006523039817810059, 0.00648960018157959, 0.006553952217102051, 0.006619135856628418, 0.006576128005981445, 0.006593632221221924, 0.006535200119018555, 0.006611199855804444, 0.006584544181823731, 0.006552192211151123, 
0.006682400226593018, 0.006555647850036621, 0.00653926420211792, 0.006535520076751709, 0.006501567840576172, 0.006515168190002442, 0.006624671936035156, 0.006518976211547851, 0.006566304206848145, 0.006719007968902588, 0.006580607891082764, 0.006561024188995361, 0.0065114560127258305, 0.006516736030578613, 0.006571743965148926, 0.006506783962249756, 0.006532991886138916, 0.006540768146514893, 0.006488895893096923, 0.006587903976440429, 0.006762847900390625, 0.006495967864990234, 0.006590303897857666, 0.0062641921043395995, 0.006591008186340332, 0.0065307202339172366, 0.006634016036987305, 0.006559775829315185, 0.0065064959526062015, 0.006573535919189453, 0.006526656150817871, 0.0065320320129394534, 0.00663705587387085, 0.006602975845336914, 0.006768832206726074, 0.006696959972381592, 0.006840320110321045, 0.006590400218963623, 0.006592576026916504, 0.006559743881225586, 0.006520415782928467, 0.006555583953857422, 0.006519264221191407, 0.006565887928009034, 0.006573311805725098, 0.006500480175018311, 0.006697855949401856, 0.006563583850860596, 0.006547296047210693, 0.006533279895782471, 0.006711296081542969, 0.00656489610671997, 0.00668723201751709, 0.006652544021606446, 0.006603968143463135, 0.00659116792678833, 0.006559872150421143, 0.006528319835662842, 0.0065577921867370605, 0.0065335679054260255, 0.006604127883911133, 0.006580639839172364, 0.006707456111907959, 0.006636864185333252, 0.006593215942382813, 0.006524191856384277, 0.006815775871276856, 0.00731001615524292, 0.0066007041931152345, 0.006549503803253174, 0.006563839912414551, 0.006500351905822754, 0.006564191818237305, 0.006559391975402832, 0.006645760059356689, 0.006542912006378174, 0.006547776222229004, 0.006542912006378174, 0.00657366418838501, 0.006532063961029053, 0.00653107213973999, 0.006612959861755371, 0.006522655963897705, 0.006865151882171631, 0.00662278413772583, 0.006605184078216553, 0.0062614078521728515, 0.006557119846343994, 0.006553952217102051, 0.006571072101593018, 0.006536128044128418, 0.006567615985870362, 0.006652224063873291, 0.006666240215301514, 0.006618656158447265, 0.006541600227355957, 0.006575520038604736, 0.006584767818450928, 0.0065469760894775395, 0.00657696008682251, 0.006563007831573486, 0.006616000175476074, 0.006596479892730713, 0.006527103900909424, 0.006565760135650635, 0.006565631866455078, 0.00669920015335083, 0.006547327995300293, 0.006539455890655518, 0.006514815807342529, 0.006567808151245117, 0.006584127902984619, 0.006552095890045166, 0.006598400115966797, 0.006557759761810303, 0.006551392078399658, 0.006568096160888672, 0.006522719860076904, 0.006594944000244141, 0.00657369613647461, 0.006561791896820069, 0.006586368083953857, 0.006635519981384277, 0.006546624183654785, 0.006601600170135498, 0.006708511829376221, 0.006752927780151367, 0.006625279903411865, 0.006610208034515381, 0.006548128128051758, 0.006686528205871582, 0.0066397438049316405, 0.006596000194549561, 0.006572127819061279, 0.0065337600708007815, 0.006565887928009034, 0.006617087841033936, 0.006532320022583008, 0.006554399967193604, 0.006543168067932129, 0.006749663829803467, 0.006571807861328125, 0.0065996160507202145, 0.006537087917327881, 0.0065985918045043945, 0.006681056022644043, 0.006537055969238282, 0.006573247909545899, 0.006629824161529541, 0.006301695823669433, 0.006579455852508545, 0.006911776065826416, 0.006769631862640381, 0.007170048236846924, 0.007368991851806641, 0.00669868803024292, 0.006653503894805908, 0.006719967842102051, 0.006731071949005127, 0.006722015857696533, 0.006616415977478027, 
0.0066200318336486815, 0.006651391983032226, 0.00658892822265625, 0.006647808074951172, 0.006651904106140137, 0.006681824207305908, 0.006582784175872803, 0.006563936233520508, 0.00653868818283081, 0.006570752143859863, 0.006575104236602783, 0.006503424167633057, 0.00652288007736206, 0.006531328201293945, 0.006548448085784912, 0.006532000064849853, 0.006523935794830322, 0.006517824172973633, 0.00668236780166626, 0.006766623973846435, 0.00657747220993042, 0.006582431793212891, 0.006605343818664551, 0.00660038423538208, 0.006637887954711914, 0.006518879890441895, 0.006556992053985596, 0.0065268797874450685, 0.006533728122711181, 0.006549920082092285, 0.006540832042694092, 0.006577951908111572, 0.006562496185302734, 0.006642464160919189, 0.006630112171173095, 0.006988319873809814, 0.006542143821716309, 0.006554463863372803, 0.006750207901000976, 0.006557695865631104, 0.006598527908325195, 0.006609024047851563, 0.006559743881225586, 0.006666240215301514, 0.006604800224304199, 0.006576128005981445, 0.006633120059967041, 0.006639232158660888, 0.006646495819091797, 0.006632544040679931, 0.006615968227386475, 0.006291584014892578, 0.006536863803863526, 0.006623519897460938, 0.006609248161315918, 0.006582399845123291, 0.006537087917327881, 0.006584159851074219, 0.006549856185913086, 0.00658841609954834, 0.006551487922668457, 0.006580095767974853, 0.006789120197296142, 0.006576128005981445, 0.006582015991210937, 0.006752607822418213, 0.00661078405380249, 0.006664159774780273, 0.00655951976776123, 0.006569856166839599, 0.006596799850463867, 0.006572288036346435, 0.0065413122177124024, 0.006541567802429199, 0.0065920639038085935, 0.006553791999816895, 0.006567903995513916, 0.006543392181396484, 0.006549503803253174, 0.006714399814605713, 0.006565951824188233, 0.00658080005645752, 0.006664063930511475, 0.006612959861755371, 0.006701632022857666, 0.006655488014221191, 0.006590911865234375, 0.0066061758995056155, 0.006561888217926025, 0.006566112041473389, 0.006565855979919434, 0.006619935989379883, 0.006524799823760986, 0.00667628812789917, 0.00662505578994751, 0.006490240097045899, 0.006633600234985351, 0.006682240009307862, 0.006605055809020996, 0.006649856090545654, 0.0065474557876586915, 0.006590144157409668, 0.006608799934387207, 0.006535071849822998, 0.006584383964538574, 0.006633920192718506, 0.006717440128326416, 0.006620768070220947, 0.00660316801071167, 0.006578080177307129, 0.006608992099761963, 0.006608895778656006, 0.006588096141815185, 0.006652224063873291, 0.006267168045043945, 0.006575488090515137, 0.006574431896209717, 0.006565631866455078, 0.006551807880401612, 0.006511871814727783, 0.006567903995513916, 0.006567935943603515, 0.006544288158416748, 0.006612127780914307, 0.006510496139526367, 0.006654784202575683, 0.006594560146331787, 0.006533279895782471, 0.006591872215270996, 0.0065779838562011715, 0.006539936065673828, 0.006555647850036621, 0.006526976108551025, 0.006567808151245117, 0.006535136222839356, 0.006567552089691162, 0.0066442561149597165, 0.0065710082054138185, 0.006525568008422852, 0.006582240104675293, 0.006654367923736572, 0.006815743923187256, 0.006733920097351074, 0.006651103973388672, 0.006529727935791016, 0.0066007041931152345, 0.006623231887817383, 0.006703104019165039, 0.006627552032470703, 0.006686143875122071, 0.006707104206085205, 0.006678815841674805, 0.006674848079681397, 0.006795135974884034, 0.006756224155426025, 0.006741888046264648, 0.006670464038848877, 0.006823455810546875, 0.0067068800926208495, 0.006613791942596436, 0.006586688041687012, 
0.0066126718521118165, 0.006565887928009034, 0.0066070079803466795, 0.006602399826049805, 0.006629216194152832, 0.006603104114532471, 0.0067051520347595215, 0.0067420158386230465, 0.006604800224304199, 0.0066375679969787596, 0.0066152639389038086, 0.006598207950592041, 0.006621407985687256, 0.006545407772064209, 0.006590784072875977, 0.006722400188446045, 0.006347904205322266, 0.006564000129699707, 0.0065558719635009765, 0.006733376026153565, 0.0066221442222595215, 0.006647808074951172, 0.006604896068572998, 0.00653872013092041, 0.00657862377166748, 0.006590144157409668, 0.00657590389251709, 0.006554111957550048, 0.006610655784606933, 0.006545728206634522, 0.006600448131561279, 0.006586719989776611, 0.006535039901733398, 0.006595680236816406, 0.006721759796142578, 0.006582240104675293, 0.00669158411026001, 0.006549407958984375, 0.006549600124359131, 0.00656819200515747, 0.00661411190032959, 0.006601632118225098, 0.006573728084564209, 0.006561088085174561, 0.006605599880218506, 0.006571167945861816, 0.006525887966156006, 0.006535071849822998, 0.006561791896820069, 0.006832223892211914, 0.006604703903198242, 0.006720863819122315, 0.006690591812133789, 0.006619840145111084, 0.006643519878387451, 0.0065001602172851565, 0.0066217598915100095, 0.006517888069152832, 0.006536064147949219, 0.006548863887786865, 0.006515327930450439, 0.006586368083953857, 0.006553023815155029, 0.006507232189178467, 0.006551392078399658, 0.006574079990386963, 0.006793216228485107, 0.006876224040985107, 0.006605535984039307, 0.006622975826263428, 0.006641119956970215, 0.0065977277755737305, 0.006552768230438232, 0.0066135358810424805, 0.0065476479530334475, 0.006621183872222901, 0.006572224140167236, 0.00654041576385498, 0.0066136960983276365]",tokens/s,151.54700584115153,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1057.681408,904.855552,0.0,509.607936,491.434496,s,1,7.827791015625,7.827791015625,0.0,7.827791015625,7.827791015625,7.827791015625,7.827791015625,[7.827791015625],,kWh,2.41184667208131e-05,2.652280298336202e-06,8.146395406000151e-06,3.491714242514945e-05,,MB,1365.553152,1018.10176,0.0,608.17408,592.24832,s,10,0.1971610870361328,0.019716108703613282,0.0005599603234482525,0.019589552879333498,0.02018427448272705,0.020702905559539794,0.02111781042098999,"[0.02006902313232422, 0.019475648880004883, 0.01972447967529297, 0.01926380729675293, 0.01932441520690918, 0.019350496292114258, 0.02122153663635254, 0.01970345687866211, 0.019276031494140623, 0.0197521915435791]",tokens/s,12984.306581403867,kWh,5.748851891567857e-07,6.339963632436003e-08,3.531416867126034e-07,9.914265121937492e-07,tokens/kWh,258213792.80401096,MB,1379.516416,1032.781824,0.0,622.854144,605.085696,s,10,10.358093688964845,1.0358093688964842,0.009877266112672058,1.0333173828125002,1.049001318359375,1.050311328125,1.0513593359375,"[1.03319384765625, 1.0235807495117188, 1.0278206787109374, 1.03344091796875, 
1.0273258056640624, 1.024480712890625, 1.051621337890625, 1.04366015625, 1.04425927734375, 1.048710205078125]",tokens/s,60.82200247630318,kWh,3.0259939236261438e-05,3.3371566101980955e-06,1.1780895798287674e-05,4.537799164474721e-05,tokens/kWh,1388338.2167551846,,s,630,10.352869088172914,0.0164331255367824,0.0004075164655791157,0.016304304122924804,0.016814908027648925,0.016989174556732177,0.017438395004272463,"[0.01586176013946533, 0.016242368698120117, 0.016171327590942385, 0.01624678421020508, 0.016216064453125, 0.016260448455810546, 0.016392511367797853, 0.016185920715332033, 0.01618451118469238, 0.01612041664123535, 0.016279296875, 0.016339071273803712, 0.01632473564147949, 0.016285696029663087, 0.01627894401550293, 0.016599647521972655, 0.018066495895385743, 0.01744771194458008, 0.016621728897094727, 0.01654374313354492, 0.01660927963256836, 0.01647760009765625, 0.016271968841552735, 0.016304128646850585, 0.016245855331420898, 0.016272287368774414, 0.016975200653076172, 0.016218528747558594, 0.016504575729370117, 0.016314783096313477, 0.01625904083251953, 0.01626675224304199, 0.016287391662597656, 0.01626620864868164, 0.016310272216796876, 0.016314367294311523, 0.016313728332519532, 0.01653619194030762, 0.016403488159179688, 0.016202816009521483, 0.01615862464904785, 0.01617647933959961, 0.016181919097900392, 0.01620969581604004, 0.016224479675292967, 0.016205087661743164, 0.016226079940795897, 0.01639878463745117, 0.01629350471496582, 0.01616796875, 0.016236095428466796, 0.01652364730834961, 0.01670047950744629, 0.01660809516906738, 0.016584831237792967, 0.01648627281188965, 0.016368736267089845, 0.016306751251220702, 0.01634547233581543, 0.016610496520996092, 0.01667718315124512, 0.016578752517700194, 0.016421152114868165, 0.015839903831481934, 0.016209823608398437, 0.01616217613220215, 0.016243423461914062, 0.016265216827392577, 0.01643654441833496, 0.01640108871459961, 0.016358432769775392, 0.016206432342529296, 0.01616099166870117, 0.016283647537231445, 0.01632271957397461, 0.016228351593017578, 0.016348608016967775, 0.016316352844238283, 0.0161246395111084, 0.016379711151123046, 0.016160863876342774, 0.01613209533691406, 0.016134016036987303, 0.016164255142211915, 0.01622025680541992, 0.016361343383789063, 0.01635215950012207, 0.01628758430480957, 0.016215103149414063, 0.016188352584838868, 0.01618534469604492, 0.016187103271484374, 0.016135967254638672, 0.01626755142211914, 0.016318336486816406, 0.016257408142089844, 0.01647817611694336, 0.016280799865722655, 0.016315168380737304, 0.016320512771606444, 0.0161876163482666, 0.01625654411315918, 0.0163143367767334, 0.01620012855529785, 0.016309984207153322, 0.016285823822021483, 0.016207872390747072, 0.016204832077026367, 0.01641468811035156, 0.01618227195739746, 0.016170944213867187, 0.01630419158935547, 0.01618931198120117, 0.01616099166870117, 0.0163286075592041, 0.01626755142211914, 0.01620534324645996, 0.016220447540283203, 0.016265119552612305, 0.016273408889770507, 0.01617078399658203, 0.01623891258239746, 0.016109472274780275, 0.016080896377563478, 0.016113664627075194, 0.016202016830444334, 0.016091136932373046, 0.016179296493530275, 0.016117696762084962, 0.016109439849853517, 0.016152511596679686, 0.016150688171386717, 0.01615603256225586, 0.01609337615966797, 0.016148927688598633, 0.01615667152404785, 0.01651737594604492, 0.016281343460083007, 0.01611961555480957, 0.016113855361938476, 0.01608415985107422, 0.01609996795654297, 0.016212160110473633, 0.016187263488769532, 0.016178655624389648, 0.016217824935913085, 
0.016173311233520508, 0.016199359893798827, 0.016219135284423827, 0.016196800231933595, 0.016157503128051757, 0.016117696762084962, 0.016179264068603514, 0.016157760620117187, 0.016141088485717773, 0.016236703872680665, 0.016216352462768556, 0.016183008193969728, 0.0162523193359375, 0.01630064010620117, 0.02134739112854004, 0.01658780860900879, 0.016338848114013673, 0.016250879287719726, 0.016158336639404296, 0.01614886474609375, 0.01613209533691406, 0.016275455474853515, 0.01631158447265625, 0.01625529670715332, 0.016565984725952148, 0.016212671279907227, 0.016196640014648437, 0.0162825927734375, 0.01622425651550293, 0.0162795524597168, 0.01618889617919922, 0.01624038314819336, 0.016226303100585936, 0.016423295974731446, 0.01633113670349121, 0.016242591857910157, 0.016271488189697265, 0.016158720016479493, 0.016317792892456055, 0.0162718391418457, 0.016617727279663087, 0.016230335235595705, 0.016150527954101563, 0.015823519706726075, 0.016231903076171873, 0.01623721694946289, 0.016244735717773438, 0.01618659210205078, 0.016149280548095703, 0.016464000701904298, 0.016401536941528322, 0.016699487686157227, 0.01620470428466797, 0.016287488937377928, 0.016150527954101563, 0.016259071350097656, 0.016144384384155275, 0.0161527042388916, 0.01611782455444336, 0.016248640060424806, 0.017026496887207032, 0.018632736206054688, 0.016906784057617186, 0.016684864044189455, 0.01652694320678711, 0.0162063045501709, 0.01612607955932617, 0.016180864334106447, 0.016167295455932616, 0.01623040008544922, 0.01639833641052246, 0.01677948760986328, 0.01671046447753906, 0.016597984313964843, 0.016350879669189453, 0.016255008697509767, 0.016285120010375978, 0.01704051208496094, 0.016162559509277342, 0.016076799392700195, 0.01625699234008789, 0.016188928604125977, 0.01627190399169922, 0.01617715263366699, 0.016269311904907227, 0.016156448364257812, 0.016205535888671876, 0.01623276710510254, 0.016146623611450195, 0.016281600952148437, 0.01617100715637207, 0.01619340705871582, 0.016273536682128907, 0.0162708797454834, 0.016237024307250977, 0.016354688644409178, 0.016347776412963866, 0.016648191452026367, 0.01624239921569824, 0.016353567123413085, 0.017019935607910156, 0.017358816146850586, 0.01706723213195801, 0.0163438720703125, 0.016294879913330076, 0.01627686309814453, 0.015849727630615235, 0.01619580841064453, 0.016463647842407225, 0.016912384033203123, 0.017225439071655274, 0.016990528106689454, 0.016549983978271485, 0.01648627281188965, 0.016344768524169922, 0.01625734329223633, 0.016170591354370118, 0.016096960067749022, 0.01607244873046875, 0.016242784500122072, 0.016313215255737305, 0.016219808578491212, 0.016183616638183594, 0.01628960037231445, 0.016130271911621093, 0.016221920013427735, 0.016204063415527343, 0.016088672637939453, 0.016189855575561525, 0.01617862319946289, 0.016195232391357423, 0.016304031372070312, 0.016264192581176756, 0.016342016220092775, 0.016516096115112306, 0.01645529556274414, 0.016374143600463867, 0.016488447189331054, 0.016504959106445314, 0.01642214393615723, 0.01647235107421875, 0.01622483253479004, 0.016191488265991212, 0.01621993637084961, 0.016269311904907227, 0.01630793571472168, 0.01626576042175293, 0.01622198486328125, 0.016205791473388673, 0.01611955261230469, 0.01632076835632324, 0.016293888092041017, 0.016107519149780272, 0.01621811294555664, 0.016239936828613282, 0.01627177619934082, 0.01626550483703613, 0.016291200637817384, 0.01624025535583496, 0.016147615432739258, 0.016387935638427734, 0.016261024475097655, 0.016291999816894533, 0.01631987190246582, 
0.01621664047241211, 0.01618364715576172, 0.01615763282775879, 0.016154752731323243, 0.016208480834960938, 0.01608185577392578, 0.016400159835815428, 0.016557823181152342, 0.016228607177734375, 0.01613209533691406, 0.016297727584838866, 0.01616716766357422, 0.01618889617919922, 0.016180992126464844, 0.01622505569458008, 0.016105279922485352, 0.0162674560546875, 0.01613209533691406, 0.01609324836730957, 0.016131872177124022, 0.01616265678405762, 0.01624095916748047, 0.016373760223388673, 0.016268896102905273, 0.01615648078918457, 0.016194143295288087, 0.016151615142822266, 0.016229312896728517, 0.016184383392333985, 0.01622643280029297, 0.016681791305541992, 0.016826400756835936, 0.016551904678344727, 0.016285951614379884, 0.01619852828979492, 0.016143232345581054, 0.016107519149780272, 0.016096384048461913, 0.016144832611083983, 0.01615011215209961, 0.01610428810119629, 0.016129215240478514, 0.016131935119628907, 0.016174047470092774, 0.016154592514038085, 0.016162879943847658, 0.016187360763549805, 0.01640608024597168, 0.016343488693237304, 0.01622844886779785, 0.01615247917175293, 0.016174144744873047, 0.016189599990844728, 0.01616873550415039, 0.01618841552734375, 0.017328128814697266, 0.01658448028564453, 0.016340736389160157, 0.016328224182128905, 0.016288127899169922, 0.016164480209350587, 0.016262079238891603, 0.01620377540588379, 0.016109664916992186, 0.016183168411254882, 0.01609657669067383, 0.016079584121704103, 0.016484352111816408, 0.016813631057739257, 0.01618124771118164, 0.01621887969970703, 0.016124992370605468, 0.01633951950073242, 0.016161184310913086, 0.016135168075561524, 0.016140352249145506, 0.01604412841796875, 0.016239423751831055, 0.016943103790283204, 0.016729120254516602, 0.01667888069152832, 0.01643631935119629, 0.016123807907104493, 0.016281055450439452, 0.01641904067993164, 0.016693376541137697, 0.016704832077026367, 0.01646214485168457, 0.016578784942626955, 0.016687456130981444, 0.016908607482910155, 0.017069759368896483, 0.01706188774108887, 0.01683404731750488, 0.016796159744262695, 0.016705535888671876, 0.01659859275817871, 0.016832351684570312, 0.017008575439453125, 0.016630207061767578, 0.016713951110839845, 0.016629791259765624, 0.016803007125854492, 0.016943904876708986, 0.01704140853881836, 0.016951007843017576, 0.016803104400634764, 0.016761184692382813, 0.016558752059936524, 0.01658470344543457, 0.016640256881713868, 0.016779136657714844, 0.01679350471496582, 0.016740095138549804, 0.01667679977416992, 0.016769311904907228, 0.016987520217895506, 0.01690687942504883, 0.01679155158996582, 0.016874496459960937, 0.017265663146972657, 0.017022975921630858, 0.016834144592285157, 0.01677916717529297, 0.016924192428588867, 0.01693199920654297, 0.01674220848083496, 0.016936960220336913, 0.016687103271484375, 0.016778911590576172, 0.01678371238708496, 0.016444992065429688, 0.016578304290771485, 0.01649135971069336, 0.016471424102783204, 0.016510656356811523, 0.01669001579284668, 0.01669071960449219, 0.01650361633300781, 0.016446527481079103, 0.016373632431030273, 0.016427839279174804, 0.016725887298583986, 0.01686140823364258, 0.01659679985046387, 0.016554079055786132, 0.016639328002929686, 0.016943103790283204, 0.017109119415283203, 0.017005279541015626, 0.01662169647216797, 0.016690879821777343, 0.016891263961791993, 0.016929376602172853, 0.016746528625488283, 0.0166297607421875, 0.016748640060424806, 0.016612735748291016, 0.01660982322692871, 0.016742399215698242, 0.016682655334472656, 0.01641097640991211, 0.016478208541870116, 0.016489791870117187, 
0.016495296478271484, 0.016420703887939453, 0.016373216629028322, 0.016512895584106447, 0.016386592864990234, 0.01652560043334961, 0.01640447998046875, 0.01638400077819824, 0.01660518455505371, 0.016687263488769533, 0.0166276798248291, 0.016869247436523436, 0.016617055892944335, 0.016630176544189454, 0.016621023178100585, 0.016611871719360353, 0.016678911209106445, 0.016472000122070313, 0.016488319396972657, 0.016332447052001955, 0.016306720733642577, 0.016510976791381835, 0.01650217628479004, 0.01628835105895996, 0.016314655303955077, 0.016298816680908202, 0.016160831451416016, 0.016195999145507813, 0.016257728576660156, 0.01622115135192871, 0.01577129554748535, 0.016197952270507812, 0.01621798324584961, 0.016296031951904297, 0.016171072006225588, 0.016275295257568358, 0.016244512557983398, 0.016573856353759766, 0.0166409912109375, 0.01660313606262207, 0.016549440383911134, 0.016605152130126952, 0.016586271286010743, 0.016579519271850585, 0.016785408020019533, 0.01697996711730957, 0.016885759353637696, 0.01675468826293945, 0.01669273567199707, 0.016538368225097657, 0.01650265693664551, 0.016553855895996093, 0.01658608055114746, 0.0169202880859375, 0.01661238479614258, 0.016563199996948243, 0.01660809516906738, 0.016572479248046876, 0.016473663330078124, 0.01662406349182129, 0.016490495681762696, 0.016774335861206056, 0.016680864334106444, 0.016700159072875975, 0.016605344772338868, 0.0167524471282959, 0.016638368606567384, 0.016529184341430664, 0.016362560272216796, 0.01630022430419922, 0.016313087463378905, 0.016287776947021486, 0.016219871520996094, 0.016262912750244142, 0.016204511642456055, 0.01627510452270508, 0.016245920181274413, 0.016356191635131835, 0.016615552902221678, 0.01731167984008789, 0.016808000564575196, 0.01667465591430664, 0.017039680480957033, 0.016524543762207033, 0.016472671508789064, 0.01654547119140625, 0.016714143753051757, 0.01666649627685547, 0.016674623489379883, 0.01693427276611328, 0.017171295166015624, 0.016803552627563476, 0.016686431884765623, 0.01665420722961426, 0.0168623046875, 0.01694927978515625, 0.01666342353820801, 0.016617536544799805, 0.016531391143798826, 0.017415584564208983, 0.019873983383178712, 0.017082784652709963, 0.017135616302490234, 0.016903360366821288, 0.017021440505981447, 0.016859136581420898, 0.01665999984741211, 0.016519968032836913, 0.016595008850097657, 0.01693075180053711, 0.019927040100097656, 0.01665843200683594, 0.016347135543823242, 0.016347007751464845, 0.01625823974609375, 0.017125631332397463, 0.01623062324523926, 0.01626588821411133, 0.016236352920532226, 0.016174272537231447, 0.016210752487182616, 0.016216127395629883, 0.01626451110839844, 0.016283424377441406, 0.01645654487609863, 0.0164003849029541, 0.016426559448242187, 0.016502368927001954, 0.01658572769165039, 0.01650876808166504, 0.01626038360595703, 0.01630441665649414, 0.016228351593017578, 0.016303775787353515, 0.016359296798706055, 0.016489376068115236, 0.01636761665344238, 0.016293376922607423, 0.01630259132385254, 0.017792448043823243, 0.016761568069458006, 0.017176704406738283, 0.016275232315063476, 0.01632863998413086, 0.01635251235961914, 0.016363807678222656, 0.016351711273193358, 0.01650217628479004, 0.016206432342529296, 0.016273216247558595, 0.016263359069824217, 0.01626470375061035, 0.016347808837890623, 0.016565088272094727, 0.016519359588623047, 0.016238431930541992]",tokens/s,60.85269644911382,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3591.528448,4609.409024,0.0,4206.886912,4070.564864,s,1,10.3852275390625,10.3852275390625,0.0,10.3852275390625,10.3852275390625,10.3852275390625,10.3852275390625,[10.3852275390625],,kWh,9.134361489166925e-05,1.0068475187233027e-05,2.7655855458004475e-05,0.00012906794553690676,,MB,3603.951616,4783.47264,0.0,4366.270464,4197.065728,s,10,1.7514608612060547,0.17514608612060545,0.0004245356513764438,0.17517752075195314,0.17563513641357423,0.1756477912902832,0.1756579151916504,"[0.1753883514404297, 0.17451078796386718, 0.1745284729003906, 0.17524362182617187, 0.1750978546142578, 0.17563232421875, 0.1751114196777344, 0.17467234802246093, 0.175615234375, 0.1756604461669922]",tokens/s,1461.63700069049,kWh,5.1781405067976e-06,5.707598114686513e-07,3.4232434793332697e-06,9.17214379759952e-06,tokens/kWh,27910596.00123134,MB,3611.480064,4793.9584,0.0,4376.756224,4197.068288,s,10,20.225977294921872,2.0225977294921873,0.020314569555725914,2.0217301025390624,2.0496035888671873,2.0540643676757813,2.0576329907226563,"[2.0048492431640623, 2.0381734619140626, 1.995053466796875, 2.0486123046875, 2.058525146484375, 2.0264163818359373, 1.9951151123046875, 2.01577197265625, 2.0181939697265623, 2.0252662353515625]",tokens/s,31.148062257450167,kWh,5.8116734184036995e-05,6.410392450480075e-06,3.789584513146676e-05,0.00010242297176598385,tokens/kWh,615096.388180793,,s,630,20.223344085693366,0.03210054616776724,0.0006397085772861648,0.032003679275512696,0.0328206413269043,0.033063246726989744,0.03425157215118409,"[0.032599742889404294, 0.03198422431945801, 0.03157155227661133, 0.03144976043701172, 0.03169254493713379, 0.031460832595825196, 0.03158070373535156, 0.031494144439697266, 0.03137740707397461, 0.03158627128601074, 0.03318172836303711, 0.03208931350708008, 0.03181843185424805, 0.0317031364440918, 0.03148185539245606, 0.031641408920288085, 0.0317196159362793, 0.03203891372680664, 0.03177903938293457, 0.031497119903564456, 0.03158652877807617, 0.03227471923828125, 0.03235878372192383, 0.03220182418823242, 0.03201526260375977, 0.032089569091796874, 0.03192681694030762, 0.032075775146484374, 0.03198108863830566, 0.03204143905639648, 0.03189145660400391, 0.03195289611816406, 0.031938304901123045, 0.03227027130126953, 0.03233824157714844, 0.03213475036621094, 0.03254902267456055, 0.03214281463623047, 0.032266719818115235, 0.03271648025512695, 0.0319105281829834, 0.03168636894226074, 0.032024864196777345, 0.03171516799926758, 0.03172502326965332, 0.03455648040771484, 0.03164172744750977, 0.03150592041015625, 0.03144755172729492, 0.03141433525085449, 0.031606271743774415, 0.03143926429748535, 0.031333536148071287, 0.03124518394470215, 0.03125043106079101, 0.031285247802734374, 0.031172607421875, 0.031160320281982422, 0.031235424041748047, 0.03117942428588867, 0.031098079681396485, 0.031275808334350584, 0.03120672035217285, 
0.032249057769775394, 0.03173046493530273, 0.033478656768798826, 0.03306713485717774, 0.032021793365478515, 0.03211324691772461, 0.032075294494628905, 0.032233951568603515, 0.031991615295410156, 0.03211280059814453, 0.0327916145324707, 0.03319087982177735, 0.03286566543579102, 0.032860801696777346, 0.03279888153076172, 0.033193824768066406, 0.03305849456787109, 0.03306937789916992, 0.03293097686767578, 0.032922462463378904, 0.032685630798339846, 0.03284832000732422, 0.032839359283447264, 0.03271427154541016, 0.03277699279785156, 0.03256934356689453, 0.03225955200195312, 0.032354846954345706, 0.03238092803955078, 0.032350208282470705, 0.03273849487304688, 0.03259270477294922, 0.03227238464355469, 0.03223961639404297, 0.032331424713134764, 0.03213516616821289, 0.031895904541015624, 0.03196723175048828, 0.031940607070922854, 0.03225395202636719, 0.03188515281677246, 0.03193052864074707, 0.03215564727783203, 0.03189766311645508, 0.03169177627563476, 0.03172828865051269, 0.03223116683959961, 0.03207785415649414, 0.03226265716552734, 0.032546817779541014, 0.0324587516784668, 0.03240988922119141, 0.03236428833007812, 0.03204079818725586, 0.03202060699462891, 0.032, 0.03182547187805176, 0.031676864624023436, 0.03179315185546875, 0.03174720001220703, 0.03215244674682617, 0.03196518325805664, 0.03213011169433594, 0.03258572769165039, 0.03180339241027832, 0.032150848388671875, 0.031820640563964844, 0.03170697593688965, 0.03252975845336914, 0.0314989128112793, 0.03135238456726074, 0.03133612823486328, 0.031875295639038084, 0.031430879592895505, 0.03141257667541504, 0.03130364799499512, 0.031209056854248046, 0.03132246398925781, 0.03139705657958984, 0.0313250560760498, 0.031285247802734374, 0.031196352005004882, 0.03146630477905273, 0.031312000274658205, 0.03133609580993652, 0.03129990386962891, 0.031424415588378905, 0.0318047046661377, 0.031990495681762696, 0.03131936073303223, 0.031414592742919925, 0.03157564735412598, 0.03177961540222168, 0.031733760833740236, 0.032882591247558594, 0.031461471557617186, 0.03162704086303711, 0.03191993522644043, 0.031730112075805665, 0.03154649543762207, 0.03183497619628906, 0.03168649673461914, 0.03153868865966797, 0.03139347267150879, 0.03155814361572266, 0.03133657646179199, 0.03137161636352539, 0.031356927871704104, 0.03146678352355957, 0.0312959041595459, 0.03139001655578613, 0.031334592819213865, 0.03134444808959961, 0.03129958343505859, 0.03153919982910156, 0.03155670356750488, 0.03141519927978516, 0.03171257591247559, 0.031864799499511716, 0.03230972671508789, 0.03199923133850097, 0.03179827117919922, 0.03187881660461426, 0.03248998260498047, 0.03298643112182617, 0.03284134292602539, 0.03349094390869141, 0.0336385612487793, 0.0348383674621582, 0.032734878540039064, 0.03324143981933594, 0.032849918365478514, 0.03273932647705078, 0.03253238296508789, 0.03248137664794922, 0.03238620758056641, 0.03208179092407227, 0.03201696014404297, 0.0320035514831543, 0.03199596786499023, 0.03202883148193359, 0.03202089691162109, 0.0318856315612793, 0.03199375915527344, 0.03237472152709961, 0.03214966583251953, 0.03224515151977539, 0.032242271423339845, 0.032034847259521486, 0.03204211044311524, 0.032228160858154296, 0.03215977478027344, 0.032188480377197265, 0.03219001770019531, 0.032409568786621094, 0.03225846481323242, 0.032307422637939454, 0.032133983612060546, 0.03236140823364258, 0.032457855224609374, 0.03299212646484375, 0.033605728149414066, 0.03305052947998047, 0.03294617462158203, 0.033116161346435545, 0.03308707046508789, 0.033071262359619144, 0.03301545715332031, 
0.03318960189819336, 0.032856960296630856, 0.032702465057373044, 0.03246627044677734, 0.03251788711547852, 0.03257993698120117, 0.03279679870605469, 0.03230559921264648, 0.0324128303527832, 0.0323072624206543, 0.03245558547973633, 0.03210432052612305, 0.032299007415771484, 0.032030433654785154, 0.032319774627685545, 0.03209532928466797, 0.03245318222045898, 0.03220896148681641, 0.03214713668823242, 0.03242812728881836, 0.032027137756347655, 0.032925376892089846, 0.032501953125, 0.03245888137817383, 0.032893150329589844, 0.03286608123779297, 0.03322489547729492, 0.03302169418334961, 0.032970207214355465, 0.03289968109130859, 0.032845054626464844, 0.032965377807617186, 0.03313049697875976, 0.03306905746459961, 0.032804256439208986, 0.03278473663330078, 0.03274892807006836, 0.03571392059326172, 0.03245657730102539, 0.03248735809326172, 0.032491710662841795, 0.03291484832763672, 0.03230073547363281, 0.03228121566772461, 0.03209040069580078, 0.03218431854248047, 0.03234323120117188, 0.03232169723510742, 0.03193718338012695, 0.03223273468017578, 0.032559776306152345, 0.03212249755859375, 0.03208867263793945, 0.032069023132324216, 0.03221871948242187, 0.032240161895751955, 0.03211500930786133, 0.03219660949707031, 0.03230515289306641, 0.03242803192138672, 0.03240262222290039, 0.03261667251586914, 0.032761505126953125, 0.03301625442504883, 0.03279705429077148, 0.03323065567016602, 0.033119617462158205, 0.03343561553955078, 0.03371721649169922, 0.03376278305053711, 0.03279216003417969, 0.03293695831298828, 0.032898303985595706, 0.03270931243896484, 0.03262828826904297, 0.03269385528564453, 0.0323199691772461, 0.03234207916259765, 0.032532798767089845, 0.03255849456787109, 0.03235692977905273, 0.03214697647094727, 0.032119102478027343, 0.03214355087280273, 0.03270041656494141, 0.032575199127197266, 0.03244867324829102, 0.032517822265625, 0.03259027099609375, 0.03246451187133789, 0.03274585723876953, 0.032110591888427735, 0.032763904571533206, 0.032974143981933594, 0.032647743225097656, 0.032655712127685546, 0.03359519958496094, 0.03296672058105469, 0.03716700744628906, 0.03295235061645508, 0.03251804733276367, 0.03250937652587891, 0.03237337493896485, 0.03228633499145508, 0.03240703964233398, 0.032674686431884765, 0.03213452911376953, 0.032401729583740234, 0.03196960067749023, 0.03195849609375, 0.03204150390625, 0.032001121520996094, 0.03196998405456543, 0.03200227355957031, 0.032183521270751955, 0.03186979293823242, 0.03171116828918457, 0.03166316795349121, 0.031976383209228514, 0.03178428840637207, 0.031527584075927734, 0.03157516860961914, 0.03647100830078125, 0.0319965763092041, 0.032159744262695314, 0.031649791717529296, 0.03160883140563965, 0.03134886360168457, 0.031228832244873047, 0.03120249557495117, 0.03132985687255859, 0.03171145629882813, 0.03141632080078125, 0.03136316871643066, 0.03149305534362793, 0.03154633522033692, 0.03191926383972168, 0.03140201568603516, 0.031417152404785154, 0.03141158485412598, 0.03179583930969238, 0.03138764762878418, 0.031389696121215824, 0.03131155204772949, 0.031291711807250974, 0.03133030319213867, 0.03157113647460937, 0.032783454895019534, 0.032078750610351564, 0.031966527938842776, 0.03178313636779785, 0.031666656494140626, 0.03161235237121582, 0.03173411178588867, 0.031516895294189454, 0.031719423294067385, 0.03164377593994141, 0.03146329689025879, 0.031508480072021484, 0.03145657539367676, 0.031289920806884766, 0.03132428741455078, 0.03143680000305176, 0.031662080764770506, 0.031903392791748045, 0.031617151260375974, 0.031531232833862305, 
0.03141632080078125, 0.031933696746826175, 0.031476287841796874, 0.031547679901123046, 0.031538816452026365, 0.031510143280029296, 0.031615648269653324, 0.03154944038391113, 0.03165388870239258, 0.03181785583496094, 0.031846271514892575, 0.0318791675567627, 0.03209785461425781, 0.032245857238769535, 0.03204950332641601, 0.03220275115966797, 0.031788063049316403, 0.031984607696533204, 0.031969087600708004, 0.031827648162841796, 0.03178937530517578, 0.03144927978515625, 0.03225619125366211, 0.03174579238891601, 0.03163347244262695, 0.03169619178771973, 0.03154604721069336, 0.032474494934082034, 0.03142720031738281, 0.03142476844787598, 0.03140787124633789, 0.031473663330078124, 0.031428352355957034, 0.03133875274658203, 0.031406080245971676, 0.03136463928222656, 0.031568159103393556, 0.03131616020202637, 0.0313447036743164, 0.031365055084228516, 0.031275007247924806, 0.03134464073181152, 0.031154144287109376, 0.03265241622924805, 0.03211465454101563, 0.0321278076171875, 0.03207798385620117, 0.032034366607666016, 0.03198921585083008, 0.03205564880371094, 0.03186342430114746, 0.03194393539428711, 0.03197004890441894, 0.03221913528442383, 0.03199494361877441, 0.03202944183349609, 0.031983200073242186, 0.03208867263793945, 0.031987712860107424, 0.03192444801330566, 0.03201520156860352, 0.031893535614013674, 0.03193065643310547, 0.0321030387878418, 0.032062721252441403, 0.03197222328186035, 0.032235393524169924, 0.03191398429870605, 0.032169120788574215, 0.03199084854125977, 0.03193984031677246, 0.03202412796020508, 0.03212550354003906, 0.03209667205810547, 0.03202867126464844, 0.03210380935668945, 0.0321952018737793, 0.03211836624145508, 0.032221599578857424, 0.03227443313598633, 0.031936511993408204, 0.03201433563232422, 0.03192831993103027, 0.03187507247924805, 0.03208556747436524, 0.03194444847106934, 0.03178566360473633, 0.03165526390075683, 0.031721311569213866, 0.031779647827148434, 0.031909887313842776, 0.03179481506347656, 0.03175168037414551, 0.031894399642944334, 0.031929759979248046, 0.03182243156433105, 0.03173356819152832, 0.03189369583129883, 0.031968704223632814, 0.03189961624145508, 0.03200889587402344, 0.03197532844543457, 0.03177574348449707, 0.03189836883544922, 0.032135425567626955, 0.03190169525146484, 0.03281856155395508, 0.032223838806152344, 0.031975360870361326, 0.031893535614013674, 0.032278591156005856, 0.03200972747802734, 0.03206345748901367, 0.031827999114990235, 0.03176236724853516, 0.03187564849853516, 0.03190732765197754, 0.031965152740478515, 0.03187334442138672, 0.031719648361206054, 0.03197500801086426, 0.03184444808959961, 0.03180550384521484, 0.031922367095947264, 0.032032833099365235, 0.03197542381286621, 0.03185641670227051, 0.0319531192779541, 0.03188435173034668, 0.032206783294677736, 0.03193955230712891, 0.031787040710449216, 0.031878751754760744, 0.031964927673339846, 0.03217299270629883, 0.0318317756652832, 0.03194684791564941, 0.031713184356689454, 0.03160268783569336, 0.031606496810913084, 0.03195139122009277, 0.031979263305664064, 0.032061695098876956, 0.032007614135742185, 0.032128734588623045, 0.03231769561767578, 0.03235219192504883, 0.03231391906738281, 0.032302944183349606, 0.032150848388671875, 0.032111297607421874, 0.0321003532409668, 0.03191398429870605, 0.03202463912963867, 0.032042945861816406, 0.032378078460693356, 0.03198646354675293, 0.03201436614990234, 0.03201839828491211, 0.0318948802947998, 0.03320899200439453, 0.03221299362182617, 0.03240697479248047, 0.03191184043884277, 0.031912607192993166, 0.03239491271972656, 
0.031879520416259764, 0.03187507247924805, 0.03196928024291992, 0.03238889694213867, 0.031763168334960935, 0.031669567108154294, 0.031793855667114256, 0.03191398429870605, 0.03187302398681641, 0.031897567749023435, 0.03218191909790039, 0.03203071975708008, 0.03205363082885742, 0.032004096984863284, 0.03221702575683594, 0.031881120681762694, 0.031805280685424805, 0.031923967361450194, 0.03161727905273438, 0.031955039978027344, 0.03420931243896484, 0.03225468826293945, 0.03199731254577637, 0.03189145660400391, 0.031887552261352536, 0.03246080017089844, 0.03199158477783203, 0.03257609558105469, 0.03224579238891601, 0.032258174896240235, 0.032270175933837894, 0.032083553314208986, 0.03247468948364258, 0.03225462341308594, 0.03225155258178711, 0.03220233535766601, 0.03229731369018555, 0.03217839813232422, 0.03213715362548828, 0.03175628852844238, 0.0318243522644043, 0.03196742439270019, 0.03195884895324707, 0.03184230422973633, 0.03177471923828125, 0.03178700828552246, 0.0321223030090332, 0.03160940742492676, 0.03168198394775391, 0.03200380706787109, 0.03184931182861328, 0.0317255687713623, 0.03184025573730469, 0.03195084762573242, 0.03174188804626465, 0.03177068710327149, 0.032008094787597655, 0.037103710174560545, 0.032352256774902347, 0.03181977653503418, 0.03426883316040039, 0.032053600311279296, 0.031819583892822266, 0.03213740921020508, 0.031757535934448244, 0.0316014404296875]",tokens/s,31.15211793511846,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3590.53312,4609.409024,0.0,4206.886912,4070.564864,s,1,10.2306328125,10.2306328125,0.0,10.2306328125,10.2306328125,10.2306328125,10.2306328125,[10.2306328125],,kWh,8.822761569999784e-05,9.724524280805748e-06,2.802502241999738e-05,0.00012597716240080097,,MB,3616.272384,4783.47264,0.0,4366.270464,4197.065728,s,10,1.6875668029785156,0.16875668029785157,0.00043956967470854794,0.16892181396484374,0.1691445556640625,0.16925013427734376,0.16933459716796875,"[0.16816026306152343, 0.1681755828857422, 0.16910429382324219, 0.16912109375, 0.16808642578125, 0.16882330322265626, 0.1690622100830078, 0.16902032470703124, 0.1686575927734375, 0.169355712890625]",tokens/s,1516.9769845446474,kWh,4.967155582626527e-06,5.477859752861424e-07,3.297309605830414e-06,8.812251163743085e-06,tokens/kWh,29050465.680469967,MB,3620.2496,4791.861248,0.0,4374.659072,4197.068288,s,10,17.10925341796875,1.7109253417968748,0.016390697756024867,1.7063275146484376,1.7390421508789062,1.742282733154297,1.7448751989746094,"[1.738322021484375, 1.7455233154296874, 1.7058992919921876, 1.7082659912109375, 1.6966365966796875, 1.7024412841796874, 1.6914332275390624, 1.70807666015625, 1.7065859375, 
1.706069091796875]",tokens/s,36.82217947267947,kWh,4.937923422487194e-05,5.446276323082536e-06,3.286185821036891e-05,8.76873687583234e-05,tokens/kWh,718461.5172298684,,s,630,17.106712766647345,0.027153512328011647,0.0005691094259290827,0.026989168167114257,0.02773168888092041,0.027953979396820067,0.029107978229522713,"[0.028067743301391602, 0.027437152862548827, 0.027453088760375978, 0.02751113510131836, 0.027459840774536132, 0.027303680419921875, 0.02734489631652832, 0.027213823318481444, 0.02856959915161133, 0.027201824188232422, 0.02725449562072754, 0.027222015380859374, 0.02715238380432129, 0.027322368621826174, 0.027234304428100587, 0.027114784240722656, 0.02784124755859375, 0.02710937690734863, 0.027137184143066408, 0.02706489562988281, 0.027189535140991213, 0.027211551666259767, 0.027304384231567384, 0.02718492889404297, 0.027319904327392577, 0.02735526466369629, 0.027400480270385743, 0.027321887969970704, 0.027521503448486327, 0.027632640838623046, 0.027792383193969726, 0.02784992027282715, 0.02773062324523926, 0.02787504005432129, 0.02807811164855957, 0.02788803291320801, 0.027947328567504884, 0.027579904556274414, 0.02784614372253418, 0.027924671173095703, 0.027702751159667968, 0.027721887588500978, 0.02802364730834961, 0.027637760162353517, 0.027514591217041015, 0.027486495971679688, 0.02735270309448242, 0.027646080017089843, 0.027214080810546874, 0.030486528396606444, 0.027430400848388672, 0.028727392196655273, 0.02785055923461914, 0.02797123146057129, 0.02817715263366699, 0.027371776580810546, 0.02740790367126465, 0.02745779228210449, 0.027258975982666016, 0.027350784301757813, 0.02738617515563965, 0.02737766456604004, 0.02752457618713379, 0.02795827293395996, 0.027564064025878906, 0.027679136276245117, 0.02755846405029297, 0.027456863403320313, 0.028539039611816405, 0.02787993621826172, 0.027663648605346678, 0.027557600021362306, 0.028256832122802736, 0.027865535736083986, 0.027702304840087892, 0.02864022445678711, 0.02774537658691406, 0.02794998359680176, 0.027666271209716795, 0.02786524772644043, 0.02772377586364746, 0.027842464447021483, 0.027702848434448243, 0.028360319137573243, 0.02769603157043457, 0.027527168273925783, 0.028475040435791014, 0.027506719589233397, 0.027308351516723634, 0.027213823318481444, 0.02738505554199219, 0.027219743728637696, 0.02760188865661621, 0.027565919876098632, 0.027332799911499023, 0.027291103363037108, 0.027300384521484374, 0.027469823837280274, 0.027457536697387694, 0.027313247680664062, 0.02729257583618164, 0.027666431427001953, 0.027817983627319336, 0.027250688552856447, 0.027862239837646484, 0.02780659294128418, 0.02745881652832031, 0.027524864196777344, 0.027782047271728515, 0.027625375747680665, 0.02764329528808594, 0.027679040908813478, 0.027677055358886718, 0.027771135330200196, 0.02774127960205078, 0.028199583053588866, 0.027938816070556642, 0.027803071975708006, 0.027658559799194335, 0.028616352081298826, 0.027603551864624022, 0.027584447860717773, 0.02759891128540039, 0.027544736862182617, 0.027754432678222658, 0.027490591049194334, 0.02793606376647949, 0.02717964744567871, 0.02692243194580078, 0.02671392059326172, 0.02702207946777344, 0.027011072158813477, 0.02731007957458496, 0.026614816665649414, 0.026639328002929688, 0.027146240234375, 0.02674015998840332, 0.027859519958496094, 0.0298221435546875, 0.027078752517700196, 0.027199647903442384, 0.02696659278869629, 0.02678169631958008, 0.026984447479248046, 0.026918912887573244, 0.02680326461791992, 0.027015392303466796, 0.026780288696289064, 0.026583040237426758, 
0.026714176177978517, 0.026882080078125, 0.027215871810913086, 0.0267509765625, 0.02689023971557617, 0.026693567276000977, 0.02693049621582031, 0.027185920715332032, 0.027303136825561524, 0.02673539161682129, 0.026849056243896486, 0.026966239929199217, 0.02711292839050293, 0.026937599182128905, 0.026994592666625978, 0.026979743957519533, 0.026691999435424805, 0.02723030471801758, 0.02690300750732422, 0.026836992263793946, 0.027165983200073244, 0.026716575622558594, 0.026959327697753905, 0.027171680450439453, 0.026657983779907225, 0.027310911178588866, 0.028728864669799806, 0.027435359954833986, 0.02727334403991699, 0.0269899845123291, 0.026927711486816407, 0.02716796875, 0.02715679931640625, 0.02699238395690918, 0.027589344024658204, 0.02692905616760254, 0.026970144271850585, 0.027127328872680663, 0.026687103271484373, 0.026857568740844728, 0.027261791229248048, 0.02680428886413574, 0.02693667221069336, 0.02672287940979004, 0.026730207443237303, 0.026860063552856445, 0.026808160781860352, 0.026648672103881835, 0.026800031661987304, 0.026881376266479493, 0.026833568572998047, 0.026723360061645506, 0.026722368240356446, 0.027109888076782225, 0.027023008346557617, 0.02678860855102539, 0.026785055160522462, 0.02686025619506836, 0.026677248001098632, 0.02656870460510254, 0.02777907180786133, 0.02690870475769043, 0.026939359664916993, 0.02693017578125, 0.02672492790222168, 0.026672672271728516, 0.02671504020690918, 0.02692300796508789, 0.02679376029968262, 0.02691913604736328, 0.0270696964263916, 0.026918752670288086, 0.02691561508178711, 0.0268985595703125, 0.02690457534790039, 0.026918912887573244, 0.026806272506713868, 0.027022911071777345, 0.02716694450378418, 0.027148191452026366, 0.02693766403198242, 0.02706345558166504, 0.027124576568603516, 0.02704159927368164, 0.02691904067993164, 0.0268472957611084, 0.026714111328125, 0.02692815971374512, 0.026627040863037108, 0.02653593635559082, 0.02680531120300293, 0.02810540771484375, 0.028221376419067384, 0.02694486427307129, 0.026929855346679688, 0.026876192092895507, 0.026674335479736327, 0.02708310317993164, 0.030218751907348632, 0.034176223754882815, 0.02728611183166504, 0.027084543228149415, 0.027269567489624023, 0.02795724868774414, 0.02698838424682617, 0.027140159606933594, 0.02676950454711914, 0.02691219139099121, 0.02692563247680664, 0.027387903213500975, 0.026976160049438477, 0.026969823837280273, 0.02707084846496582, 0.02703366470336914, 0.027074432373046874, 0.02720096015930176, 0.026991359710693358, 0.02694540786743164, 0.027043840408325196, 0.026846847534179687, 0.02687014389038086, 0.027064319610595702, 0.026798080444335938, 0.026861568450927735, 0.02698633575439453, 0.02686582374572754, 0.026953727722167968, 0.026840288162231444, 0.026810367584228514, 0.026804544448852538, 0.026963743209838867, 0.02687811279296875, 0.026702239990234376, 0.026662784576416014, 0.026693599700927734, 0.026756832122802734, 0.026841184616088868, 0.026884576797485352, 0.026849248886108398, 0.026779680252075194, 0.026646368026733397, 0.02669081687927246, 0.02668191909790039, 0.02698624038696289, 0.02712022399902344, 0.027119455337524415, 0.027074720382690428, 0.026656768798828126, 0.026776575088500978, 0.026971168518066406, 0.02666694450378418, 0.026708000183105467, 0.026979551315307618, 0.0272927360534668, 0.02695487976074219, 0.02725334358215332, 0.026806272506713868, 0.026641855239868163, 0.02685366439819336, 0.02704412841796875, 0.026648576736450196, 0.02688572883605957, 0.027021728515625, 0.02677894401550293, 0.02664134407043457, 0.027383199691772463, 
0.02766102409362793, 0.02688198471069336, 0.02706211280822754, 0.02697420883178711, 0.026888256072998048, 0.026765247344970704, 0.02715398406982422, 0.026843839645385743, 0.026891456604003907, 0.02714886474609375, 0.026948671340942382, 0.02681884765625, 0.026856096267700195, 0.027112991333007812, 0.02693168067932129, 0.026648000717163087, 0.026619775772094727, 0.026747583389282226, 0.026798080444335938, 0.027074560165405274, 0.027024831771850586, 0.02727174377441406, 0.026752416610717773, 0.02692156791687012, 0.026578943252563478, 0.02673823928833008, 0.02693552017211914, 0.026968063354492186, 0.026869407653808595, 0.0276213436126709, 0.026976703643798828, 0.026925024032592774, 0.026855615615844725, 0.027006975173950197, 0.027072032928466796, 0.02697612762451172, 0.027208063125610353, 0.02730415916442871, 0.027303936004638672, 0.02737766456604004, 0.027207679748535156, 0.027098112106323242, 0.02726515197753906, 0.0273437442779541, 0.02706572723388672, 0.027035839080810548, 0.02695952033996582, 0.02677577590942383, 0.027050048828125, 0.026887840270996093, 0.026941951751708985, 0.02800383949279785, 0.027152959823608398, 0.026851615905761718, 0.02694963264465332, 0.026904159545898438, 0.026898847579956055, 0.027060224533081056, 0.027104927062988282, 0.02696598434448242, 0.027336864471435546, 0.026964256286621094, 0.026855072021484374, 0.02764851188659668, 0.026867776870727538, 0.027482112884521483, 0.026857088088989258, 0.026811904907226562, 0.026918848037719725, 0.027104511260986328, 0.02712054443359375, 0.027238752365112303, 0.02730361557006836, 0.027098880767822266, 0.02692403221130371, 0.02702275276184082, 0.027152992248535155, 0.026931455612182617, 0.02690835189819336, 0.027059616088867186, 0.026956159591674803, 0.02681270408630371, 0.026926143646240235, 0.02672230339050293, 0.026612672805786133, 0.026625503540039064, 0.02671027183532715, 0.026718496322631836, 0.02680953598022461, 0.026763999938964844, 0.027060575485229492, 0.027139839172363282, 0.02675302314758301, 0.02666649627685547, 0.026684127807617187, 0.026881792068481444, 0.02676652717590332, 0.026721120834350586, 0.02671615982055664, 0.026683616638183593, 0.026677024841308593, 0.02674278450012207, 0.026660863876342773, 0.02658515167236328, 0.02664147186279297, 0.026737279891967773, 0.0266693115234375, 0.02679952049255371, 0.026755392074584963, 0.026789215087890624, 0.026840095520019532, 0.026826656341552735, 0.026953727722167968, 0.026838144302368163, 0.02650611114501953, 0.02673811149597168, 0.026675775527954103, 0.026561824798583985, 0.026740480422973632, 0.026669343948364257, 0.026680000305175783, 0.026570112228393554, 0.026748640060424805, 0.026783775329589844, 0.026866559982299806, 0.026930240631103514, 0.02778726387023926, 0.027254783630371093, 0.02704159927368164, 0.027000192642211915, 0.02725766372680664, 0.027213823318481444, 0.02740777587890625, 0.02733695983886719, 0.02683535957336426, 0.026781152725219727, 0.02692095947265625, 0.02690505599975586, 0.02690015983581543, 0.02692969512939453, 0.0272936954498291, 0.0269453125, 0.026836992263793946, 0.027054079055786134, 0.026984512329101564, 0.026941375732421877, 0.026902528762817384, 0.026879999160766603, 0.02683839988708496, 0.02697279930114746, 0.026754751205444335, 0.027107648849487305, 0.026843135833740234, 0.026938528060913087, 0.02689833641052246, 0.02672662353515625, 0.027084735870361327, 0.026993440628051757, 0.026871807098388673, 0.027340799331665038, 0.026947200775146483, 0.02781808090209961, 0.02982326316833496, 0.027383199691772463, 0.02742947196960449, 
0.02705116844177246, 0.02686227226257324, 0.02709724807739258, 0.027064287185668945, 0.026822687149047852, 0.02689638328552246, 0.027193536758422853, 0.02749625587463379, 0.02713599967956543, 0.026910240173339844, 0.0269289608001709, 0.02714691162109375, 0.026968063354492186, 0.02688204765319824, 0.027185152053833008, 0.02693734359741211, 0.02712166404724121, 0.02693129539489746, 0.027022911071777345, 0.02731862449645996, 0.0271646728515625, 0.02712166404724121, 0.027313375473022462, 0.027065120697021484, 0.02759916877746582, 0.027123712539672853, 0.027009023666381835, 0.028821504592895508, 0.028559648513793945, 0.0272523193359375, 0.0271648006439209, 0.02684716796875, 0.026763328552246092, 0.02681785583496094, 0.02666156768798828, 0.026728448867797853, 0.027281408309936524, 0.026744447708129882, 0.026824064254760742, 0.02683113670349121, 0.0266964168548584, 0.026676992416381835, 0.026733919143676756, 0.02708723258972168, 0.026925119400024414, 0.02674940872192383, 0.02660335922241211, 0.0266343994140625, 0.026726655960083008, 0.027102975845336913, 0.026850591659545897, 0.026962656021118164, 0.026841087341308592, 0.026740095138549805, 0.0268538875579834, 0.027131776809692382, 0.027091455459594727, 0.02724336051940918, 0.027321247100830077, 0.027553184509277344, 0.027031999588012695, 0.026989952087402343, 0.027692895889282226, 0.027353792190551757, 0.02723865509033203, 0.027392255783081056, 0.02738969612121582, 0.0269201602935791, 0.02711836814880371, 0.027088895797729492, 0.027045888900756834, 0.02918400001525879, 0.027441440582275392, 0.026824415206909178, 0.026918272018432617, 0.026907264709472658, 0.027225215911865233, 0.027054912567138673, 0.02683113670349121, 0.026791616439819334, 0.026879167556762694, 0.026766239166259767, 0.02697439956665039, 0.026763071060180665, 0.027077823638916015, 0.02687001609802246, 0.027007551193237306, 0.027812223434448242, 0.026888191223144533, 0.027033599853515625, 0.026682527542114257, 0.027091808319091797, 0.02774835205078125, 0.02892185592651367, 0.027096992492675782, 0.027137184143066408, 0.02693996810913086, 0.026958208084106445, 0.027181087493896486, 0.026943519592285158, 0.028493663787841798, 0.03038559913635254, 0.027097759246826173, 0.027307680130004883, 0.027064672470092775, 0.026843135833740234, 0.026570207595825197, 0.026761760711669923, 0.026894336700439454, 0.026853376388549805, 0.026779584884643555, 0.026923168182373048, 0.026643840789794922, 0.02678665542602539, 0.026817663192749024, 0.02673436737060547, 0.02674355125427246, 0.026755104064941405, 0.026797183990478514, 0.02696076774597168, 0.026892288208007813, 0.027006303787231446, 0.02693391990661621, 0.026806272506713868, 0.02677555274963379, 0.026963903427124025, 0.027149568557739256, 0.02669388771057129, 0.026616224288940428, 0.02696406364440918, 0.02743916893005371, 0.026938592910766602, 0.02693814468383789, 0.026887424468994142, 0.02684320068359375, 0.026872512817382812, 0.026834207534790037, 0.027476032257080077, 0.026659135818481446, 0.026915264129638673, 0.026816415786743163, 0.026755071640014647, 0.026630144119262695, 0.026712064743041993, 0.026795135498046876, 0.026868608474731444, 0.026949567794799803, 0.028175615310668947, 0.027181472778320313, 0.027694623947143556]",tokens/s,36.82764822171447,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 194347 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, 
in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 196036 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 194926 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 
more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 195487 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 196649 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3587.117056,4609.409024,0.0,4206.886912,4070.564864,s,1,9.8327783203125,9.8327783203125,0.0,9.8327783203125,9.8327783203125,9.8327783203125,9.8327783203125,[9.8327783203125],,kWh,8.254613427084223e-05,9.095390638543101e-06,2.6063354184001353e-05,0.00011770487909338668,,MB,3522.904064,4783.47264,0.0,4366.270464,4197.065728,s,10,1.7560886688232424,0.17560886688232422,0.00048806288939002483,0.17572736358642577,0.1760205093383789,0.17602655868530273,0.1760313981628418,"[0.1758105926513672, 0.17567686462402343, 0.17546514892578124, 0.17554713439941405, 0.1760191650390625, 0.17424447631835938, 0.17577786254882813, 0.175880126953125, 0.1756346893310547, 0.17603260803222656]",tokens/s,1457.785159399417,kWh,5.1982663050438805e-06,5.730763723770448e-07,3.4529218656491555e-06,9.22426454307008e-06,tokens/kWh,27752890.087299727,MB,3527.102464,4793.9584,0.0,4376.756224,4197.068288,s,10,19.72680834960937,1.972680834960937,0.008595188301442907,1.9741355590820313,1.9812188842773437,1.983751837158203,1.9857781994628907,"[1.9763466796875, 1.96691796875, 1.96374267578125, 1.963957275390625, 1.9719244384765624, 1.9585361328125, 1.9800162353515625, 1.9784261474609375, 1.9862847900390626, 1.980656005859375]",tokens/s,31.93623564617209,kWh,5.7253962266207866e-05,6.3151852060677025e-06,3.747663865575024e-05,0.00010104578612802583,tokens/kWh,623479.7354159676,,s,630,19.72447334098815,0.03130868784283835,0.000569287045220454,0.0311867036819458,0.03171148090362549,0.03212369194030762,0.03275064445495606,"[0.03221913528442383, 0.03190982437133789, 0.031221824645996092, 0.031102560043334962, 0.031517087936401365, 0.031090463638305664, 0.03190521621704102, 0.03119388771057129, 0.03132617568969726, 0.03138972854614258, 0.033040382385253905, 0.03342950439453125, 0.031074304580688477, 0.03136288070678711, 0.031156415939331054, 0.03094105529785156, 0.03098428726196289, 0.030803295135498048, 0.030909120559692384, 0.030940736770629883, 0.031072223663330078, 0.031082975387573243, 0.030948352813720704, 0.03125760078430176, 0.03119923210144043, 0.03180953598022461, 0.03268991851806641, 0.03162044715881348, 0.0315230712890625, 0.031326879501342775, 0.03134873580932617, 0.031356927871704104, 0.031649791717529296, 0.031647296905517576, 0.03130822372436524, 0.031913183212280274, 0.031232799530029297, 0.03133171272277832, 0.0313242244720459, 0.03128892707824707, 0.03127190399169922, 0.031471616744995115, 0.031375455856323245, 0.031317184448242184, 0.031148767471313475, 0.031278783798217774, 0.03110864067077637, 0.03111510467529297, 0.031025920867919922, 0.031215808868408204, 0.031105024337768555, 0.03106800079345703, 0.031013023376464843, 0.03191193580627441, 0.03128639984130859, 0.031123935699462892, 0.031162784576416015, 0.031152128219604492, 0.031164415359497072, 
0.031116416931152344, 0.031185792922973632, 0.031022111892700197, 0.031028192520141603, 0.031957664489746095, 0.0313239688873291, 0.030945215225219726, 0.031022783279418945, 0.03136140823364258, 0.031145984649658204, 0.031150272369384766, 0.031055679321289064, 0.031296831130981445, 0.031095392227172853, 0.030945119857788087, 0.031015167236328123, 0.031120672225952148, 0.03114863967895508, 0.031140159606933594, 0.031059776306152344, 0.03100262451171875, 0.031323583602905276, 0.031154239654541015, 0.031279615402221676, 0.03167027282714844, 0.031792255401611326, 0.031347583770751954, 0.031119359970092773, 0.03127401542663574, 0.03143097686767578, 0.031109695434570313, 0.03125872039794922, 0.03152019119262695, 0.03141184043884277, 0.031615936279296875, 0.03138086318969727, 0.031076255798339843, 0.031211967468261718, 0.03118320083618164, 0.031287040710449215, 0.03102124786376953, 0.031147167205810546, 0.031091392517089842, 0.03127312088012695, 0.030983488082885743, 0.03118355178833008, 0.031098623275756836, 0.030957536697387697, 0.030904544830322265, 0.03107436752319336, 0.031062015533447264, 0.03119923210144043, 0.030968063354492186, 0.030993663787841796, 0.031382015228271484, 0.031039424896240234, 0.03103545570373535, 0.031006719589233397, 0.03119923210144043, 0.031049728393554688, 0.031086463928222657, 0.03248550415039062, 0.031249568939208983, 0.031184864044189454, 0.03140470314025879, 0.03128044891357422, 0.031075231552124022, 0.03130716705322266, 0.03130185508728028, 0.031174144744873046, 0.031013248443603515, 0.03133225631713867, 0.03112201690673828, 0.031160320281982422, 0.030966976165771484, 0.031128095626831054, 0.031053760528564452, 0.031153663635253907, 0.031161184310913085, 0.031131647109985353, 0.031268512725830075, 0.031232128143310545, 0.031127775192260742, 0.03136460876464844, 0.03127552032470703, 0.03115007972717285, 0.031929567337036134, 0.03127724838256836, 0.03150704002380371, 0.03135452842712402, 0.0311845760345459, 0.03120172882080078, 0.0311441593170166, 0.031670015335083006, 0.03109280014038086, 0.03127631950378418, 0.031361631393432614, 0.03112739181518555, 0.03101033592224121, 0.031067071914672853, 0.03118489646911621, 0.031229471206665037, 0.031181280136108398, 0.030980096817016602, 0.031148031234741212, 0.031212671279907227, 0.03138348770141602, 0.031185855865478517, 0.031054943084716798, 0.03105580711364746, 0.03095635223388672, 0.03094495964050293, 0.031149696350097657, 0.03088044738769531, 0.031053375244140625, 0.03135548782348633, 0.03096348762512207, 0.030924863815307617, 0.031237567901611328, 0.031100896835327147, 0.031132415771484376, 0.03094121551513672, 0.031162336349487306, 0.031045791625976562, 0.030969696044921877, 0.03095961570739746, 0.03097395133972168, 0.03125785636901855, 0.030944095611572266, 0.031293567657470704, 0.031916032791137694, 0.031312000274658205, 0.03079360008239746, 0.030828447341918946, 0.030867359161376954, 0.0308287353515625, 0.031092735290527345, 0.030896127700805662, 0.031160320281982422, 0.03078758430480957, 0.030965503692626954, 0.03086892890930176, 0.03084707260131836, 0.03095542335510254, 0.03104031944274902, 0.03098419189453125, 0.03094528007507324, 0.03096780776977539, 0.03077939224243164, 0.031059455871582032, 0.03086128044128418, 0.031709728240966795, 0.031057024002075197, 0.03104857635498047, 0.03100876808166504, 0.030893280029296876, 0.030952543258666993, 0.031043264389038087, 0.031065151214599608, 0.031119295120239258, 0.031334463119506835, 0.03150534439086914, 0.031850112915039065, 0.031825855255126954, 
0.031713727951049805, 0.03140812873840332, 0.03107200050354004, 0.031108896255493165, 0.03117103958129883, 0.0313624324798584, 0.03125516891479492, 0.03116851234436035, 0.03135468864440918, 0.031106752395629884, 0.030991903305053713, 0.030982847213745116, 0.032417247772216794, 0.031427391052246095, 0.031198976516723632, 0.031181055068969725, 0.03144892883300781, 0.031157983779907226, 0.031693248748779296, 0.031037439346313478, 0.031369216918945314, 0.031180448532104492, 0.031305471420288083, 0.030984256744384767, 0.03102729606628418, 0.0311157112121582, 0.031068159103393556, 0.03110246467590332, 0.031136255264282226, 0.03224991989135742, 0.031482431411743166, 0.031061920166015625, 0.03121331214904785, 0.03167062377929687, 0.031098047256469728, 0.031201215744018556, 0.030984991073608397, 0.03107174491882324, 0.032148128509521486, 0.03271001434326172, 0.031168928146362306, 0.031035327911376955, 0.03093731117248535, 0.031038816452026368, 0.03100271987915039, 0.032326465606689454, 0.03124835205078125, 0.03076483154296875, 0.03100262451171875, 0.03083673667907715, 0.030943231582641603, 0.03091391944885254, 0.030855615615844725, 0.031012128829956055, 0.030923679351806642, 0.030920703887939452, 0.03112540817260742, 0.03121776008605957, 0.031340543746948245, 0.03115167999267578, 0.03102083206176758, 0.030982048034667968, 0.03121011161804199, 0.03119651222229004, 0.03123468780517578, 0.031113599777221678, 0.031084320068359376, 0.031070207595825194, 0.03127705574035645, 0.031088960647583007, 0.030934719085693358, 0.03122585678100586, 0.031057920455932617, 0.03113315200805664, 0.03099292755126953, 0.03123747253417969, 0.03118921661376953, 0.03143071937561035, 0.031117055892944338, 0.03273110580444336, 0.03258796691894531, 0.031235679626464844, 0.030910400390625, 0.030978368759155273, 0.031019487380981446, 0.031061248779296877, 0.03455683135986328, 0.03147776031494141, 0.031645023345947265, 0.031217920303344728, 0.030995904922485353, 0.031032096862792968, 0.03113372802734375, 0.031070272445678712, 0.0308752326965332, 0.032726814270019534, 0.031300224304199216, 0.0310762882232666, 0.031088703155517577, 0.030930944442749023, 0.03125817680358887, 0.03114134407043457, 0.03120755195617676, 0.032494430541992185, 0.031334304809570314, 0.031051136016845702, 0.030988288879394532, 0.0309333438873291, 0.03092108726501465, 0.031057632446289063, 0.03080624008178711, 0.030904191970825196, 0.030917984008789062, 0.031171424865722656, 0.031106239318847657, 0.031208255767822265, 0.030826431274414062, 0.03088809585571289, 0.030877504348754883, 0.030846431732177736, 0.030933151245117186, 0.03104102325439453, 0.03102409553527832, 0.03099852752685547, 0.031113088607788084, 0.030975231170654295, 0.031060863494873046, 0.031033344268798828, 0.030760959625244142, 0.03092889595031738, 0.031121376037597657, 0.031114784240722657, 0.03105843162536621, 0.030900447845458985, 0.031280351638793946, 0.03100320053100586, 0.030842432022094725, 0.030845056533813475, 0.030861600875854493, 0.030992416381835936, 0.030873632431030272, 0.030805984497070314, 0.030840831756591795, 0.030913888931274416, 0.031709440231323244, 0.030822048187255858, 0.031146303176879882, 0.03117657661437988, 0.03121004867553711, 0.031131647109985353, 0.031264448165893556, 0.03103366470336914, 0.03102720069885254, 0.03118227195739746, 0.031142463684082033, 0.032273727416992186, 0.032078529357910154, 0.03172108840942383, 0.031648128509521485, 0.031797056198120115, 0.03162851142883301, 0.03149699211120605, 0.03131411170959473, 0.03182387161254883, 0.03139129638671875, 
0.03211705780029297, 0.031294687271118164, 0.031204256057739257, 0.03139705657958984, 0.03147145652770996, 0.03134982490539551, 0.031194751739501952, 0.031702880859375, 0.03155398368835449, 0.03158732795715332, 0.031611904144287106, 0.03126067161560059, 0.031068159103393556, 0.03104275131225586, 0.031099359512329103, 0.031053279876708983, 0.03133523178100586, 0.03163059234619141, 0.03155027198791504, 0.03158124732971192, 0.031218624114990233, 0.03143251228332519, 0.031199424743652344, 0.03121766471862793, 0.03244607925415039, 0.03134502410888672, 0.03143680000305176, 0.03179110336303711, 0.03153248023986816, 0.031449024200439456, 0.03137395286560059, 0.030985855102539064, 0.031226240158081054, 0.0312073917388916, 0.03191811180114746, 0.031090591430664064, 0.030893632888793945, 0.03124278450012207, 0.030873600006103515, 0.03117670440673828, 0.03163340759277344, 0.03150787162780762, 0.03128934478759766, 0.031153856277465822, 0.030981023788452147, 0.03153875160217285, 0.031328224182128904, 0.031134208679199218, 0.031030624389648438, 0.031219327926635742, 0.031431072235107424, 0.03145379257202149, 0.03174195289611816, 0.03233516693115234, 0.03162563133239746, 0.03177824020385742, 0.031918336868286136, 0.03143449592590332, 0.031203744888305664, 0.031197343826293945, 0.031076351165771485, 0.03118489646911621, 0.031064064025878906, 0.03159449577331543, 0.03152892875671387, 0.031495935440063474, 0.03149427223205566, 0.03175030326843262, 0.031087776184082032, 0.03124412727355957, 0.03133935928344726, 0.031200864791870116, 0.031308351516723634, 0.03132352066040039, 0.03144563293457031, 0.03153715133666992, 0.03139548873901367, 0.0311627197265625, 0.031434080123901365, 0.03174796867370606, 0.03147174453735352, 0.03239798355102539, 0.0314654712677002, 0.03156991958618164, 0.03137926483154297, 0.03137964820861817, 0.03136240005493164, 0.03209897613525391, 0.03148534393310547, 0.031104864120483397, 0.031260448455810545, 0.03135382461547852, 0.031524864196777344, 0.03133030319213867, 0.03115827178955078, 0.031078527450561524, 0.031065120697021484, 0.030946144104003905, 0.031139839172363282, 0.03156582450866699, 0.031801343917846676, 0.031510431289672854, 0.03137955284118652, 0.03146137619018555, 0.031362560272216795, 0.031049823760986327, 0.030992416381835936, 0.0313014087677002, 0.031187551498413086, 0.031202335357666016, 0.031232992172241212, 0.03120240020751953, 0.031044511795043944, 0.031313888549804686, 0.0311562557220459, 0.03194470405578613, 0.03223551940917969, 0.0314238395690918, 0.03123062324523926, 0.03131596755981445, 0.031352832794189454, 0.031109151840209962, 0.031162336349487306, 0.03139993667602539, 0.03171123123168945, 0.03190169525146484, 0.0317255687713623, 0.031483488082885744, 0.03252470397949219, 0.0311943359375, 0.03157891273498535, 0.03133769607543945, 0.03137612724304199, 0.031659231185913086, 0.03167750358581543, 0.03160780715942383, 0.03123481559753418, 0.031152128219604492, 0.03156959915161133, 0.03245027160644531, 0.031174688339233397, 0.03102681541442871, 0.030951520919799805, 0.03247600173950195, 0.03329203033447266, 0.03144524765014649, 0.03233942413330078, 0.033737022399902346, 0.03132563209533691, 0.03147964859008789, 0.031289920806884766, 0.031152639389038086, 0.03125644874572754, 0.03122502326965332, 0.031189823150634767, 0.03142563247680664, 0.031236703872680665, 0.03154566383361816, 0.031102783203125, 0.031025344848632813, 0.03152435111999512, 0.031448768615722655, 0.03155958366394043, 0.032129119873046875, 0.031502944946289066, 0.031440256118774414, 
0.03109974479675293, 0.03178700828552246, 0.031129600524902344, 0.03094528007507324, 0.031435935974121094, 0.03275862503051758, 0.031350400924682616, 0.03144947242736816, 0.03093708801269531, 0.03100467109680176, 0.03100876808166504, 0.03124553680419922, 0.031193952560424804, 0.031254528045654296, 0.031076351165771485, 0.030891904830932616, 0.031021184921264648, 0.031410175323486327, 0.031123455047607423, 0.031254528045654296, 0.03107583999633789, 0.0311014404296875, 0.03114188766479492, 0.031120864868164063, 0.03129151916503906, 0.031121088027954102, 0.031179071426391602, 0.03130774307250977, 0.031344512939453124, 0.031658687591552735, 0.03130764770507812, 0.031090303421020506, 0.03132659149169922, 0.031117311477661135, 0.03103539276123047, 0.03100057601928711, 0.031073535919189453, 0.031019168853759764, 0.03127152061462402, 0.031086591720581053, 0.03109174346923828, 0.03109161567687988, 0.031176767349243163, 0.031184608459472657, 0.031139999389648437, 0.03133760070800781, 0.031124576568603516, 0.041355358123779294, 0.03157507133483887, 0.03179395294189453, 0.03133440017700195, 0.031352832794189454, 0.03139574432373047, 0.03107148742675781, 0.031015071868896484, 0.03114633560180664, 0.031084896087646485, 0.03126006317138672, 0.031017183303833008, 0.031065792083740235, 0.03259462356567383, 0.03226828765869141, 0.03150777626037598, 0.03166636848449707, 0.03144345664978027, 0.0314979190826416, 0.03104185676574707, 0.0312684154510498, 0.032307361602783205, 0.03151260757446289, 0.03128096008300781, 0.031158720016479492, 0.031102975845336913, 0.03107142448425293, 0.03126534461975097, 0.031107328414916993]",tokens/s,31.940016298982112,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 197218 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3605.753856,4609.409024,0.0,4206.886912,4070.564864,s,1,10.78188671875,10.78188671875,0.0,10.78188671875,10.78188671875,10.78188671875,10.78188671875,[10.78188671875],,kWh,8.879367857496163e-05,9.787047555104585e-06,2.91508566540033e-05,0.0001277315827840695,,MB,3638.276096,4783.47264,0.0,4366.270464,4197.065728,s,10,1.6871604614257814,0.16871604614257812,0.00021903228489232804,0.16869321441650392,0.16901886749267578,0.16902762222290038,0.16903462600708008,"[0.16888467407226562, 0.16869769287109376, 0.1685198974609375, 0.16864649963378905, 0.1690169219970703, 0.16835987854003906, 0.16868873596191405, 0.169036376953125, 0.16845558166503907, 0.16885420227050782]",tokens/s,1517.3423385210212,kWh,4.970537296963622e-06,5.481584790924589e-07,3.306306788169493e-06,8.825002564225573e-06,tokens/kWh,29008490.154752146,MB,3644.35456,4791.861248,0.0,4374.659072,4197.068288,s,10,16.72774255371094,1.672774255371094,0.0037802323741648568,1.6717249145507813,1.6786344970703126,1.679328173828125,1.679883115234375,"[1.6719725341796876, 1.6751610107421875, 1.6800218505859374, 1.6784803466796876, 1.670573974609375, 1.671477294921875, 1.6685081787109375, 1.6732012939453125, 1.6686107177734375, 1.6697353515625]",tokens/s,37.66198564911789,kWh,6.492695028720242e-05,7.161333269113966e-06,4.051961716143078e-05,0.00011260790071774717,tokens/kWh,559463.4088589408,,s,630,16.72539849090574,0.026548251572866288,0.00038797980981304176,0.026458352088928223,0.02682104301452637,0.027065038108825684,0.02777667995452881,"[0.02725049591064453, 0.027042112350463866, 0.02682374382019043, 0.02647750473022461, 0.026679296493530274, 0.026455135345458985, 0.026573728561401368, 0.02652774429321289, 0.026639551162719727, 0.0266760311126709, 0.026551456451416017, 0.02662486457824707, 0.026517248153686522, 0.02655174446105957, 0.026444416046142578, 0.026302656173706054, 0.02639446449279785, 0.026384288787841798, 0.027058015823364256, 0.02634079933166504, 0.02640380859375, 0.02630860710144043, 0.02652351951599121, 0.026400896072387697, 0.02668339157104492, 0.026394079208374024, 
0.026376672744750976, 0.02645408058166504, 0.026433536529541016, 0.02636595153808594, 0.02633113670349121, 0.026363168716430664, 0.026404863357543946, 0.026726591110229493, 0.02674652862548828, 0.02662416076660156, 0.02677014350891113, 0.027076608657836915, 0.02685055923461914, 0.026710208892822267, 0.026975807189941407, 0.026608640670776368, 0.02667519950866699, 0.026605247497558594, 0.026536256790161132, 0.0268372802734375, 0.026407743453979494, 0.02633763122558594, 0.026766944885253906, 0.0264116153717041, 0.026277503967285155, 0.026442495346069336, 0.026248287200927735, 0.02628700828552246, 0.026228736877441407, 0.026286079406738282, 0.026279647827148436, 0.026363616943359376, 0.026237152099609376, 0.026367359161376953, 0.02639779281616211, 0.026521215438842772, 0.026384639739990234, 0.026916383743286133, 0.02659584045410156, 0.027054079055786134, 0.02655561637878418, 0.026616031646728516, 0.026468191146850586, 0.026753952026367187, 0.026793888092041016, 0.026531455993652343, 0.027152671813964843, 0.026822656631469727, 0.028738784790039062, 0.02673539161682129, 0.02669113540649414, 0.026683008193969727, 0.026587743759155274, 0.026392799377441406, 0.02663360023498535, 0.026303104400634766, 0.026465951919555666, 0.02630019187927246, 0.026398784637451173, 0.02762713623046875, 0.02657574462890625, 0.026549375534057618, 0.026499967575073242, 0.026390527725219725, 0.026441087722778322, 0.026472192764282226, 0.026628320693969726, 0.026399391174316406, 0.02643667221069336, 0.026534719467163084, 0.02648896026611328, 0.026588640213012694, 0.026675039291381836, 0.026325408935546874, 0.026321184158325194, 0.02629840087890625, 0.026273696899414063, 0.026524768829345704, 0.026389791488647462, 0.026615488052368165, 0.026733919143676756, 0.02653046417236328, 0.026285600662231446, 0.027675104141235352, 0.0265314884185791, 0.026432895660400392, 0.026413503646850585, 0.02655695915222168, 0.026746112823486327, 0.026474496841430665, 0.026303232192993162, 0.026635744094848632, 0.026341920852661134, 0.026218496322631835, 0.026308223724365233, 0.02620863914489746, 0.026304000854492186, 0.026313119888305665, 0.026326400756835937, 0.026347391128540038, 0.02676940727233887, 0.026372095108032227, 0.026253376007080078, 0.026351808547973633, 0.026543039321899414, 0.026336063385009767, 0.02649212837219238, 0.02672105598449707, 0.026663999557495117, 0.02641196823120117, 0.026234367370605468, 0.026634239196777345, 0.026792448043823244, 0.026834943771362304, 0.026863616943359377, 0.027460672378540038, 0.027263872146606444, 0.02753094482421875, 0.02710960006713867, 0.026992544174194336, 0.026803936004638672, 0.026581535339355467, 0.026578943252563478, 0.02649497604370117, 0.026644479751586913, 0.026709728240966797, 0.02644406318664551, 0.026550432205200196, 0.026706111907958983, 0.026600479125976562, 0.02656934356689453, 0.0265482234954834, 0.0265482234954834, 0.026671104431152344, 0.026790111541748048, 0.026725696563720702, 0.026796512603759766, 0.027096607208251952, 0.026968544006347656, 0.02707865524291992, 0.027288896560668945, 0.027152736663818358, 0.02689468765258789, 0.026763071060180665, 0.026469919204711916, 0.02653990364074707, 0.02653878402709961, 0.026351423263549806, 0.026457632064819336, 0.026422975540161132, 0.026634624481201172, 0.02666556739807129, 0.0265948486328125, 0.026392704010009767, 0.02640118408203125, 0.026335168838500976, 0.026326528549194338, 0.02661427116394043, 0.026489952087402343, 0.026420127868652343, 0.02670159912109375, 0.026444000244140627, 0.02634752082824707, 0.027070783615112306, 
0.026576448440551757, 0.02691116714477539, 0.026793407440185546, 0.02673516845703125, 0.026883583068847656, 0.02668185615539551, 0.026436800003051757, 0.026425920486450195, 0.02642076873779297, 0.026687488555908204, 0.026468927383422852, 0.026509471893310547, 0.026564607620239256, 0.02653183937072754, 0.026437631607055666, 0.02637004852294922, 0.02639664077758789, 0.026369823455810546, 0.027088415145874022, 0.02828156852722168, 0.026714111328125, 0.026583040237426758, 0.026486175537109375, 0.026475103378295898, 0.026382335662841795, 0.026608800888061522, 0.026501632690429686, 0.026547967910766603, 0.02647897529602051, 0.026443328857421875, 0.026466304779052735, 0.02650588798522949, 0.026574848175048828, 0.026426496505737303, 0.02645487976074219, 0.026398752212524416, 0.026390527725219725, 0.026457855224609375, 0.026548032760620118, 0.026452415466308592, 0.0264006404876709, 0.02653401565551758, 0.02637740707397461, 0.026387264251708984, 0.02673807907104492, 0.026834623336791992, 0.026745759963989257, 0.026537248611450195, 0.026499008178710936, 0.026622751235961913, 0.026730495452880858, 0.026631263732910155, 0.026642528533935547, 0.026490943908691406, 0.02659328079223633, 0.026452735900878908, 0.02778544044494629, 0.027629344940185548, 0.02675632095336914, 0.026744895935058594, 0.027070976257324218, 0.026505439758300782, 0.026906879425048828, 0.026449920654296875, 0.02660966491699219, 0.026343008041381837, 0.026365856170654296, 0.02646665573120117, 0.026481887817382813, 0.0264202880859375, 0.026547744750976564, 0.026423648834228517, 0.026396671295166017, 0.02662723159790039, 0.026493791580200196, 0.026406911849975585, 0.026390527725219725, 0.02636294364929199, 0.026556415557861326, 0.026826847076416017, 0.026408031463623048, 0.026547967910766603, 0.026372287750244142, 0.026563392639160157, 0.026336416244506836, 0.02658246421813965, 0.026366527557373048, 0.02643132781982422, 0.026374111175537108, 0.02634940719604492, 0.026323135375976563, 0.026381792068481444, 0.02628432083129883, 0.026271007537841798, 0.026327392578125, 0.026640512466430663, 0.026575040817260743, 0.026714431762695313, 0.02643667221069336, 0.026258495330810545, 0.0264006404876709, 0.026416191101074217, 0.026237632751464842, 0.026222848892211915, 0.026372095108032227, 0.02638643264770508, 0.026510751724243165, 0.02626959991455078, 0.026794048309326173, 0.026233247756958008, 0.02621401596069336, 0.02636390495300293, 0.026348127365112304, 0.026309823989868163, 0.026374975204467774, 0.026302463531494142, 0.02630854415893555, 0.02625472068786621, 0.029479040145874023, 0.026794559478759767, 0.0266561279296875, 0.026489471435546873, 0.02628812789916992, 0.028020383834838868, 0.026644832611083986, 0.02675654411315918, 0.026425920486450195, 0.027276384353637696, 0.026538911819458007, 0.026543519973754884, 0.026810335159301757, 0.026472415924072266, 0.02646428871154785, 0.026358400344848633, 0.02638982391357422, 0.026374847412109374, 0.026326431274414062, 0.026413663864135743, 0.026404863357543946, 0.02653539276123047, 0.026339872360229492, 0.02636329650878906, 0.026382944107055665, 0.026386528015136718, 0.0262259521484375, 0.02633996772766113, 0.026300064086914064, 0.026767007827758788, 0.026390975952148437, 0.026535968780517578, 0.026450143814086915, 0.02666867256164551, 0.026745216369628906, 0.026583263397216797, 0.026799455642700195, 0.026618303298950194, 0.026719648361206053, 0.026595935821533204, 0.026679168701171874, 0.026664703369140626, 0.02679007911682129, 0.026519744873046876, 0.026558464050292968, 0.026687488555908204, 
0.026572799682617186, 0.026572351455688478, 0.02648080062866211, 0.02753913688659668, 0.02665260887145996, 0.026639007568359376, 0.02639651107788086, 0.026478015899658203, 0.026301023483276367, 0.026437759399414062, 0.026404863357543946, 0.026464256286621093, 0.02672435188293457, 0.02650111961364746, 0.026324960708618166, 0.02643903923034668, 0.026295040130615236, 0.026388383865356444, 0.026416479110717775, 0.02631497573852539, 0.026400320053100584, 0.02652249526977539, 0.026400768280029296, 0.02636566352844238, 0.027755231857299806, 0.0269005126953125, 0.026843135833740234, 0.026628000259399414, 0.02650441551208496, 0.027046783447265625, 0.026451967239379884, 0.026692863464355468, 0.02673126411437988, 0.026476543426513673, 0.026416927337646483, 0.026803871154785157, 0.026452543258666993, 0.02672377586364746, 0.026390752792358398, 0.02645145606994629, 0.026483552932739258, 0.02635366439819336, 0.026408319473266603, 0.026204479217529296, 0.026381759643554686, 0.026357791900634767, 0.027041824340820312, 0.02650809669494629, 0.026235904693603516, 0.026315711975097657, 0.026558528900146483, 0.02636390495300293, 0.026877952575683595, 0.026422752380371093, 0.026262048721313477, 0.02649087905883789, 0.026692928314208983, 0.02689299201965332, 0.02654003143310547, 0.026517248153686522, 0.02633344078063965, 0.026281951904296875, 0.026333152770996095, 0.02638649559020996, 0.026281984329223632, 0.02716828727722168, 0.02633366394042969, 0.026348831176757813, 0.026243520736694337, 0.02636828804016113, 0.026458368301391602, 0.026412704467773437, 0.026262720108032225, 0.02627235221862793, 0.026355520248413086, 0.026241535186767577, 0.026332704544067383, 0.02629680061340332, 0.026320863723754882, 0.026115968704223634, 0.026179744720458985, 0.026179584503173828, 0.02627993583679199, 0.02622572708129883, 0.026330047607421875, 0.026382335662841795, 0.026353151321411132, 0.02678374481201172, 0.026606687545776365, 0.026377119064331055, 0.026678752899169923, 0.026485279083251954, 0.026367776870727538, 0.026474016189575195, 0.026802719116210936, 0.026554527282714843, 0.026486431121826172, 0.026386783599853515, 0.026435583114624024, 0.026617855072021485, 0.026396352767944335, 0.02655824089050293, 0.026820863723754883, 0.03153497505187988, 0.02726697540283203, 0.02666547203063965, 0.02671615982055664, 0.026482240676879883, 0.026593536376953126, 0.026656639099121093, 0.026605472564697266, 0.02652150344848633, 0.026380191802978514, 0.02659564781188965, 0.026470687866210936, 0.02633910369873047, 0.026324512481689454, 0.026376895904541016, 0.026437471389770508, 0.026417312622070314, 0.02644112014770508, 0.026733152389526366, 0.02645984077453613, 0.02627190399169922, 0.02621251106262207, 0.02653593635559082, 0.02636390495300293, 0.026498048782348634, 0.02636288070678711, 0.026387903213500978, 0.02631923294067383, 0.026378015518188476, 0.026360223770141602, 0.026299840927124025, 0.02631532859802246, 0.026671104431152344, 0.026437631607055666, 0.026517248153686522, 0.02629964828491211, 0.026290496826171874, 0.026286783218383788, 0.026445823669433592, 0.026480640411376953, 0.02627903938293457, 0.02631091117858887, 0.02623347282409668, 0.026311744689941408, 0.02645075225830078, 0.026508480072021483, 0.026295232772827148, 0.026393407821655272, 0.026234912872314452, 0.02631884765625, 0.02647859191894531, 0.026257408142089843, 0.026306560516357422, 0.026531423568725586, 0.026244735717773436, 0.02637295913696289, 0.026281408309936524, 0.026384319305419922, 0.02617401695251465, 0.026244159698486327, 0.026389440536499022, 
0.02624412727355957, 0.026276832580566407, 0.026133567810058593, 0.02628620719909668, 0.026422079086303712, 0.026298368453979492, 0.026715967178344728, 0.026971744537353515, 0.026328863143920897, 0.026307167053222655, 0.026435775756835936, 0.026381696701049805, 0.026221216201782225, 0.02623676872253418, 0.026461824417114258, 0.026284543991088868, 0.026419231414794922, 0.026517440795898437, 0.026719615936279296, 0.026936256408691406, 0.026942623138427733, 0.026853792190551756, 0.02664672088623047, 0.02655843162536621, 0.026556447982788087, 0.026471519470214845, 0.02663311958312988, 0.026394624710083008, 0.026453792572021486, 0.026491104125976564, 0.026458335876464845, 0.026674911499023436, 0.026492992401123048, 0.026341312408447264, 0.02643974494934082, 0.027052032470703126, 0.027674623489379883, 0.02668854331970215, 0.026545120239257813, 0.02647235107421875, 0.026445920944213868, 0.02639660835266113, 0.026460512161254883, 0.026523359298706056, 0.026441984176635742, 0.026474111557006835, 0.02648281669616699, 0.026681407928466797, 0.026410943984985353, 0.026591648101806642, 0.02637740707397461, 0.02658905601501465, 0.026295263290405272, 0.026333311080932616, 0.026444831848144532, 0.026778720855712892, 0.026638080596923828, 0.026640384674072266, 0.026797088623046875, 0.02640176010131836, 0.02673587226867676, 0.026712064743041993, 0.026555135726928712, 0.026443552017211915, 0.026398944854736327, 0.02630860710144043, 0.026289472579956053, 0.026968767166137695, 0.029946943283081055, 0.026415615081787108, 0.026307008743286134, 0.026263551712036134, 0.02631475257873535, 0.026363744735717773, 0.026251424789428712, 0.027470848083496095, 0.027150527954101562, 0.026354496002197265, 0.026259456634521484, 0.026310335159301756, 0.026337440490722657, 0.026339487075805666, 0.026384384155273437, 0.02633497619628906, 0.026767168045043945, 0.026360511779785156, 0.02615065574645996, 0.026449695587158203, 0.026433759689331055, 0.026351615905761717, 0.02675209617614746, 0.026350624084472658, 0.026226015090942384, 0.026229280471801758, 0.026251264572143555, 0.026451967239379884, 0.026482431411743165, 0.026208511352539064, 0.026200063705444337, 0.026392160415649416, 0.026218591690063478, 0.026282304763793944, 0.02633113670349121, 0.026290271759033205, 0.02610371208190918, 0.02631884765625, 0.026447296142578125, 0.02629075241088867, 0.026619647979736327, 0.026663135528564454, 0.02644691276550293, 0.02634220886230469]",tokens/s,37.66726397236844,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, 
in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4913.123328,5693.636608,0.0,5593.104384,5582.857216,s,1,11.73065234375,11.73065234375,0.0,11.73065234375,11.73065234375,11.73065234375,11.73065234375,[11.73065234375],,kWh,0.0001190715114166854,1.3124586181180615e-05,3.6273640129980134e-05,0.00016846973772784613,,MB,1619.37408,6333.267968,0.0,5916.065792,5844.559872,s,10,2.0819514617919923,0.20819514617919924,0.0003264685847713937,0.20825191497802734,0.20862664794921876,0.20870465087890624,0.20876705322265626,"[0.20830857849121093, 0.2077802276611328, 0.2077529296875, 0.2083362579345703, 0.20878265380859376, 0.20826786804199218, 0.20801052856445312, 0.20786714172363283, 0.2082359619140625, 0.20860931396484375]",tokens/s,1229.615601987444,kWh,6.101645991146477e-06,6.729005191395409e-07,4.024916414374982e-06,1.0799462924661001e-05,tokens/kWh,23704882.528501846,MB,1627.738112,6333.267968,0.0,5916.065792,5844.562432,s,10,16.242586303710937,1.6242586303710937,0.0021678589128387355,1.6247357177734374,1.6272782348632813,1.6276063415527344,1.627868826904297,"[1.6247967529296874, 1.6206951904296876, 1.6249732666015626, 1.6215147705078126, 1.6247620849609374, 1.6247093505859376, 1.6279344482421876, 1.6227357177734374, 1.6232593994140625, 
1.627205322265625]",tokens/s,38.78692643031019,kWh,4.728447959302075e-05,5.215234010473105e-06,3.156758428182575e-05,8.406729788531961e-05,tokens/kWh,749399.6070379405,,s,630,16.240188983917243,0.02577807775224958,0.00021093157398895606,0.025755743980407714,0.025952277755737305,0.02603838996887207,0.026429438152313234,"[0.02657535934448242, 0.026025983810424806, 0.025907039642333984, 0.025538272857666015, 0.025581663131713867, 0.025581567764282227, 0.025552959442138673, 0.025505311965942384, 0.025672256469726564, 0.026047807693481445, 0.02575654411315918, 0.025671680450439452, 0.02570649528503418, 0.02574336051940918, 0.025671680450439452, 0.025577472686767577, 0.025618431091308593, 0.025802751541137696, 0.025621503829956056, 0.025647680282592775, 0.025706527709960937, 0.02570403289794922, 0.025709375381469727, 0.025794559478759766, 0.025707775115966797, 0.025643264770507813, 0.02562713623046875, 0.02574473571777344, 0.025631391525268554, 0.025590911865234375, 0.025723487854003906, 0.02568764877319336, 0.02564371109008789, 0.025810432434082032, 0.025816991806030275, 0.02584012794494629, 0.025832927703857422, 0.026, 0.026001407623291017, 0.026164543151855468, 0.025936159133911132, 0.025915807723999023, 0.025761791229248047, 0.025657344818115234, 0.025866239547729493, 0.025896095275878907, 0.02581999969482422, 0.025853952407836913, 0.025876319885253907, 0.02585763168334961, 0.02571731185913086, 0.025829376220703124, 0.02597478485107422, 0.025845760345458983, 0.025763328552246095, 0.025813024520874025, 0.025819616317749025, 0.025784320831298828, 0.025792512893676758, 0.02589286422729492, 0.0259051513671875, 0.025777887344360352, 0.0260098876953125, 0.0258404483795166, 0.025648351669311523, 0.025608991622924803, 0.025612224578857423, 0.025634880065917968, 0.025681760787963866, 0.025665695190429688, 0.02568806457519531, 0.025521184921264647, 0.025682912826538087, 0.025556991577148438, 0.02550553512573242, 0.025465087890625, 0.025576831817626954, 0.02559014320373535, 0.025520351409912108, 0.025530048370361328, 0.02568227195739746, 0.025619552612304686, 0.025654176712036132, 0.025642047882080077, 0.02566035270690918, 0.025647104263305662, 0.025603712081909178, 0.025757152557373045, 0.02568899154663086, 0.025663488388061522, 0.025622528076171876, 0.025673728942871094, 0.02574336051940918, 0.025686016082763673, 0.02569148826599121, 0.025684640884399413, 0.025638912200927736, 0.025593856811523437, 0.02578339195251465, 0.02582156753540039, 0.02576380729675293, 0.02557926368713379, 0.02589129638671875, 0.02589516830444336, 0.02578175926208496, 0.02578054428100586, 0.025917728424072264, 0.025851903915405275, 0.025732479095458984, 0.02583206367492676, 0.025951263427734374, 0.025883264541625976, 0.025734752655029298, 0.02568383979797363, 0.02573811149597168, 0.02570035171508789, 0.025839296340942383, 0.025923168182373047, 0.026105632781982423, 0.025695167541503906, 0.025765792846679687, 0.025864288330078124, 0.025917343139648438, 0.02587990379333496, 0.025911584854125976, 0.025926111221313476, 0.02609766387939453, 0.025695423126220703, 0.025579423904418946, 0.025600927352905273, 0.025646400451660157, 0.025581567764282227, 0.025467391967773437, 0.02560428810119629, 0.025610240936279297, 0.025683679580688477, 0.025485599517822265, 0.025675775527954102, 0.025742464065551758, 0.025666431427001955, 0.02569625663757324, 0.025750911712646485, 0.02581888008117676, 0.025684864044189452, 0.025773759841918945, 0.025843616485595702, 0.025788639068603517, 0.025651071548461916, 0.02572915267944336, 
0.02575584030151367, 0.025624576568603515, 0.02573030471801758, 0.02598921585083008, 0.025852575302124023, 0.025772031784057618, 0.025771615982055664, 0.02583932876586914, 0.025693920135498045, 0.025643999099731446, 0.02570240020751953, 0.025675775527954102, 0.025638080596923827, 0.025748287200927734, 0.026029504776000977, 0.02577465629577637, 0.025769983291625977, 0.02571673583984375, 0.025829376220703124, 0.025784320831298828, 0.02575155258178711, 0.0258100471496582, 0.025709440231323242, 0.025687360763549806, 0.025814720153808594, 0.02592051124572754, 0.025870336532592773, 0.02589641571044922, 0.025850400924682618, 0.025837568283081053, 0.02574745559692383, 0.025849855422973633, 0.02584979248046875, 0.02702694320678711, 0.026288703918457033, 0.025874431610107423, 0.025871519088745118, 0.025813631057739258, 0.02595248031616211, 0.02608332824707031, 0.025851903915405275, 0.025669631958007814, 0.025622528076171876, 0.025955520629882812, 0.02545337677001953, 0.02556723213195801, 0.025495552062988282, 0.02549964714050293, 0.025595903396606445, 0.025628671646118165, 0.025532064437866212, 0.025559328079223634, 0.025542720794677735, 0.02562179183959961, 0.025680608749389648, 0.02553036880493164, 0.02552627182006836, 0.025569280624389647, 0.02558361625671387, 0.025653247833251954, 0.025776128768920898, 0.025595903396606445, 0.025538400650024416, 0.02565340805053711, 0.026099071502685547, 0.02641164779663086, 0.025606143951416017, 0.02577305603027344, 0.025834495544433594, 0.02568806457519531, 0.025757696151733397, 0.02573036766052246, 0.02571939277648926, 0.02558576011657715, 0.025677215576171874, 0.02564156723022461, 0.025632511138916014, 0.025690208435058592, 0.025850015640258787, 0.025761791229248047, 0.02564240074157715, 0.025698911666870116, 0.02567987251281738, 0.025747007369995117, 0.02564726448059082, 0.025686304092407228, 0.026436704635620117, 0.025977760314941405, 0.025632768630981444, 0.02571878433227539, 0.02582908821105957, 0.025734624862670898, 0.025778144836425782, 0.025860960006713868, 0.025798656463623046, 0.025650783538818358, 0.025891231536865233, 0.025997312545776367, 0.02600886344909668, 0.026057024002075196, 0.02599567985534668, 0.025892831802368163, 0.025770015716552734, 0.026079679489135744, 0.025602367401123045, 0.025659391403198242, 0.025650911331176758, 0.02567158317565918, 0.025557376861572265, 0.025833087921142577, 0.025747039794921874, 0.025602848052978515, 0.025675775527954102, 0.02571878433227539, 0.02573721694946289, 0.025600000381469725, 0.025618431091308593, 0.02569625663757324, 0.025665536880493164, 0.02557542419433594, 0.025571327209472656, 0.02569011116027832, 0.025624063491821288, 0.02553868865966797, 0.025920896530151366, 0.02574847984313965, 0.02570854377746582, 0.02567158317565918, 0.025953535079956055, 0.02577235221862793, 0.025655839920043947, 0.02569625663757324, 0.025753599166870117, 0.025649152755737304, 0.02563644790649414, 0.025798112869262695, 0.02586515235900879, 0.027840511322021484, 0.02552217674255371, 0.025652992248535157, 0.0256760311126709, 0.02589695930480957, 0.025968511581420897, 0.025993343353271484, 0.025818143844604492, 0.025877471923828124, 0.025827327728271485, 0.025849855422973633, 0.02571801567077637, 0.025907968521118162, 0.025859872817993165, 0.025772031784057618, 0.025829599380493163, 0.02586115264892578, 0.025842655181884767, 0.02568806457519531, 0.0257392635345459, 0.02591948890686035, 0.025860095977783205, 0.025820703506469728, 0.02583600044250488, 0.025835519790649415, 0.02573107147216797, 0.025757696151733397, 
0.025841663360595703, 0.02585113525390625, 0.02586470413208008, 0.025589887619018554, 0.025510143280029297, 0.027620384216308594, 0.02549033546447754, 0.02553990364074707, 0.025686784744262694, 0.02583692741394043, 0.025719423294067383, 0.025538368225097655, 0.025513376235961914, 0.02569705581665039, 0.025628671646118165, 0.02553446388244629, 0.025606143951416017, 0.0257490234375, 0.02561257553100586, 0.025685951232910155, 0.025501951217651368, 0.025640544891357423, 0.025684383392333983, 0.025667583465576172, 0.02576348876953125, 0.02570070457458496, 0.025640512466430666, 0.025682367324829102, 0.02575974464416504, 0.02582246398925781, 0.025690080642700196, 0.025686016082763673, 0.025658143997192382, 0.025687456130981445, 0.02568662452697754, 0.025718080520629884, 0.0258853759765625, 0.025785600662231446, 0.02575436782836914, 0.025779647827148436, 0.025786943435668945, 0.025744672775268554, 0.025750240325927733, 0.02589411163330078, 0.02573801612854004, 0.027373504638671876, 0.02572470474243164, 0.025915679931640626, 0.02571059226989746, 0.025776128768920898, 0.025746816635131835, 0.025783935546875, 0.025826303482055665, 0.025853824615478516, 0.025790592193603516, 0.025753599166870117, 0.025900575637817384, 0.025801183700561524, 0.025879648208618163, 0.025805728912353516, 0.02589023971557617, 0.025797183990478516, 0.025786367416381836, 0.025839616775512695, 0.02595430374145508, 0.027101119995117186, 0.026248767852783204, 0.02562060737609863, 0.025612672805786132, 0.02555084800720215, 0.02550783920288086, 0.025544704437255858, 0.025772031784057618, 0.025873823165893553, 0.025837663650512696, 0.025604608535766602, 0.025612224578857423, 0.025746559143066405, 0.02576639938354492, 0.02585968017578125, 0.02554070472717285, 0.025993024826049805, 0.025743648529052733, 0.025805471420288086, 0.025752895355224608, 0.025647775650024414, 0.025681951522827148, 0.025785791397094728, 0.025767711639404296, 0.02583558464050293, 0.025732927322387696, 0.02588355255126953, 0.025898880004882812, 0.025700479507446288, 0.02566307258605957, 0.02573148727416992, 0.02569215965270996, 0.02558515167236328, 0.02559152030944824, 0.025774879455566405, 0.02575564765930176, 0.02570854377746582, 0.026009056091308595, 0.026014240264892578, 0.025976831436157227, 0.025915391921997072, 0.02589251136779785, 0.02578041648864746, 0.025669792175292968, 0.02590480041503906, 0.025910751342773437, 0.025872671127319335, 0.025911903381347655, 0.025833471298217774, 0.02586966323852539, 0.025788543701171875, 0.025905696868896485, 0.02594611167907715, 0.02590924835205078, 0.02586537551879883, 0.026139488220214845, 0.025894912719726562, 0.026036031723022462, 0.026072351455688477, 0.02609654426574707, 0.02585990333557129, 0.025903263092041016, 0.026195167541503906, 0.025903039932250977, 0.025817951202392577, 0.025618495941162108, 0.025572832107543946, 0.025463327407836914, 0.025472160339355468, 0.025486175537109374, 0.025544416427612304, 0.02560848045349121, 0.02570159912109375, 0.025543455123901368, 0.025508928298950195, 0.025562047958374023, 0.025581567764282227, 0.025587711334228515, 0.025601823806762694, 0.025577695846557617, 0.025671680450439452, 0.02559552001953125, 0.025784704208374025, 0.025826719284057616, 0.02594374465942383, 0.025860832214355468, 0.025786048889160158, 0.025809215545654296, 0.025660863876342775, 0.02586204719543457, 0.025909408569335938, 0.025753311157226563, 0.025870975494384767, 0.025888864517211913, 0.025841920852661134, 0.02569599914550781, 0.02587264060974121, 0.02573855972290039, 0.02570841598510742, 
0.025682111740112305, 0.025858528137207033, 0.025933984756469727, 0.025903104782104492, 0.02585763168334961, 0.025967008590698244, 0.025870336532592773, 0.02571820831298828, 0.025899295806884767, 0.02575388717651367, 0.025724735260009766, 0.02569228744506836, 0.02583955192565918, 0.025817216873168944, 0.025819135665893556, 0.02573516845703125, 0.025855424880981446, 0.02578060722351074, 0.02579270362854004, 0.02574336051940918, 0.025780128479003905, 0.02571683120727539, 0.025892383575439454, 0.025921375274658202, 0.02602364730834961, 0.02584809684753418, 0.025846271514892577, 0.025934751510620118, 0.02571673583984375, 0.02578214454650879, 0.025766016006469727, 0.025513248443603517, 0.025487424850463868, 0.02566595268249512, 0.025616064071655273, 0.025495872497558594, 0.025708799362182618, 0.025656320571899413, 0.025697280883789062, 0.025595808029174806, 0.02582681655883789, 0.025930335998535156, 0.02569625663757324, 0.025765087127685545, 0.025727136611938477, 0.025637504577636718, 0.025610240936279297, 0.02560540771484375, 0.025795295715332033, 0.025699455261230467, 0.025822080612182618, 0.025874431610107423, 0.025736480712890624, 0.025692895889282228, 0.025772031784057618, 0.025753599166870117, 0.02582294464111328, 0.025788703918457032, 0.025757696151733397, 0.02575564765930176, 0.025694208145141603, 0.025776128768920898, 0.025957887649536132, 0.02575382423400879, 0.02563920021057129, 0.025784320831298828, 0.025794559478759766, 0.02574336051940918, 0.025777408599853516, 0.025813343048095704, 0.025784736633300782, 0.025667583465576172, 0.02575155258178711, 0.025772031784057618, 0.02567308807373047, 0.025660032272338866, 0.02573516845703125, 0.025792512893676758, 0.025808895111083984, 0.02586739158630371, 0.026088319778442382, 0.026028032302856444, 0.025665056228637694, 0.02576972770690918, 0.0259051513671875, 0.025758432388305663, 0.02589286422729492, 0.02608867263793945, 0.026030879974365234, 0.025835519790649415, 0.025973312377929686, 0.02555423927307129, 0.025674432754516602, 0.025544704437255858, 0.025638912200927736, 0.025575008392333985, 0.02561270332336426, 0.02571878433227539, 0.026175487518310548, 0.025620479583740235, 0.025597312927246093, 0.02571855926513672, 0.025619295120239256, 0.025663488388061522, 0.025660768508911132, 0.025621152877807616, 0.025616384506225585, 0.02573014450073242, 0.025838079452514647, 0.025804256439208983, 0.025625471115112305, 0.02578233528137207, 0.02571251106262207, 0.025706367492675783, 0.02564463996887207, 0.026008224487304686, 0.02637968063354492, 0.02579007911682129, 0.02577712059020996, 0.025875839233398437, 0.025686656951904297, 0.025675775527954102, 0.025839616775512695, 0.025826976776123046, 0.02589251136779785, 0.025916095733642577, 0.026040319442749024, 0.02582691192626953, 0.025843488693237306, 0.025931936264038086, 0.026060415267944337, 0.025752031326293945, 0.0258338565826416, 0.025949247360229494, 0.025887008666992187, 0.025782943725585938, 0.025878528594970703, 0.02588057518005371, 0.025898719787597658, 0.025918912887573243, 0.025864831924438475, 0.025878751754760742, 0.02579372787475586, 0.02606064033508301, 0.02611404800415039, 0.0260720329284668, 0.025952255249023438, 0.026156320571899414, 0.02600009536743164, 0.025772031784057618, 0.02597657585144043, 0.025932031631469725, 0.025812992095947264]",tokens/s,38.79265202048407,, 
4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2163.884032,2194.604032,0.0,1816.133632,1727.29344,s,1,9.0174072265625,9.0174072265625,0.0,9.0174072265625,9.0174072265625,9.0174072265625,9.0174072265625,[9.0174072265625],,kWh,5.976673873331038e-05,6.585365099496029e-06,1.910834862001165e-05,8.546045245281806e-05,,MB,2234.83904,2406.416384,0.0,1998.585856,1980.448768,s,10,3.2212420043945316,0.32212420043945317,0.00027432602111490955,0.3221717987060547,0.3224371398925781,0.32244046325683595,0.3224431219482422,"[0.32209222412109373, 0.32156170654296873, 0.32244378662109374, 0.3224364013671875, 0.3221385803222656, 0.32232028198242185, 0.3222109985351562, 0.32169442749023436, 0.32217874145507813, 0.32216485595703126]",tokens/s,794.7245182161284,kWh,9.410542486587303e-06,1.037575628221308e-06,6.269536265624508e-06,1.6717654380433117e-05,tokens/kWh,15313153.040155603,MB,2248.503296,2597.257216,0.0,2189.426688,2078.022144,s,10,177.882412109375,17.7882412109375,0.020592925819032642,17.790093749999997,17.8097537109375,17.81297841796875,17.81555818359375,"[17.74033203125, 17.77685546875, 17.772400390625, 17.789318359375, 17.816203125, 17.78546484375, 17.790869140625, 17.7986328125, 17.803298828125, 17.809037109375]",tokens/s,3.541665488618573,kWh,0.0005192048609513302,5.727199115571418e-05,0.0003450639392453764,0.0009215407913524209,tokens/kWh,68363.76706400963,,s,630,177.87812567138693,0.28234623122442337,0.0005002065340032206,0.2823446197509766,0.28293623046875,0.2831180969238281,0.2836704473876953,"[0.2817083740234375, 0.281108642578125, 0.28102655029296875, 0.28117919921875, 0.28116668701171876, 0.2813500671386719, 0.28130703735351564, 0.2817966003417969, 0.28123318481445314, 0.28118856811523435, 0.28105682373046875, 0.2815637817382812, 0.2812744445800781, 0.28089865112304685, 0.2817504272460937, 0.28136038208007813, 0.28095693969726565, 0.2816244812011719, 0.2815550537109375, 0.2813931579589844, 0.280936767578125, 0.2819615783691406, 0.2811295166015625, 0.2813604431152344, 0.2813358154296875, 0.28161227416992185, 0.2817576904296875, 0.2811202392578125, 0.2818810729980469, 0.28143618774414064, 0.2814886474609375, 0.2817134094238281, 0.2818887023925781, 0.2820078735351563, 0.28157867431640626, 0.2822633972167969, 0.28165399169921873, 0.28145050048828124, 0.28195550537109376, 0.2812294921875, 0.2821034851074219, 0.2815816345214844, 0.2820033264160156, 0.2816337890625, 0.28149554443359376, 0.28178369140625, 0.28162832641601565, 0.28182830810546877, 0.28160409545898435, 0.2818009033203125, 0.282184814453125, 0.2814942321777344, 0.28167578125, 0.2819154052734375, 0.28225762939453125, 0.2818082580566406, 0.2819129638671875, 0.2820143127441406, 0.2816607360839844, 0.28195724487304685, 0.28182733154296874, 0.28184173583984373, 0.2815572814941406, 0.2825162048339844, 0.2816000061035156, 0.28129278564453125, 0.28238134765625, 0.28212322998046874, 
0.28228607177734377, 0.28375567626953124, 0.28194082641601564, 0.2814460754394531, 0.2823106994628906, 0.28207748413085937, 0.28189495849609375, 0.2820966491699219, 0.28307351684570314, 0.28216644287109377, 0.2815824279785156, 0.2825994262695313, 0.2821038208007812, 0.2816468200683594, 0.2814876403808594, 0.28208740234375, 0.2819433898925781, 0.28198684692382814, 0.28177877807617185, 0.2824645690917969, 0.2820888061523438, 0.28176986694335937, 0.2818728332519531, 0.282263916015625, 0.2813644714355469, 0.2818531494140625, 0.282159912109375, 0.28162213134765623, 0.2818133850097656, 0.281831298828125, 0.2822569580078125, 0.2817108154296875, 0.28234378051757814, 0.28229971313476565, 0.28194680786132814, 0.2821629638671875, 0.282089599609375, 0.28266885375976564, 0.2821143493652344, 0.28176177978515626, 0.28232809448242185, 0.28198574829101564, 0.282744140625, 0.2830360107421875, 0.2825934448242188, 0.2832596130371094, 0.28283056640625, 0.28232498168945314, 0.2819215393066406, 0.28268655395507813, 0.2822210693359375, 0.2829930419921875, 0.28259872436523437, 0.28216085815429687, 0.2821068420410156, 0.28219186401367186, 0.28174951171875, 0.28200244140625, 0.2818103637695312, 0.2818050537109375, 0.28194964599609373, 0.28164801025390623, 0.2816470947265625, 0.28212225341796876, 0.28157131958007814, 0.28144024658203126, 0.28197479248046875, 0.2823550415039062, 0.28141839599609375, 0.28154266357421875, 0.28250521850585936, 0.2820546569824219, 0.2807807922363281, 0.2821868591308594, 0.28201666259765623, 0.2817181396484375, 0.2816468200683594, 0.2822828063964844, 0.2825413818359375, 0.2814042053222656, 0.28236123657226564, 0.28229693603515627, 0.2812333984375, 0.28171878051757815, 0.2826264038085938, 0.2821457824707031, 0.281993896484375, 0.2820177307128906, 0.28194412231445315, 0.28198910522460935, 0.2825441284179688, 0.2827960205078125, 0.2821397399902344, 0.28224371337890625, 0.2820672302246094, 0.28260906982421874, 0.2822496643066406, 0.2821588745117187, 0.2823231811523437, 0.2821611328125, 0.28179608154296876, 0.2822761535644531, 0.2825519104003906, 0.28220291137695314, 0.28239599609375, 0.28195034790039064, 0.28220880126953124, 0.2816912536621094, 0.28253070068359376, 0.2816629943847656, 0.28212066650390627, 0.28200961303710936, 0.28256051635742185, 0.2823777770996094, 0.2829818420410156, 0.282862548828125, 0.28231884765625, 0.28236764526367186, 0.2824459228515625, 0.28264678955078126, 0.28200732421875, 0.2823489990234375, 0.28172946166992185, 0.2823695373535156, 0.28246728515625, 0.28236184692382815, 0.28178411865234376, 0.2822180480957031, 0.2826302185058594, 0.28216717529296875, 0.28230303955078123, 0.28263436889648436, 0.28219390869140626, 0.2823474426269531, 0.28219732666015623, 0.2821455383300781, 0.2820280456542969, 0.2820444030761719, 0.28189697265625, 0.28263177490234376, 0.282442138671875, 0.28246426391601565, 0.28231436157226564, 0.28231512451171875, 0.28233053588867185, 0.28185836791992186, 0.28206314086914064, 0.28238027954101563, 0.2822102966308594, 0.2821610412597656, 0.282501220703125, 0.2822912902832031, 0.2823437194824219, 0.28220660400390624, 0.28217788696289064, 0.28252175903320315, 0.2824208068847656, 0.28231231689453123, 0.28185836791992186, 0.28272256469726564, 0.28227789306640627, 0.2822451171875, 0.28217138671875, 0.28220416259765624, 0.28259686279296875, 0.28197296142578127, 0.28252093505859377, 0.2827130126953125, 0.28260107421875, 0.2826029663085938, 0.2827558898925781, 0.28268142700195314, 0.2825068359375, 0.28281671142578124, 0.2828082580566406, 0.2827901611328125, 
0.2826642761230469, 0.28224517822265627, 0.28231536865234375, 0.28244378662109376, 0.2823638916015625, 0.28244992065429686, 0.2825603332519531, 0.283182373046875, 0.2824149169921875, 0.28214898681640627, 0.28238482666015624, 0.2825198974609375, 0.28207839965820314, 0.28243026733398435, 0.2831707458496094, 0.28252578735351563, 0.2823947448730469, 0.28252685546875, 0.2841689147949219, 0.28261346435546875, 0.2823191223144531, 0.28272845458984375, 0.28269329833984375, 0.28300521850585936, 0.28303466796875, 0.28245709228515625, 0.2824744873046875, 0.281973876953125, 0.28299066162109376, 0.28248556518554685, 0.2822279357910156, 0.2835259094238281, 0.2836643371582031, 0.2839423828125, 0.2836729431152344, 0.28421142578125, 0.2832015380859375, 0.2830908203125, 0.2829354248046875, 0.28259231567382814, 0.2824910278320312, 0.2826756591796875, 0.2829981689453125, 0.28238104248046875, 0.2826528625488281, 0.28226971435546877, 0.2829794006347656, 0.282104736328125, 0.28296807861328127, 0.2830824279785156, 0.28277996826171875, 0.28335104370117187, 0.28256869506835935, 0.2826506042480469, 0.2839163208007813, 0.2831787414550781, 0.2821790161132812, 0.282032958984375, 0.28291915893554687, 0.28312551879882814, 0.2829066162109375, 0.28294964599609373, 0.2832795104980469, 0.28237298583984377, 0.28226153564453127, 0.28324884033203124, 0.28287667846679687, 0.2823016052246094, 0.2827273254394531, 0.282625, 0.2823463134765625, 0.2826581726074219, 0.28176394653320314, 0.2821018981933594, 0.2824953918457031, 0.28187026977539065, 0.28230593872070314, 0.2822806091308594, 0.2824806518554687, 0.281499755859375, 0.2825419921875, 0.28195135498046875, 0.28178289794921874, 0.2821245422363281, 0.28252569580078124, 0.28158108520507813, 0.28236846923828124, 0.2817228698730469, 0.28231884765625, 0.28214019775390625, 0.2818748474121094, 0.2822943115234375, 0.28198092651367185, 0.2826136779785156, 0.28222476196289065, 0.2823065490722656, 0.2823987121582031, 0.2818655700683594, 0.28213723754882813, 0.2818925476074219, 0.2822043762207031, 0.2822302551269531, 0.28203631591796874, 0.28266143798828125, 0.2824294128417969, 0.2824390869140625, 0.28216995239257814, 0.2819297180175781, 0.28267111206054685, 0.2821983337402344, 0.2819841003417969, 0.28223959350585937, 0.28231475830078123, 0.28229632568359375, 0.28255587768554685, 0.28281704711914063, 0.28269158935546873, 0.2822287292480469, 0.2822830505371094, 0.2823976745605469, 0.28256997680664064, 0.2825523071289063, 0.2826650390625, 0.282540771484375, 0.282830810546875, 0.2821959533691406, 0.28281964111328123, 0.2822872314453125, 0.2828547668457031, 0.28284771728515623, 0.282723876953125, 0.28235415649414064, 0.28240869140625, 0.2825396728515625, 0.28260894775390627, 0.28239041137695314, 0.28224798583984373, 0.2827673645019531, 0.28229428100585935, 0.2820782775878906, 0.2822442626953125, 0.2819520263671875, 0.2820889892578125, 0.2825404968261719, 0.282499267578125, 0.28215277099609376, 0.28228512573242187, 0.28275830078125, 0.2826625671386719, 0.28218994140625, 0.28231884765625, 0.28244610595703123, 0.2819981079101562, 0.2818420715332031, 0.282692138671875, 0.282208251953125, 0.2823740844726563, 0.28222061157226563, 0.28262374877929686, 0.28240887451171875, 0.28204629516601565, 0.28255691528320315, 0.28304940795898437, 0.28197909545898436, 0.28234378051757814, 0.28224920654296876, 0.2820809326171875, 0.2819485168457031, 0.2823261413574219, 0.2826187438964844, 0.2821982116699219, 0.2824906005859375, 0.282846923828125, 0.2824277954101562, 0.2825990295410156, 0.2828082580566406, 
0.28278585815429685, 0.2824396667480469, 0.28202023315429686, 0.2823386840820313, 0.28228607177734377, 0.282679931640625, 0.2820782775878906, 0.28205517578125, 0.2826039733886719, 0.2818924865722656, 0.28278207397460936, 0.2825441284179688, 0.2825146179199219, 0.2823456115722656, 0.28275888061523435, 0.28253692626953125, 0.2823302612304687, 0.2827496643066406, 0.2826689147949219, 0.2818419189453125, 0.2827179565429688, 0.2824683532714844, 0.28237203979492187, 0.2814505615234375, 0.28211163330078126, 0.2820980224609375, 0.2821591186523438, 0.28242718505859377, 0.28200775146484375, 0.2821791687011719, 0.28169052124023436, 0.2829434814453125, 0.282093017578125, 0.2827833862304687, 0.28231283569335935, 0.28199798583984376, 0.28288214111328125, 0.282334228515625, 0.2829158020019531, 0.28237109375, 0.28195120239257815, 0.28232235717773435, 0.28237677001953126, 0.28265472412109377, 0.2826524658203125, 0.282267333984375, 0.2826734619140625, 0.2824541015625, 0.283025390625, 0.28260562133789063, 0.28247674560546876, 0.2826320495605469, 0.28254443359375, 0.28234735107421877, 0.282380126953125, 0.28298239135742187, 0.282345458984375, 0.282208251953125, 0.2828554382324219, 0.2826026306152344, 0.28202203369140627, 0.2827025451660156, 0.28374346923828125, 0.28333273315429686, 0.28216339111328126, 0.28254568481445314, 0.28293011474609375, 0.2825252380371094, 0.2828234252929688, 0.282904296875, 0.2824818420410156, 0.2824889831542969, 0.28289688110351563, 0.2820088195800781, 0.28275912475585935, 0.28214938354492186, 0.283443603515625, 0.28240280151367186, 0.28236163330078123, 0.28312188720703124, 0.282489990234375, 0.282265625, 0.28280435180664065, 0.2829030151367187, 0.28249981689453124, 0.2827157897949219, 0.2818607788085937, 0.28287387084960935, 0.2825871276855469, 0.28198211669921874, 0.28234225463867185, 0.2826886901855469, 0.28228436279296876, 0.2823562316894531, 0.28245303344726563, 0.2820813293457031, 0.2823396911621094, 0.2829031066894531, 0.28270797729492186, 0.2821275024414063, 0.2832491455078125, 0.28295849609375, 0.28206051635742185, 0.2828975219726563, 0.282876708984375, 0.28224725341796875, 0.2821158447265625, 0.28251162719726564, 0.2826855163574219, 0.28227783203125, 0.28352947998046873, 0.28261312866210936, 0.28258547973632814, 0.28250323486328127, 0.28231478881835936, 0.283111328125, 0.28280416870117187, 0.282333251953125, 0.28235589599609373, 0.28247406005859377, 0.28249728393554685, 0.28256256103515626, 0.28277120971679687, 0.28252108764648437, 0.2828787231445313, 0.28241510009765625, 0.2828995666503906, 0.282104736328125, 0.2827202453613281, 0.283009033203125, 0.2821937561035156, 0.28226309204101563, 0.2824374694824219, 0.28251800537109373, 0.2822692260742187, 0.2821905517578125, 0.28261135864257814, 0.2824639892578125, 0.28300875854492186, 0.2826269226074219, 0.2827056579589844, 0.28254635620117186, 0.28302133178710936, 0.2829619445800781, 0.2832652587890625, 0.2830226135253906, 0.2830750732421875, 0.2827343139648438, 0.2820447082519531, 0.2826931762695313, 0.2829349365234375, 0.2825732421875, 0.2821565246582031, 0.28265933227539064, 0.28219830322265627, 0.2824922180175781, 0.2822982177734375, 0.28269049072265623, 0.2825780944824219, 0.28191806030273436, 0.2827262268066406, 0.28285516357421875, 0.282481201171875, 0.282574951171875, 0.28316058349609374, 0.2822366027832031, 0.28232736206054687, 0.2828328857421875, 0.28230859375, 0.28245196533203126, 0.28311346435546875, 0.2831810607910156, 0.2820157470703125, 0.28297354125976565, 0.2824956359863281, 0.28224920654296876, 
0.28210519409179685, 0.28260140991210936, 0.28277188110351564, 0.2822618103027344, 0.2835672302246094, 0.28298330688476564, 0.2829537353515625, 0.28221600341796876, 0.2828394775390625, 0.2828790588378906, 0.2829869689941406, 0.28328958129882814, 0.2826731262207031, 0.28282318115234373, 0.28256979370117186, 0.28257949829101564, 0.28313018798828127, 0.28251962280273435, 0.28293533325195314, 0.283202880859375, 0.2822878723144531, 0.2829209594726563, 0.28301806640625, 0.2826587829589844, 0.282595458984375, 0.2827507629394531, 0.28313739013671874, 0.28288912963867185, 0.282435546875, 0.2828511657714844, 0.2827154235839844, 0.2824917907714844, 0.2834964599609375, 0.28251898193359376]",tokens/s,3.541750834298009,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3900, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 229, in convert_model self._replace_by_quant_layers(model, layers_to_be_replaced) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 299, in _replace_by_quant_layers self._replace_by_quant_layers(child, names, name + ""."" + name1 if name != """" else name1) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 299, in _replace_by_quant_layers self._replace_by_quant_layers(child, names, name + ""."" + name1 if name != """" else name1) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 299, in _replace_by_quant_layers self._replace_by_quant_layers(child, names, name + ""."" + name1 if name != """" else name1) [Previous line repeated 1 more time] File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 283, in _replace_by_quant_layers new_layer = QuantLinear( File ""/usr/local/lib/python3.10/dist-packages/auto_gptq/nn_modules/qlinear/qlinear_exllama.py"", line 72, in __init__ assert infeatures % self.group_size == 0 AssertionError ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4061.51168,4705.878016,0.0,4303.355904,4034.388992,s,1,10.851154296875,10.851154296875,0.0,10.851154296875,10.851154296875,10.851154296875,10.851154296875,[10.851154296875],,kWh,8.962794916250611e-05,9.879218764545068e-06,2.6595854609998137e-05,0.00012610302253704932,,MB,4153.89696,4764.598272,0.0,4347.396096,4202.697728,s,10,1.9841480102539066,0.19841480102539064,0.0007212306341661284,0.1981963195800781,0.19949085388183593,0.19952742462158202,0.1995566812133789,"[0.19767817687988282, 0.19777430725097656, 0.1978056640625, 0.19807148742675781, 0.19756182861328125, 0.1989150390625, 0.19948272705078124, 0.1989736328125, 0.19832115173339843, 0.19956399536132813]",tokens/s,1290.2263272548923,kWh,5.830067232843213e-06,6.429499077605997e-07,3.852720947098176e-06,1.0325738087701989e-05,tokens/kWh,24792416.563896526,MB,4159.434752,4764.598272,0.0,4347.396096,4213.085184,s,10,19.84817468261719,1.9848174682617183,0.007605910565328086,1.9852088012695313,1.9936192749023438,1.9955309509277344,1.997060291748047,"[1.9931944580078125, 1.997442626953125, 1.98733154296875, 1.9848724365234376, 1.9797503662109375, 1.9923458251953126, 1.985545166015625, 1.978965576171875, 1.97622119140625, 
1.9725054931640624]",tokens/s,31.740954020912923,kWh,5.821699561090679e-05,6.421177482266385e-06,3.82867849867023e-05,0.00010292495807987546,tokens/kWh,612096.4358431753,,s,630,19.845419160842916,0.03150066533467126,0.00043678423054396804,0.0314195203781128,0.03182788162231445,0.03211954116821289,0.03288780010223389,"[0.03280384063720703, 0.03178992080688477, 0.03147158432006836, 0.031213760375976562, 0.0313240966796875, 0.0313528003692627, 0.03155551910400391, 0.031463008880615234, 0.03137167930603027, 0.03160063934326172, 0.031442943572998046, 0.032024574279785153, 0.03169635200500488, 0.031253023147583006, 0.0313253116607666, 0.031294336318969725, 0.031536991119384766, 0.031594112396240236, 0.031728160858154296, 0.03133030319213867, 0.031324159622192385, 0.03126825523376465, 0.031236703872680665, 0.031346080780029296, 0.03135734367370605, 0.03160902404785156, 0.031403072357177736, 0.031464384078979495, 0.03154537582397461, 0.031522783279418945, 0.031696895599365234, 0.03246080017089844, 0.031676416397094724, 0.032142913818359375, 0.03163180732727051, 0.031968767166137696, 0.03286435317993164, 0.0319389762878418, 0.031752191543579104, 0.031643648147583005, 0.0316866569519043, 0.03160006332397461, 0.032460384368896485, 0.031882207870483396, 0.03184745597839356, 0.03167740821838379, 0.031318016052246093, 0.03146751976013184, 0.03160249519348145, 0.03175443267822266, 0.031356927871704104, 0.031206815719604493, 0.031396703720092775, 0.03217728042602539, 0.03184294319152832, 0.031469152450561526, 0.03147923278808594, 0.031294431686401364, 0.03135660743713379, 0.03138937568664551, 0.03127155113220215, 0.032903167724609376, 0.03144684791564942, 0.032220863342285154, 0.03141212844848633, 0.031283296585083005, 0.03125017547607422, 0.031236543655395507, 0.0314714241027832, 0.03265740966796875, 0.031386688232421876, 0.03138656044006348, 0.03142451286315918, 0.03138355255126953, 0.031481151580810544, 0.031328960418701174, 0.03143475151062012, 0.03130982398986817, 0.03141152000427246, 0.03134943962097168, 0.031212831497192384, 0.03139862442016601, 0.031512575149536134, 0.03145113563537598, 0.03145929527282715, 0.03138115119934082, 0.03146159934997558, 0.031306175231933596, 0.03193382453918457, 0.031621376037597654, 0.03191990470886231, 0.036738399505615235, 0.031643903732299805, 0.03154198455810547, 0.03144489669799805, 0.03153315162658692, 0.03146054458618164, 0.031617536544799804, 0.03155795288085937, 0.03149190330505371, 0.03174828720092773, 0.03192812728881836, 0.03161721611022949, 0.03159040069580078, 0.03166790390014648, 0.0316265926361084, 0.0348702392578125, 0.03206060791015625, 0.03159875106811524, 0.0316782398223877, 0.03171171188354492, 0.031422496795654294, 0.0317894401550293, 0.03157811164855957, 0.03155692863464356, 0.03136172866821289, 0.03157196807861328, 0.03157196807861328, 0.031505504608154294, 0.031554624557495116, 0.031819616317749023, 0.031991615295410156, 0.03164771270751953, 0.0315578556060791, 0.0315865592956543, 0.03142982482910156, 0.03214745712280274, 0.03202275085449219, 0.03160451126098633, 0.031489696502685544, 0.03154569625854492, 0.031704383850097655, 0.03140473556518555, 0.03136454391479492, 0.03176300811767578, 0.031688192367553714, 0.03141859245300293, 0.03149648094177246, 0.03158451271057129, 0.03146051216125488, 0.031486656188964846, 0.03151430320739746, 0.03162339210510254, 0.0314204158782959, 0.031662080764770506, 0.03187439918518067, 0.03188707160949707, 0.031534015655517576, 0.03146688079833984, 0.03142691230773926, 0.03145552062988281, 0.031514656066894534, 
0.03167142486572266, 0.03143356704711914, 0.03162691116333008, 0.03289737701416016, 0.031521951675415036, 0.0313844165802002, 0.03169215965270996, 0.031922815322875976, 0.03159040069580078, 0.03150844764709473, 0.03145116806030274, 0.03171331214904785, 0.03132995223999024, 0.03138211250305176, 0.03149590492248535, 0.03143475151062012, 0.0313403205871582, 0.031352832794189454, 0.03138991928100586, 0.03141644859313965, 0.03163532829284668, 0.03150345611572265, 0.03133670425415039, 0.03143779182434082, 0.03142624092102051, 0.031291391372680666, 0.03130486488342285, 0.031433696746826174, 0.03122777557373047, 0.03139520072937012, 0.031375999450683596, 0.0313816967010498, 0.03149148750305176, 0.03142492866516113, 0.03146137619018555, 0.03136905670166015, 0.031457439422607425, 0.03217916870117187, 0.03250790405273438, 0.03230515289306641, 0.031492095947265625, 0.031323360443115233, 0.0314550724029541, 0.031398847579956056, 0.03117670440673828, 0.0312295036315918, 0.031154624938964842, 0.03133440017700195, 0.031510528564453126, 0.031399232864379886, 0.031950624465942386, 0.03137772750854492, 0.031610687255859374, 0.03135164833068848, 0.03134444808959961, 0.03133465576171875, 0.03117862319946289, 0.031318016052246093, 0.031255935668945314, 0.031387744903564455, 0.03126927947998047, 0.03156595230102539, 0.032098239898681644, 0.032591808319091795, 0.031668352127075194, 0.03182387161254883, 0.031417951583862305, 0.03132675170898438, 0.03174937629699707, 0.03136166381835938, 0.03132985687255859, 0.031418527603149414, 0.03127680015563965, 0.03151081657409668, 0.031341856002807617, 0.0314418888092041, 0.03146137619018555, 0.031409568786621093, 0.031360671997070315, 0.03134969520568848, 0.031604736328125, 0.03141612815856933, 0.03162131118774414, 0.03139174461364746, 0.03162236785888672, 0.03145414352416992, 0.03128892707824707, 0.03136531257629394, 0.03131571197509766, 0.03127670478820801, 0.03141494369506836, 0.03133017539978027, 0.03136288070678711, 0.03146096038818359, 0.03160137557983399, 0.03136102485656738, 0.03143452835083008, 0.031980960845947266, 0.03133478355407715, 0.03159728050231934, 0.03209638214111328, 0.031523456573486326, 0.03174505615234375, 0.03144601631164551, 0.031244352340698243, 0.031466815948486326, 0.031320064544677735, 0.03215420913696289, 0.03138912010192871, 0.03152774429321289, 0.031362560272216795, 0.031182527542114258, 0.03123587226867676, 0.031232511520385742, 0.031439136505126954, 0.03122790336608887, 0.031297536849975584, 0.031369216918945314, 0.031226144790649415, 0.03138553619384766, 0.03125328063964844, 0.03141033554077149, 0.03131852722167969, 0.03172384071350098, 0.031395872116088866, 0.03155763244628906, 0.031434335708618165, 0.03128156852722168, 0.03132825660705566, 0.03141398429870605, 0.03133263969421387, 0.03125043106079101, 0.03112550354003906, 0.031319839477539066, 0.031959264755249024, 0.03149619293212891, 0.03152668762207031, 0.03146108818054199, 0.031671808242797854, 0.0316364803314209, 0.03152704048156738, 0.03145657539367676, 0.031422464370727536, 0.03146403121948242, 0.03128508758544922, 0.03139801597595215, 0.031268863677978515, 0.031422464370727536, 0.03139583969116211, 0.031345727920532224, 0.031198144912719727, 0.03137446403503418, 0.03128361511230469, 0.031153856277465822, 0.031070240020751955, 0.03161532783508301, 0.03137295913696289, 0.03149519920349121, 0.03134774398803711, 0.03135782432556152, 0.03155766487121582, 0.03148905563354492, 0.03141612815856933, 0.03212015914916992, 0.0317139835357666, 0.031735807418823245, 0.031406080245971676, 
0.031803232192993164, 0.03146080017089844, 0.032758270263671875, 0.0315088005065918, 0.03136297607421875, 0.03127052879333496, 0.0312938232421875, 0.031252479553222655, 0.03161907196044922, 0.03133235168457031, 0.031320064544677735, 0.031229663848876953, 0.03138748741149902, 0.03146991920471191, 0.03553289413452149, 0.03179110336303711, 0.0313384952545166, 0.03125411224365234, 0.03130614471435547, 0.031231071472167967, 0.031402912139892575, 0.0313602237701416, 0.031353120803833005, 0.03126892852783203, 0.031558080673217775, 0.03159244728088379, 0.03132345581054687, 0.03144569587707519, 0.03140230369567871, 0.03162694358825684, 0.031403072357177736, 0.03368153762817383, 0.03270329666137695, 0.031409343719482424, 0.03156870460510254, 0.032118785858154295, 0.0314305591583252, 0.03152284812927246, 0.03150992012023926, 0.032148128509521486, 0.03146672058105469, 0.0316464958190918, 0.03168982315063477, 0.03151350402832031, 0.031336448669433595, 0.03136716842651367, 0.03131779289245606, 0.03170070457458496, 0.03128761672973633, 0.03128047943115234, 0.031453983306884765, 0.03128940773010254, 0.03145113563537598, 0.03166556739807129, 0.03152547264099121, 0.03152511978149414, 0.03138688087463379, 0.03144908714294434, 0.031384063720703126, 0.03243145751953125, 0.03147660827636719, 0.03137430381774902, 0.03136800003051758, 0.03146284866333008, 0.031318208694458005, 0.031462879180908204, 0.03165686416625976, 0.03140003204345703, 0.031420320510864255, 0.031682559967041016, 0.031736831665039066, 0.03140096092224121, 0.031866880416870115, 0.03160883140563965, 0.03149222373962402, 0.03145238494873047, 0.03139801597595215, 0.03163996887207031, 0.03146124839782715, 0.03149440002441406, 0.03149004745483398, 0.031629119873046875, 0.031497535705566404, 0.03134745597839356, 0.03156150436401367, 0.03158665657043457, 0.03184639930725098, 0.03161497688293457, 0.03170479965209961, 0.03175993537902832, 0.031556320190429685, 0.032200702667236326, 0.03152416038513184, 0.031480512619018554, 0.03180755233764648, 0.03140601539611816, 0.031428447723388674, 0.03134284782409668, 0.03174390411376953, 0.031426559448242186, 0.03140143966674805, 0.031434816360473634, 0.03131439971923828, 0.03134265518188477, 0.031516607284545896, 0.03157196807861328, 0.03139993667602539, 0.03141948890686035, 0.031523744583129884, 0.031180799484252928, 0.03143270492553711, 0.03122585678100586, 0.03128348731994629, 0.03134435272216797, 0.03148185539245606, 0.031254528045654296, 0.031280288696289064, 0.031325023651123045, 0.03155353546142578, 0.03141836738586426, 0.031678016662597654, 0.03130822372436524, 0.03201433563232422, 0.0315702075958252, 0.031184160232543945, 0.031209760665893555, 0.031241472244262696, 0.031144607543945314, 0.031159936904907228, 0.03128998374938965, 0.0315742073059082, 0.031224767684936525, 0.031159168243408204, 0.031119359970092773, 0.031148319244384767, 0.03179475212097168, 0.031131168365478516, 0.03112825584411621, 0.03108652877807617, 0.031334016799926756, 0.031374975204467775, 0.0312675838470459, 0.03141219139099121, 0.03127084732055664, 0.03166012763977051, 0.03136409568786621, 0.031644672393798826, 0.031419551849365235, 0.03142950439453125, 0.03143436813354492, 0.03124246406555176, 0.031379487991333006, 0.03140003204345703, 0.03130572891235352, 0.03130796813964844, 0.03157939147949219, 0.03155206489562988, 0.03133235168457031, 0.031245983123779297, 0.031229471206665037, 0.031222591400146483, 0.03138764762878418, 0.031307775497436525, 0.032481281280517575, 0.03204854583740235, 0.031514368057250976, 0.0313024959564209, 
0.03125676727294922, 0.031436223983764645, 0.0312642879486084, 0.03130409622192383, 0.031289472579956054, 0.03119340705871582, 0.0311942081451416, 0.031271839141845705, 0.031929471969604495, 0.03141926383972168, 0.031455232620239255, 0.03168870353698731, 0.03149004745483398, 0.031336448669433595, 0.03150793647766113, 0.03155974388122559, 0.03150716781616211, 0.03194153594970703, 0.03286220932006836, 0.03167631912231445, 0.03194684791564941, 0.0329106559753418, 0.031587007522583005, 0.03180339241027832, 0.03153715133666992, 0.03149728012084961, 0.0313374080657959, 0.031239328384399415, 0.031156383514404296, 0.031619680404663085, 0.03126486396789551, 0.031088640213012695, 0.03180441665649414, 0.03142348861694336, 0.0316866569519043, 0.031338783264160154, 0.0314365119934082, 0.03146137619018555, 0.03135807991027832, 0.03133500862121582, 0.03182620811462402, 0.03119513511657715, 0.03129251289367676, 0.031269119262695315, 0.03135324859619141, 0.031383808135986326, 0.031410175323486327, 0.031204448699951173, 0.03106265640258789, 0.031271200180053714, 0.030932992935180665, 0.0312359676361084, 0.032102527618408205, 0.031162368774414063, 0.0311213436126709, 0.03098534393310547, 0.030989471435546874, 0.030891807556152343, 0.03130268859863281, 0.031207456588745117, 0.030913471221923828, 0.030867456436157226, 0.030885343551635743, 0.030956064224243164, 0.030797183990478514, 0.0320846061706543, 0.03136716842651367, 0.03116851234436035, 0.03177779197692871, 0.031470592498779294, 0.031164415359497072, 0.031244352340698243, 0.03156985664367676, 0.0311757755279541, 0.03117763137817383, 0.031082496643066407, 0.031115264892578126, 0.031198911666870117, 0.031180383682250977, 0.03114668846130371, 0.031041568756103515, 0.03230428695678711, 0.031337087631225585, 0.031249664306640626, 0.031116256713867186, 0.031141504287719727, 0.03140966415405273, 0.03095363235473633, 0.0310296630859375, 0.03104310417175293, 0.03116111946105957, 0.031512575149536134, 0.0309718074798584, 0.031191135406494142, 0.03159040069580078, 0.031047296524047852, 0.03141993522644043, 0.03153932762145996, 0.03121583938598633, 0.031227392196655275, 0.031161056518554688, 0.03162140846252441, 0.03126067161560059, 0.031270912170410156, 0.03110691261291504, 0.03121174430847168, 0.03129542350769043, 0.031147167205810546, 0.03129840087890625, 0.03114729690551758, 0.031148832321166994, 0.031236032485961913, 0.031148031234741212, 0.031073888778686522, 0.03101740837097168, 0.031102176666259765, 0.031132415771484376, 0.03135897636413574, 0.031119359970092773, 0.031064064025878906, 0.03118489646911621, 0.031053504943847655, 0.031185216903686523, 0.032323486328125, 0.031340639114379884, 0.03134259223937988, 0.03148547172546387, 0.03125945663452148, 0.03278950500488281, 0.03127356719970703, 0.03185168075561524, 0.03133123207092285, 0.031287296295166016, 0.031521087646484376, 0.03168560028076172, 0.031243200302124022, 0.03127008056640625, 0.031148096084594727, 0.03109328079223633, 0.0311060791015625, 0.03102102470397949, 0.03118796730041504, 0.031579967498779296, 0.031290655136108396]",tokens/s,31.745361228905484,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1382.547456,1321.074688,0.0,918.552576,869.835264,s,1,9.6635947265625,9.6635947265625,0.0,9.6635947265625,9.6635947265625,9.6635947265625,9.6635947265625,[9.6635947265625],,kWh,4.0581240495809346e-05,4.466697265961176e-06,1.222639866998998e-05,5.72743364317605e-05,,MB,1471.885312,1503.526912,0.0,1086.324736,1012.78208,s,10,0.48738784408569336,0.04873878440856934,0.00018162640028683766,0.04869055938720703,0.04880540885925293,0.04903854389190674,0.04922505191802978,"[0.049271678924560545, 0.04875360107421875, 0.04864102554321289, 0.04864227294921875, 0.04865903854370117, 0.04870409774780273, 0.048702816009521484, 0.04871299362182617, 0.04862201690673828, 0.04867830276489258]",tokens/s,5252.490457168432,kWh,1.4635423072083617e-06,1.6140104745987601e-07,9.726646670199513e-07,2.597608021688189e-06,tokens/kWh,98552205.66866946,MB,1503.895552,1505.624064,0.0,1088.421888,1018.797568,s,10,11.055209838867189,1.1055209838867186,0.00573730897436105,1.1055435180664062,1.1144205322265626,1.114564453125,1.11467958984375,"[1.1143885498046875, 1.09450439453125, 1.106480712890625, 1.1046063232421874, 1.0997322998046875, 1.1067354736328125, 1.1147083740234376, 1.1066658935546876, 1.10356884765625, 1.1038189697265626]",tokens/s,56.98670664622639,kWh,3.158769705612467e-05,3.483656065238036e-06,1.5174153805980751e-05,5.0245506927343436e-05,tokens/kWh,1253843.454920257,,s,630,11.050741989135734,0.017540860300215462,0.00032635013769225836,0.01750379180908203,0.017766604232788086,0.017905113697052,0.018771550579071046,"[0.01781350326538086, 0.01742207908630371, 0.01738163185119629, 0.017663999557495116, 0.017680383682250975, 0.017536640167236328, 0.017364927291870117, 0.01743712043762207, 0.017467296600341797, 0.01740604782104492, 0.017530879974365234, 0.01801625633239746, 0.017522687911987304, 0.017534400939941405, 0.017596736907958984, 0.017624319076538084, 0.017519392013549805, 0.017512319564819336, 0.017621343612670898, 0.01749510383605957, 0.017503040313720703, 0.017365119934082032, 0.01742848014831543, 0.01745305633544922, 0.017438655853271486, 0.017367103576660155, 0.017483776092529296, 0.017532928466796875, 0.01763532829284668, 0.01767340850830078, 0.017470272064208984, 0.01750454330444336, 0.01755660820007324, 0.01747769546508789, 0.01768704032897949, 0.017481760025024416, 0.01753606414794922, 0.017441951751708984, 0.017352479934692383, 0.017506591796875, 0.01748080062866211, 0.017517311096191406, 0.0175281925201416, 0.01752729606628418, 0.01770086479187012, 0.017575935363769533, 0.020912128448486327, 0.018785600662231446, 0.01775276756286621, 0.017881088256835938, 0.017915807723999023, 0.01760633659362793, 0.017630975723266603, 0.017820255279541015, 0.019214303970336914, 0.018630495071411134, 0.0178606071472168, 0.01761510467529297, 0.017571840286254883, 0.017708736419677733, 0.017653631210327148, 
0.017541568756103517, 0.017467391967773437, 0.017929855346679686, 0.017643775939941406, 0.017709056854248048, 0.017376768112182618, 0.01733456039428711, 0.01738159942626953, 0.017227039337158204, 0.017174591064453126, 0.01717910385131836, 0.017768640518188477, 0.017287168502807617, 0.0173789119720459, 0.017287263870239256, 0.017096704483032226, 0.017096063613891602, 0.017091520309448244, 0.01721126365661621, 0.017075872421264647, 0.017064416885375976, 0.01721343994140625, 0.017326080322265625, 0.017344032287597656, 0.017306079864501955, 0.017262592315673828, 0.01741619110107422, 0.01725644874572754, 0.017403423309326174, 0.01732246398925781, 0.017313791275024415, 0.017290655136108397, 0.01713007926940918, 0.017969152450561524, 0.018737152099609376, 0.017553184509277345, 0.01741584014892578, 0.017416767120361328, 0.017263904571533203, 0.017408735275268556, 0.0174936637878418, 0.0173306884765625, 0.017383232116699218, 0.017399839401245117, 0.017475584030151366, 0.017254400253295898, 0.017450687408447265, 0.017245792388916017, 0.01731657600402832, 0.017305599212646485, 0.017515615463256837, 0.01748249626159668, 0.017420448303222657, 0.017364992141723632, 0.01722883224487305, 0.017214431762695312, 0.017297279357910155, 0.01731391906738281, 0.017181791305541993, 0.017253023147583008, 0.017074432373046875, 0.017309696197509765, 0.017266687393188478, 0.0174202880859375, 0.01741788864135742, 0.01774336051940918, 0.017648416519165037, 0.017597280502319335, 0.017556352615356444, 0.017399808883666993, 0.01745478439331055, 0.017524063110351564, 0.01753113555908203, 0.017562335968017578, 0.017477024078369142, 0.017474143981933594, 0.01759436798095703, 0.017457279205322265, 0.017390560150146485, 0.017447839736938475, 0.017399808883666993, 0.017661951065063478, 0.017682432174682617, 0.017768447875976562, 0.017530879974365234, 0.017500160217285156, 0.017360895156860352, 0.017542272567749023, 0.017358879089355468, 0.017535135269165038, 0.017515199661254883, 0.017694719314575197, 0.017632415771484375, 0.017639680862426756, 0.017771072387695312, 0.017696128845214844, 0.01755779266357422, 0.017586559295654298, 0.017646656036376954, 0.01732703971862793, 0.017411775588989258, 0.017346656799316407, 0.017541343688964844, 0.017489919662475584, 0.017590272903442384, 0.01768448066711426, 0.01760256004333496, 0.017730623245239257, 0.017570560455322265, 0.017402048110961913, 0.017353952407836912, 0.01752707290649414, 0.01748633575439453, 0.01754857635498047, 0.0174552001953125, 0.01749260711669922, 0.01759231948852539, 0.017346080780029298, 0.017426912307739257, 0.017485824584960938, 0.017543167114257813, 0.017580032348632812, 0.017573568344116212, 0.018006336212158202, 0.01760771179199219, 0.01765065574645996, 0.01763100814819336, 0.01802217674255371, 0.01802444839477539, 0.01772480010986328, 0.01782806396484375, 0.01780531120300293, 0.017795488357543944, 0.017498111724853514, 0.017772544860839845, 0.017636512756347655, 0.01757619285583496, 0.0174451847076416, 0.01733990478515625, 0.017363168716430663, 0.01839366340637207, 0.017596160888671875, 0.017377536773681642, 0.017247840881347655, 0.017228031158447267, 0.017303712844848634, 0.01733772850036621, 0.01737001609802246, 0.017421663284301756, 0.017240447998046873, 0.017336320877075196, 0.017277984619140624, 0.01739465522766113, 0.017567743301391603, 0.017369087219238282, 0.017391679763793945, 0.017446847915649415, 0.017622400283813476, 0.01771788787841797, 0.01753081512451172, 0.01741961669921875, 0.017465375900268556, 0.01748227119445801, 0.017844224929809572, 
0.01751260757446289, 0.0174138240814209, 0.017368703842163085, 0.017983295440673827, 0.01750649642944336, 0.01741279983520508, 0.017745599746704102, 0.017509920120239258, 0.017459999084472655, 0.017472991943359373, 0.0177096004486084, 0.01741619110107422, 0.017508352279663086, 0.017246208190917968, 0.017227136611938476, 0.017180479049682618, 0.017544000625610352, 0.017537023544311522, 0.017596416473388672, 0.017481727600097655, 0.01741823959350586, 0.0175914249420166, 0.017675136566162108, 0.017735008239746095, 0.017567808151245118, 0.017553216934204103, 0.017519392013549805, 0.017755840301513674, 0.01756208038330078, 0.01750809669494629, 0.01761510467529297, 0.0175914249420166, 0.017597312927246093, 0.017551359176635743, 0.01760767936706543, 0.017613279342651368, 0.017480224609375, 0.017467391967773437, 0.017512544631958008, 0.017330080032348632, 0.017283071517944337, 0.01750150489807129, 0.0175664005279541, 0.0175797119140625, 0.01746499252319336, 0.017468191146850585, 0.01746726417541504, 0.01742233657836914, 0.017403423309326174, 0.017389984130859376, 0.0172728328704834, 0.01713363265991211, 0.0172576961517334, 0.017336671829223632, 0.01748627281188965, 0.017541120529174805, 0.017340383529663085, 0.017202911376953126, 0.017146175384521484, 0.017301504135131835, 0.017321887969970702, 0.017387264251708983, 0.017422048568725586, 0.017379968643188477, 0.017495552062988282, 0.01749648094177246, 0.01751171112060547, 0.01761743927001953, 0.017473600387573243, 0.017501760482788085, 0.017414815902709962, 0.01740390396118164, 0.01744691276550293, 0.017480703353881837, 0.01738377571105957, 0.017371807098388672, 0.01744691276550293, 0.017551359176635743, 0.01771638488769531, 0.017587039947509767, 0.017530208587646486, 0.01722640037536621, 0.017184768676757813, 0.01735865592956543, 0.017397951126098633, 0.017466367721557616, 0.017486623764038086, 0.01741231918334961, 0.017563072204589844, 0.017475584030151366, 0.01772163200378418, 0.017399808883666993, 0.017283071517944337, 0.017299455642700197, 0.01715814399719238, 0.017352703094482422, 0.01740118408203125, 0.017506975173950196, 0.017593664169311525, 0.017674623489379884, 0.017520959854125977, 0.01742233657836914, 0.017489919662475584, 0.01754313659667969, 0.017430559158325195, 0.017315040588378905, 0.018082719802856445, 0.01720230484008789, 0.01747020721435547, 0.017584127426147463, 0.017555456161499023, 0.01759231948852539, 0.017637088775634767, 0.01791139221191406, 0.017836736679077148, 0.01755036735534668, 0.017710271835327147, 0.01765519905090332, 0.017399744033813478, 0.017438528060913085, 0.017433216094970703, 0.017524736404418945, 0.017501440048217774, 0.018569984436035157, 0.01760051155090332, 0.01792201614379883, 0.017511648178100588, 0.017470272064208984, 0.01764556884765625, 0.017452096939086913, 0.017484256744384766, 0.01775254440307617, 0.01741823959350586, 0.01741119956970215, 0.017517471313476564, 0.017387487411499022, 0.017580032348632812, 0.017663999557495116, 0.017751455307006836, 0.01759292793273926, 0.017643487930297852, 0.01754115104675293, 0.017691648483276368, 0.0176711368560791, 0.01753500747680664, 0.017451007843017577, 0.017563648223876953, 0.017568992614746093, 0.018039583206176758, 0.017502208709716797, 0.01743052864074707, 0.017339744567871095, 0.01738979148864746, 0.018149375915527344, 0.01771478462219238, 0.017626623153686523, 0.017640352249145508, 0.017604608535766602, 0.01746646308898926, 0.01756048011779785, 0.017494016647338868, 0.017528480529785156, 0.01760905647277832, 0.01758393669128418, 0.01744857597351074, 
0.01756319999694824, 0.017661983489990235, 0.017757152557373045, 0.017872896194458008, 0.01756707191467285, 0.01745372772216797, 0.017622528076171876, 0.01757766342163086, 0.01783072090148926, 0.017659456253051757, 0.017719776153564452, 0.01789743995666504, 0.01759404754638672, 0.017656095504760744, 0.017831968307495116, 0.018209888458251954, 0.020539424896240235, 0.017711999893188477, 0.01765990447998047, 0.017641471862792968, 0.017489919662475584, 0.017538623809814455, 0.017582527160644533, 0.017629024505615234, 0.017608192443847655, 0.01766377639770508, 0.017641759872436522, 0.01754787254333496, 0.017568960189819335, 0.017473472595214843, 0.017513023376464844, 0.017551679611206055, 0.01802444839477539, 0.01756716728210449, 0.01747977638244629, 0.01752681541442871, 0.01759071922302246, 0.017630592346191406, 0.017538816452026366, 0.017509248733520506, 0.01745305633544922, 0.0176312313079834, 0.01760051155090332, 0.017811456680297853, 0.017547231674194336, 0.017624383926391603, 0.017623775482177733, 0.01776639938354492, 0.01779916763305664, 0.01764352035522461, 0.017563648223876953, 0.01785171127319336, 0.01777916717529297, 0.01758243179321289, 0.01755340766906738, 0.01760665512084961, 0.017632383346557617, 0.01768931198120117, 0.017698976516723634, 0.01768822479248047, 0.01759654426574707, 0.017563167572021483, 0.017588191986083985, 0.017434688568115236, 0.017406496047973632, 0.017577535629272462, 0.01750259208679199, 0.017973440170288086, 0.0175861759185791, 0.017743871688842772, 0.017717056274414063, 0.017732927322387695, 0.01762393569946289, 0.01756096076965332, 0.01755753517150879, 0.01737708854675293, 0.017390592575073242, 0.01745692825317383, 0.017518592834472657, 0.01784320068359375, 0.017826400756835937, 0.017793439865112306, 0.017624223709106445, 0.017506912231445314, 0.017569631576538087, 0.01786716842651367, 0.0177227840423584, 0.017529247283935546, 0.017393856048583983, 0.017542560577392577, 0.017486400604248047, 0.017725471496582032, 0.017709087371826172, 0.01769878387451172, 0.017677791595458986, 0.01743414306640625, 0.017185792922973633, 0.017541023254394533, 0.01762060737609863, 0.017820095062255858, 0.017426336288452148, 0.017401983261108398, 0.017475584030151366, 0.01818828773498535, 0.01768182373046875, 0.017543264389038086, 0.01749247932434082, 0.01734422492980957, 0.017273120880126953, 0.01720319938659668, 0.017120288848876952, 0.01706671905517578, 0.0169617919921875, 0.01699635124206543, 0.020703104019165038, 0.019355520248413086, 0.01744576072692871, 0.01739967918395996, 0.017405311584472657, 0.017278976440429687, 0.01749017524719238, 0.017391775131225588, 0.01737459182739258, 0.017384031295776366, 0.01759270477294922, 0.01743667221069336, 0.017528831481933595, 0.017368799209594728, 0.017465215682983398, 0.01748214340209961, 0.017340415954589843, 0.01736809539794922, 0.017449888229370117, 0.017381120681762695, 0.017488191604614258, 0.017413536071777345, 0.017242719650268554, 0.017168384552001953, 0.017131519317626954, 0.017123327255249024, 0.017462656021118163, 0.01740585517883301, 0.017421024322509766, 0.017458623886108398, 0.01746352005004883, 0.01734275245666504, 0.017368511199951173, 0.017293952941894532, 0.01724415969848633, 0.01718272018432617, 0.017184480667114258, 0.01732588768005371, 0.01780784034729004, 0.01730352020263672, 0.01729539108276367, 0.01747068786621094, 0.017545343399047852, 0.017541791915893553, 0.01761689567565918, 0.017451007843017577, 0.017385408401489257, 0.017946399688720704, 0.01731203269958496, 0.017425727844238282, 
0.017367424011230467, 0.01747318458557129, 0.01754710388183594, 0.017486656188964844, 0.017500064849853517, 0.01757360076904297, 0.01748809623718262, 0.01749612808227539, 0.017573984146118164, 0.017716896057128905, 0.017570144653320314, 0.017524736404418945, 0.01735385513305664, 0.017580032348632812, 0.017584127426147463, 0.017321983337402345, 0.017184576034545897, 0.017344640731811523, 0.017221696853637697, 0.017392736434936523, 0.01743939208984375, 0.017230079650878905, 0.017135616302490234, 0.017211296081542968, 0.017410144805908204, 0.017482784271240233, 0.01765065574645996, 0.017364992141723632, 0.017338367462158204, 0.01734646415710449, 0.01752262306213379, 0.017464927673339844, 0.01741472053527832, 0.017383424758911133, 0.017313791275024415, 0.017335359573364257, 0.017261503219604492, 0.017319936752319336, 0.01744895935058594, 0.01746963119506836, 0.017528383255004883, 0.01745305633544922, 0.017527040481567384, 0.018011455535888673, 0.017437376022338868, 0.018804447174072265, 0.017748159408569338, 0.01834156799316406, 0.01784668731689453, 0.01761689567565918, 0.017501312255859373, 0.01778505516052246, 0.017609376907348633, 0.017522687911987304, 0.017819360733032228, 0.01774550437927246, 0.017776544570922852, 0.017671039581298828, 0.01766124725341797, 0.01769327926635742, 0.017442815780639647, 0.017502208709716797, 0.017481727600097655, 0.017637344360351564, 0.0176210880279541, 0.01767171287536621, 0.017950368881225587, 0.01760291290283203, 0.017361312866210937, 0.017298879623413085, 0.01744915199279785, 0.017305984497070312, 0.017338367462158204, 0.01734646415710449, 0.01710089683532715, 0.01708831977844238]",tokens/s,57.00974655089845,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,3159.3472,4423.876608,0.0,4028.628992,3944.723968,s,1,10.06894921875,10.06894921875,0.0,10.06894921875,10.06894921875,10.06894921875,10.06894921875,[10.06894921875],,kWh,9.33011871750106e-05,1.0284588581744736e-05,2.9152801099982528e-05,0.00013273857685673786,,MB,3048.185856,4763.615232,0.0,4353.687552,4305.05728,s,10,1.147124671936035,0.1147124671936035,0.00020432700330958774,0.11468398666381835,0.11498229293823242,0.1150433048248291,0.11509211433410645,"[0.11510431671142578, 0.11450886535644532, 0.11447369384765625, 0.11482249450683593, 0.11451148986816406, 0.11480099487304687, 0.11456610870361328, 0.11459334564208984, 0.11477462768554687, 0.11496873474121094]",tokens/s,2231.66676005618,kWh,3.418145743168676e-06,3.769615614673295e-07,2.2705283022092642e-06,6.06563560684527e-06,tokens/kWh,42204975.140790775,MB,2932.334592,4763.615232,0.0,4353.687552,4305.05984,s,10,25.44476782226563,2.544476782226563,0.019668975351995483,2.54449072265625,2.565529443359375,2.5716923339843754,2.5766226464843753,"[2.520806640625, 2.520442626953125, 2.52698388671875, 2.542894775390625, 2.56067822265625, 2.546086669921875, 2.524440185546875, 2.560419677734375, 2.564159912109375, 
2.577855224609375]",tokens/s,24.759510654631082,kWh,7.292893752266355e-05,8.043900098729084e-06,3.751758686159039e-05,0.00011849042448298304,tokens/kWh,531688.5332708698,,s,630,25.442107032775883,0.04038429687742203,0.0007000421652442235,0.04031345558166504,0.04099657821655273,0.041246515274047844,0.043280706901550295,"[0.040855422973632816, 0.04014092636108398, 0.039540096282958986, 0.03972774505615234, 0.04019327926635742, 0.04052659225463867, 0.04020563125610352, 0.04050188827514648, 0.0403251838684082, 0.04016332626342774, 0.04009164810180664, 0.039913471221923826, 0.03970431900024414, 0.04036223983764648, 0.03982438278198242, 0.039672832489013675, 0.03988188934326172, 0.039881568908691406, 0.04000153732299805, 0.03993804931640625, 0.04004476928710937, 0.04014672088623047, 0.039874561309814455, 0.03991961669921875, 0.03984694290161133, 0.040043487548828124, 0.03980233764648437, 0.0402435188293457, 0.040106208801269534, 0.040567935943603514, 0.04097040176391602, 0.040549087524414065, 0.04090380859375, 0.0401596794128418, 0.04042387390136719, 0.0395425910949707, 0.039675201416015625, 0.0396984977722168, 0.039705120086669925, 0.03958403015136719, 0.039642337799072266, 0.039824062347412106, 0.03970579147338867, 0.03936486434936524, 0.03955779266357422, 0.03965542221069336, 0.0400445442199707, 0.039470592498779294, 0.03975363159179687, 0.03964358520507812, 0.03998060989379883, 0.03985776138305664, 0.0398812141418457, 0.03946697616577149, 0.03954332733154297, 0.03974137496948242, 0.03986841583251953, 0.039944255828857425, 0.039865375518798825, 0.03970943832397461, 0.03996489715576172, 0.04280934524536133, 0.040013824462890625, 0.04001177597045898, 0.03975372695922851, 0.03952009582519531, 0.040059040069580075, 0.04025139236450195, 0.04046790313720703, 0.040299072265625, 0.04029574584960938, 0.03996883010864258, 0.04125964736938476, 0.03980704116821289, 0.039559070587158206, 0.04222742462158203, 0.0403070068359375, 0.04029439926147461, 0.04017270278930664, 0.04177724838256836, 0.040790782928466794, 0.04057702255249023, 0.03998726272583008, 0.03977004623413086, 0.03967302322387695, 0.039609375, 0.03983510589599609, 0.04045993423461914, 0.03991139221191406, 0.03999609756469726, 0.039702529907226565, 0.03967935943603516, 0.0395043830871582, 0.03967007827758789, 0.03952751922607422, 0.0398507194519043, 0.039882015228271485, 0.039797470092773436, 0.039766014099121096, 0.0397496337890625, 0.039775520324707034, 0.039588382720947266, 0.03974934387207031, 0.039741920471191405, 0.03975075149536133, 0.03972393417358398, 0.03985612869262695, 0.04014182281494141, 0.040544929504394533, 0.040443775177001956, 0.040186336517333984, 0.04000153732299805, 0.03968761444091797, 0.039668193817138674, 0.03957974243164063, 0.03966345596313477, 0.03960457611083985, 0.0398743667602539, 0.04005855941772461, 0.03987843322753906, 0.04021916961669922, 0.040209503173828126, 0.03966864013671875, 0.03960601425170898, 0.03967948913574219, 0.0395305290222168, 0.041644031524658204, 0.0405766716003418, 0.04041353607177734, 0.039948287963867186, 0.039558368682861327, 0.03957132720947266, 0.03950070571899414, 0.03954073715209961, 0.03952217483520508, 0.03959321594238281, 0.03981401443481445, 0.03975196838378906, 0.039932830810546875, 0.03979756927490234, 0.03985408020019531, 0.03977011108398437, 0.04019331359863281, 0.03965568161010742, 0.0395720329284668, 0.039608222961425785, 0.03991961669921875, 0.040169086456298825, 0.040175743103027343, 0.04042982482910156, 0.0404398078918457, 0.04059660720825195, 0.040604545593261716, 
0.040065025329589846, 0.03997081756591797, 0.039687553405761716, 0.03984832000732422, 0.03978601455688477, 0.04030332946777344, 0.040232288360595704, 0.04030752182006836, 0.040218017578125, 0.04012486267089844, 0.039796062469482425, 0.03991535949707031, 0.03978294372558594, 0.040304702758789064, 0.040519134521484375, 0.040325023651123046, 0.04028911972045898, 0.04012646484375, 0.04012441635131836, 0.04015468978881836, 0.0405549430847168, 0.04065871810913086, 0.04026595306396484, 0.04088156890869141, 0.040743328094482424, 0.042321311950683595, 0.04021894454956055, 0.04016502380371094, 0.03987907028198242, 0.039807456970214844, 0.03968780899047852, 0.03972854232788086, 0.039646080017089844, 0.03975785446166992, 0.03995033645629883, 0.04043161773681641, 0.040263553619384766, 0.039686145782470705, 0.039626750946044925, 0.03971635055541992, 0.03964915084838867, 0.040409568786621095, 0.04027606582641602, 0.04050742340087891, 0.04042140960693359, 0.04047568130493164, 0.0398612174987793, 0.03957145690917969, 0.0396404800415039, 0.03975433731079102, 0.04015436935424805, 0.0399633903503418, 0.04033126449584961, 0.04089974212646484, 0.043192832946777344, 0.04057535934448242, 0.04053807830810547, 0.040613887786865234, 0.040509441375732425, 0.040275966644287106, 0.04048691177368164, 0.04069200134277344, 0.040369407653808594, 0.04021526336669922, 0.040375137329101564, 0.03991567993164063, 0.039771968841552735, 0.039752574920654295, 0.039814559936523435, 0.039700702667236326, 0.04330499267578125, 0.04085391998291016, 0.041115646362304685, 0.040499198913574216, 0.040210430145263674, 0.04019507217407227, 0.0398653450012207, 0.03994214248657227, 0.039879871368408204, 0.03957843017578125, 0.03973734283447266, 0.040390655517578124, 0.04097228622436523, 0.040357440948486326, 0.04052217483520508, 0.040618175506591796, 0.04070995330810547, 0.04019404983520508, 0.040513534545898434, 0.0400937614440918, 0.04042156982421875, 0.040435455322265626, 0.040208286285400394, 0.04050543975830078, 0.04200188827514648, 0.040721023559570316, 0.040296031951904294, 0.040468799591064454, 0.040022014617919925, 0.041304256439208986, 0.040981983184814455, 0.04051801681518555, 0.040532577514648435, 0.041093120574951174, 0.04027391815185547, 0.040171134948730466, 0.040341632843017575, 0.04096201705932617, 0.04045596694946289, 0.04057753753662109, 0.041202816009521484, 0.040522624969482425, 0.04096387100219727, 0.040760929107666016, 0.040620670318603516, 0.04080230331420898, 0.040888320922851565, 0.041322494506835936, 0.04043718338012695, 0.04061430358886719, 0.0406918716430664, 0.04081369781494141, 0.04054412841796875, 0.040289279937744144, 0.04009983825683594, 0.0400766716003418, 0.03977593612670898, 0.04031584167480469, 0.03991049575805664, 0.040552513122558594, 0.04063692855834961, 0.04036438369750977, 0.040850433349609375, 0.04025187301635742, 0.04033526229858399, 0.0400574722290039, 0.040013248443603516, 0.04004121780395508, 0.04056864166259765, 0.040890369415283206, 0.04046992111206055, 0.04085321426391601, 0.04101824188232422, 0.040525825500488284, 0.04064828872680664, 0.044755359649658204, 0.04294655990600586, 0.04009344100952148, 0.040220062255859376, 0.04104399871826172, 0.04048569488525391, 0.040705696105957034, 0.0404637451171875, 0.04044063949584961, 0.04027817535400391, 0.04001587295532227, 0.03996985626220703, 0.04011513519287109, 0.039916702270507816, 0.041370368957519534, 0.040498401641845705, 0.04111859130859375, 0.04197750473022461, 0.041314655303955075, 0.040546302795410154, 0.04031875228881836, 
0.04090700912475586, 0.04037580871582031, 0.04092361450195312, 0.040506561279296874, 0.040583358764648435, 0.040513534545898434, 0.04035238265991211, 0.040479873657226564, 0.040215423583984375, 0.04011142349243164, 0.040235710144042966, 0.04030822372436523, 0.041027137756347656, 0.04100396728515625, 0.04066918563842774, 0.040304607391357425, 0.040169086456298825, 0.04002207946777344, 0.04003670501708984, 0.039927616119384765, 0.040044097900390624, 0.04087007904052734, 0.04110947036743164, 0.04058566284179688, 0.040384544372558596, 0.04035343933105469, 0.03998492813110351, 0.040030784606933593, 0.039833824157714845, 0.03977807998657226, 0.04061798477172852, 0.04094771194458008, 0.04108083343505859, 0.04040499114990234, 0.040357887268066404, 0.04005068969726563, 0.03990902328491211, 0.040163681030273436, 0.03988655853271485, 0.03994972610473633, 0.0398996467590332, 0.03988313674926758, 0.04031203079223633, 0.040355743408203124, 0.04090969467163086, 0.040869537353515624, 0.04091305541992187, 0.04036156845092773, 0.040280479431152344, 0.03984975814819336, 0.03980265426635742, 0.039591678619384764, 0.04053443145751953, 0.040448478698730465, 0.04071014404296875, 0.04061183929443359, 0.04061798477172852, 0.04076339340209961, 0.03993600082397461, 0.040908702850341795, 0.040598369598388674, 0.04014080047607422, 0.040134654998779294, 0.03970035171508789, 0.039785694122314454, 0.04341843032836914, 0.040132190704345705, 0.03983827209472656, 0.03961561584472656, 0.039715648651123044, 0.04018294525146485, 0.040761249542236325, 0.04009494400024414, 0.039933662414550784, 0.03954687881469727, 0.039610111236572265, 0.039456832885742185, 0.039446720123291014, 0.03932796859741211, 0.039524383544921875, 0.04053948974609375, 0.040094112396240236, 0.04018380737304687, 0.040021183013916016, 0.03991839981079102, 0.039583744049072264, 0.03947520065307617, 0.039485439300537106, 0.039423103332519534, 0.03957030487060547, 0.04035583877563476, 0.040288257598876956, 0.04101715087890625, 0.040588703155517575, 0.039892833709716795, 0.03977104187011719, 0.03982275390625, 0.039523998260498044, 0.039435230255126956, 0.03961206436157227, 0.03984572982788086, 0.04018227386474609, 0.039772159576416014, 0.0401715202331543, 0.039798175811767575, 0.039735744476318356, 0.03971913528442383, 0.03964255905151367, 0.03972147369384765, 0.039809120178222655, 0.03963894271850586, 0.04028416061401367, 0.04029433441162109, 0.0406569595336914, 0.04037836837768555, 0.040401950836181644, 0.04032748794555664, 0.040618656158447265, 0.040705055236816404, 0.04058620834350586, 0.04067027282714844, 0.04071721649169922, 0.04123046493530273, 0.04053420639038086, 0.04466700744628906, 0.040308734893798825, 0.0401080322265625, 0.04000284957885742, 0.040552383422851564, 0.04073756790161133, 0.0406036491394043, 0.04048281478881836, 0.04002345657348633, 0.039750240325927735, 0.039684097290039064, 0.03976192092895508, 0.039925537109375, 0.0396473274230957, 0.03992793655395508, 0.04100096130371094, 0.043756542205810545, 0.041468929290771485, 0.04025753784179688, 0.03991363143920899, 0.04014883041381836, 0.0397496337890625, 0.03961644744873047, 0.039537792205810544, 0.03970553588867187, 0.0404398078918457, 0.04033536148071289, 0.040318977355957034, 0.04040502548217773, 0.04042121505737305, 0.04045836639404297, 0.04031488037109375, 0.04059519958496094, 0.040398750305175785, 0.040790336608886715, 0.040860767364501956, 0.04076809692382812, 0.04076784133911133, 0.040855552673339846, 0.04119884872436524, 0.04116761779785156, 0.040910846710205076, 
0.04095382308959961, 0.040844383239746096, 0.04085855865478515, 0.04084326553344726, 0.040877761840820315, 0.04119174575805664, 0.040820735931396485, 0.04111974334716797, 0.0409345588684082, 0.04067155075073242, 0.04073321533203125, 0.04063436889648438, 0.04055244827270508, 0.04090060806274414, 0.040921089172363284, 0.04082876968383789, 0.04078211212158203, 0.04087590408325195, 0.04069375991821289, 0.04166783905029297, 0.041286399841308594, 0.04072006225585938, 0.040822654724121096, 0.040823230743408205, 0.04078742218017578, 0.04070556640625, 0.04061695861816406, 0.040546302795410154, 0.0404664306640625, 0.040677089691162106, 0.04086198425292969, 0.0408616943359375, 0.040613887786865234, 0.04039820861816406, 0.040101566314697266, 0.039989471435546875, 0.04001046371459961, 0.03966505432128906, 0.03960892868041992, 0.03983769607543945, 0.04006911849975586, 0.04015718460083008, 0.040046592712402344, 0.04073020935058594, 0.0407342414855957, 0.04256243133544922, 0.040914337158203126, 0.04026780700683594, 0.04578140640258789, 0.04020649719238281, 0.043675102233886716, 0.04073513412475586, 0.040523902893066406, 0.04036316680908203, 0.040037216186523436, 0.03997491073608399, 0.0399005126953125, 0.039908256530761715, 0.039731201171875, 0.03980438232421875, 0.04040937423706055, 0.04040703964233398, 0.040498241424560544, 0.041925567626953125, 0.04010089492797852, 0.040589920043945314, 0.04322124862670899, 0.041512161254882815, 0.04086876678466797, 0.04177081680297851, 0.04099631881713867, 0.04102012634277344, 0.040720382690429685, 0.040762783050537106, 0.040403553009033207, 0.0403642578125, 0.04015884780883789, 0.04045334243774414, 0.03982227325439453, 0.039913471221923826, 0.04019814300537109, 0.040567008972167966, 0.04065846252441406, 0.04108006286621094, 0.040855297088623045, 0.04092911911010742, 0.040882335662841794, 0.04099264144897461, 0.040994945526123046, 0.041082878112792966, 0.040943614959716795, 0.040941566467285154, 0.04115216064453125, 0.04091734313964844, 0.04089408111572266, 0.04093743896484375, 0.04099523162841797, 0.040862751007080075, 0.04101583862304688, 0.040790462493896486, 0.04078976058959961, 0.04087628936767578, 0.04074291229248047, 0.04073379135131836, 0.04079526519775391, 0.04054982376098633, 0.04075321578979492, 0.04090832138061523, 0.040780479431152344, 0.04066326522827148, 0.040815872192382814, 0.041089630126953124, 0.04089651107788086, 0.04069484710693359, 0.040690238952636716, 0.04064057540893555, 0.041551551818847655, 0.040702590942382814, 0.04087807846069336, 0.04095590209960937, 0.041010368347167966, 0.04089939117431641, 0.0409989128112793, 0.040830944061279295, 0.04084870529174805, 0.041075424194335936, 0.04095590209960937, 0.04080844879150391, 0.04099020767211914, 0.0407938232421875, 0.04083932876586914, 0.041089664459228514, 0.04077939224243164, 0.04084099197387695, 0.04096828842163086, 0.04103833770751953, 0.041193473815917966, 0.04082390213012695, 0.04106537628173828, 0.04107369613647461, 0.041262046813964844, 0.04091843032836914, 0.041388641357421874, 0.04087398529052735, 0.04076339340209961]",tokens/s,24.76210005674453,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4376.895488,4566.482944,0.0,4188.012544,4187.049984,s,1,10.3305439453125,10.3305439453125,0.0,10.3305439453125,10.3305439453125,10.3305439453125,10.3305439453125,[10.3305439453125],,kWh,9.650066008333719e-05,1.0637245674047769e-05,3.166196977399949e-05,0.00013879987553138445,,MB,4312.846336,4962.844672,0.0,4555.014144,4514.269184,s,10,7.85486279296875,0.785486279296875,0.0024385657671590765,0.7846548767089844,0.7884500244140625,0.7891213439941406,0.7896583996582032,"[0.78331787109375, 0.7868609619140625, 0.7877884521484375, 0.7831114501953125, 0.7833239135742187, 0.783201416015625, 0.78598583984375, 0.7883008422851563, 0.7831793823242188, 0.7897926635742187]",tokens/s,325.9127584369232,kWh,2.2828943265063682e-05,2.516557984143609e-06,1.5149542034153764e-05,4.049504328336105e-05,tokens/kWh,6321761.362462538,MB,4320.833536,4979.621888,0.0,4571.79136,4514.271744,s,10,466.9446015625,46.69446015625,0.009211138575253765,46.69581640625,46.701953515625,46.704838085937496,46.7071457421875,"[46.67080078125, 46.69037109375, 46.7013125, 46.69963671875, 46.69367578125, 46.69544140625, 46.69619140625, 46.70772265625, 46.69769140625, 46.6917578125]",tokens/s,1.3491964526238884,kWh,0.0013617363878057702,0.0001502109238077487,0.0009058930003548463,0.0024178403119683653,tokens/kWh,26056.31136520826,,s,630,466.9331598510745,0.7411637457953559,0.0004398744652296437,0.7411553955078125,0.7415886169433593,0.741754150390625,0.7420652239990234,"[0.7400548706054687, 0.7412684326171874, 0.7400137939453125, 0.7404874267578125, 0.7405681762695312, 0.7407911376953125, 0.740642333984375, 0.7402091674804687, 0.740773193359375, 0.7406885986328124, 0.74061376953125, 0.7400984497070312, 0.7404476928710938, 0.7406184692382812, 0.7407650146484375, 0.7409305419921876, 0.7405218505859374, 0.7407485961914062, 0.7407849731445313, 0.740691650390625, 0.7404711303710938, 0.7407369995117188, 0.7407920532226563, 0.740763916015625, 0.740427734375, 0.7405476684570312, 0.7409570922851563, 0.7412613525390624, 0.74046875, 0.7403253784179687, 0.7409314575195313, 0.7408845825195313, 0.7408425903320313, 0.7404266967773437, 0.7407796020507813, 0.7410220947265626, 0.7409541015625, 0.7407222900390625, 0.7410399169921875, 0.7417101440429688, 0.740493408203125, 0.7410515747070312, 0.7407597045898437, 0.741287841796875, 0.740619140625, 0.7408491821289063, 0.7407881469726563, 0.7413148803710937, 0.7415169067382813, 0.7405165405273437, 0.7405834350585937, 0.7412981567382813, 0.74104150390625, 0.7410548095703124, 0.740962646484375, 0.741011474609375, 0.7411691284179688, 0.7408289794921875, 0.7414130859375, 0.7407860107421875, 0.741963134765625, 0.7403405151367187, 0.74083740234375, 0.7407962646484375, 0.7412327880859375, 0.7407361450195312, 0.7408401489257812, 0.7409166870117188, 
0.7415664672851563, 0.7402646484375, 0.7407185668945313, 0.7409295654296875, 0.7411096801757813, 0.7403663940429688, 0.7409575805664063, 0.7412445678710937, 0.7411536865234375, 0.7405241088867187, 0.7409991455078125, 0.7411691284179688, 0.741085205078125, 0.7407554321289063, 0.740642822265625, 0.7411220703125, 0.7422764892578125, 0.7411022338867187, 0.74110498046875, 0.7409793701171875, 0.7408450317382812, 0.7406897583007812, 0.7414053955078125, 0.7411097412109375, 0.7411240844726562, 0.7409183349609375, 0.7414363403320312, 0.7412568359375, 0.7411265258789063, 0.7414898071289062, 0.7413748168945312, 0.7415459594726562, 0.7411568603515625, 0.7411302490234375, 0.7410421752929688, 0.74123876953125, 0.7412080078125, 0.7415253295898437, 0.7408212280273437, 0.741011474609375, 0.741369873046875, 0.741525390625, 0.7403992309570312, 0.741412841796875, 0.7414989013671875, 0.7410319213867187, 0.741306396484375, 0.7412183227539062, 0.741035888671875, 0.7411995239257813, 0.7411327514648437, 0.7411178588867188, 0.74104638671875, 0.7415275268554687, 0.7411786499023437, 0.7417346801757813, 0.7408972778320313, 0.7411565551757813, 0.74149169921875, 0.7405444946289063, 0.7411159057617187, 0.7413350219726562, 0.7413760375976562, 0.7407493286132812, 0.7412242431640625, 0.7418446044921875, 0.7410775756835938, 0.7404031982421875, 0.7409903564453125, 0.7414483032226562, 0.7410680541992187, 0.74062451171875, 0.7414396362304687, 0.741398193359375, 0.7410178833007812, 0.7408229370117188, 0.7414319458007812, 0.7412367553710938, 0.7406735229492187, 0.7419535522460937, 0.741868896484375, 0.740987548828125, 0.7405936889648438, 0.7413206787109375, 0.7413677978515625, 0.741185546875, 0.7414883422851563, 0.7411674194335938, 0.74111181640625, 0.7407308959960938, 0.741491943359375, 0.7411206665039063, 0.7415253295898437, 0.7414500732421875, 0.7409674682617188, 0.7418802490234375, 0.7409812622070312, 0.7410646362304687, 0.7412675170898437, 0.7410360107421875, 0.741416015625, 0.7407230224609375, 0.74164013671875, 0.7409830322265625, 0.7409074096679688, 0.7415291748046875, 0.7415167846679688, 0.7413599853515624, 0.7414026489257812, 0.741384765625, 0.7411292724609375, 0.741028564453125, 0.7413885498046875, 0.7414886474609375, 0.7408046264648438, 0.7417279663085937, 0.7412145385742187, 0.7414307250976563, 0.7414728393554687, 0.7443988647460937, 0.7417261962890624, 0.74151123046875, 0.74071875, 0.7410575561523437, 0.7411285400390625, 0.7408866577148437, 0.7407396240234375, 0.7418423461914062, 0.74116357421875, 0.740701904296875, 0.7411981201171876, 0.7411087646484374, 0.7412780151367188, 0.7409827880859375, 0.7410835571289063, 0.7407659301757813, 0.741185546875, 0.7409679565429688, 0.74139697265625, 0.7413186645507812, 0.7406237182617188, 0.741683837890625, 0.7403906860351562, 0.7412840576171875, 0.7410258178710938, 0.7409801635742187, 0.74120361328125, 0.7415632934570312, 0.7408532104492187, 0.741116455078125, 0.7411136474609376, 0.7415145874023438, 0.7412662963867187, 0.7409889526367187, 0.7410872192382812, 0.7411751708984375, 0.7410032348632812, 0.7412933349609375, 0.7412293701171875, 0.741222412109375, 0.7412467651367187, 0.740702392578125, 0.7414190063476562, 0.7414312744140625, 0.741305908203125, 0.7415014038085938, 0.7416770629882813, 0.7416893310546875, 0.7411199951171875, 0.7407368774414063, 0.7420499267578125, 0.7413145751953125, 0.7412017211914063, 0.7411181640625, 0.741326171875, 0.7411607666015625, 0.7411056518554687, 0.7413658447265625, 0.741214111328125, 0.74111474609375, 0.7406795043945312, 0.7464931640625, 
0.7411426391601562, 0.7410634765625, 0.7404827880859375, 0.7409974365234375, 0.7406197509765625, 0.7408665771484375, 0.7403984375, 0.7413561401367188, 0.7419658203125, 0.7407144165039062, 0.7410791015625, 0.7407882080078125, 0.7407738647460937, 0.7412030029296875, 0.741058837890625, 0.7410695190429688, 0.7415043334960938, 0.7412430419921875, 0.7411819458007812, 0.740724609375, 0.7415598754882813, 0.7413863525390625, 0.74105615234375, 0.740856689453125, 0.7412633666992188, 0.7410808715820313, 0.7409461059570313, 0.7410293579101562, 0.7410462646484375, 0.7412589111328125, 0.7407010498046875, 0.7408455810546875, 0.7406441650390625, 0.7417471313476562, 0.7409865112304688, 0.7412426147460938, 0.74111181640625, 0.7409611206054687, 0.7407144165039062, 0.7407513427734375, 0.7415823364257812, 0.7408370361328125, 0.7415029907226562, 0.740903564453125, 0.7415789794921875, 0.74149072265625, 0.7410846557617188, 0.741435791015625, 0.7410320434570312, 0.7414353637695312, 0.741074951171875, 0.7409930419921875, 0.7410699462890625, 0.7412860107421875, 0.7413436279296876, 0.7414398803710938, 0.7414678955078124, 0.7416117553710937, 0.74132421875, 0.7413458862304687, 0.7408488159179687, 0.7418765258789063, 0.7413718872070313, 0.7415316772460937, 0.7410808715820313, 0.741275390625, 0.7411348876953125, 0.7408580322265625, 0.7411875610351563, 0.7405813598632812, 0.7406832275390625, 0.7408024291992188, 0.7411677856445312, 0.74102783203125, 0.74060595703125, 0.741514404296875, 0.7412142944335938, 0.740965087890625, 0.7410953979492187, 0.7418016357421875, 0.7406708374023437, 0.7412825317382813, 0.741028076171875, 0.7408599243164062, 0.741296142578125, 0.7411630249023438, 0.7415357666015625, 0.7415214233398437, 0.74069921875, 0.7410963745117187, 0.7411138305664062, 0.741212158203125, 0.7413514404296875, 0.7405642700195313, 0.741257080078125, 0.7412816162109375, 0.7410144653320313, 0.7413077392578125, 0.7413639526367187, 0.7413846435546875, 0.74137939453125, 0.7410100708007813, 0.7410584106445313, 0.7407945556640625, 0.7417665405273437, 0.740880859375, 0.7411976928710937, 0.7413947143554688, 0.7411568603515625, 0.741291748046875, 0.7409461669921875, 0.7417200927734375, 0.741274658203125, 0.7410894775390625, 0.7413556518554687, 0.7417534790039062, 0.74169140625, 0.7408414916992188, 0.7413616943359375, 0.74169873046875, 0.7417782592773438, 0.7409581909179688, 0.7410831298828126, 0.7413800659179688, 0.741080322265625, 0.741001953125, 0.7416782836914062, 0.7413665771484375, 0.74156640625, 0.7408585815429688, 0.7410933837890625, 0.7409397583007813, 0.740996826171875, 0.7409339599609375, 0.7410524291992188, 0.7409766235351563, 0.7410626831054687, 0.7407861938476562, 0.741518310546875, 0.7409890747070312, 0.7406717529296875, 0.741853759765625, 0.7413754272460937, 0.7404221801757812, 0.7417546997070312, 0.7414581298828125, 0.7407673950195313, 0.7408295288085938, 0.7406038818359375, 0.7417908935546875, 0.7411633911132812, 0.7414458618164063, 0.7409154052734375, 0.7411790771484374, 0.7408560791015625, 0.741474365234375, 0.740706298828125, 0.741369873046875, 0.7410830688476563, 0.74087841796875, 0.7412449340820313, 0.7413771362304687, 0.7411264038085937, 0.7408414916992188, 0.7414598999023437, 0.741509765625, 0.7406919555664062, 0.7413411865234375, 0.7411691284179688, 0.7416173706054687, 0.7413078002929687, 0.741866455078125, 0.7411542358398437, 0.7416036987304687, 0.7412963256835937, 0.740874267578125, 0.741128173828125, 0.7413411865234375, 0.74148046875, 0.7407715454101562, 0.7412323608398438, 0.741462646484375, 
0.7415228881835938, 0.7410775756835938, 0.7413575439453125, 0.7420906372070313, 0.7412470703125, 0.7411732177734375, 0.7411302490234375, 0.7413616943359375, 0.7414722290039063, 0.7408867797851563, 0.7420254516601562, 0.7411176147460937, 0.7410732421875, 0.7404628295898438, 0.7416627197265625, 0.741654541015625, 0.741158935546875, 0.7411773681640625, 0.7414108276367187, 0.7412153930664063, 0.7408607788085938, 0.7411317749023437, 0.741876220703125, 0.7417835693359375, 0.7407921142578126, 0.7413551025390624, 0.74128857421875, 0.741025390625, 0.74086962890625, 0.7408670043945312, 0.7433052368164063, 0.741336181640625, 0.741034912109375, 0.74132275390625, 0.7412633666992188, 0.74140673828125, 0.7411273803710937, 0.7411863403320312, 0.7417216796875, 0.741171630859375, 0.74113818359375, 0.74107275390625, 0.7415269775390625, 0.7411803588867187, 0.7416565551757812, 0.7418121948242188, 0.7415275268554687, 0.7414517822265625, 0.7410155639648438, 0.7417405395507812, 0.7411896362304687, 0.7407821044921875, 0.7414673461914062, 0.7416921997070313, 0.7413944091796875, 0.7407239379882813, 0.7417884521484375, 0.7412769165039063, 0.7408805541992187, 0.7412127685546875, 0.7414353637695312, 0.7416436767578125, 0.7408827514648437, 0.741300537109375, 0.7416688842773438, 0.74085302734375, 0.7411145629882813, 0.741289306640625, 0.7416286010742188, 0.7413043212890625, 0.7413575439453125, 0.7412879028320313, 0.740908203125, 0.7408115844726563, 0.7414262084960938, 0.7415858764648438, 0.740864013671875, 0.7411909790039063, 0.7413412475585938, 0.7407028198242187, 0.7415029907226562, 0.7410174560546875, 0.7412430419921875, 0.7408756713867187, 0.7415670776367187, 0.7412177124023438, 0.7410222778320312, 0.7408429565429687, 0.7411594848632812, 0.7414948120117187, 0.740800537109375, 0.7413162231445313, 0.7411687622070312, 0.7415235595703125, 0.7405779418945313, 0.7415884399414062, 0.7405798950195313, 0.7411806030273438, 0.7407767944335938, 0.7414476928710938, 0.7411036376953125, 0.7408927001953125, 0.7414456176757812, 0.741211181640625, 0.740918212890625, 0.7409500122070313, 0.7412920532226562, 0.741231689453125, 0.7421244506835938, 0.7411846313476562, 0.7412574462890625, 0.7411528930664063, 0.7410980224609375, 0.740861328125, 0.7416243896484375, 0.7414328002929688, 0.7416283569335937, 0.741484619140625, 0.74087158203125, 0.7415220336914062, 0.7411978149414062, 0.7420714721679688, 0.7412723999023437, 0.7412981567382813, 0.7415902099609375, 0.7411629638671875, 0.7409154663085937, 0.7412301635742188, 0.7416303100585937, 0.7414974975585937, 0.7410515747070312, 0.7414230346679688, 0.7410341796875, 0.741006103515625, 0.741768310546875, 0.7406672973632813, 0.7406749877929687, 0.7412117919921875, 0.7410226440429688, 0.7402691650390625, 0.7408823852539063, 0.74189306640625, 0.7408681030273437, 0.74082470703125, 0.7407844848632813, 0.741823974609375, 0.7409332885742187, 0.7408006591796875, 0.7410634155273438, 0.7409213256835937, 0.7413677978515625, 0.7405787963867188, 0.7414502563476563, 0.741238525390625, 0.7408397216796875, 0.7412633666992188, 0.7413800659179688, 0.741254150390625, 0.74080908203125, 0.74107275390625, 0.7413040771484375, 0.7409694213867187, 0.7408125610351562, 0.74138037109375, 0.7410497436523438, 0.741210693359375, 0.7412342529296875, 0.741245361328125, 0.741232666015625, 0.7413637084960938, 0.7414763793945313, 0.741001220703125, 0.7413013916015625, 0.7409385986328125, 0.7413507690429687, 0.74101123046875, 0.7411248779296875, 0.7411139526367188, 0.7416995849609375, 0.7412953491210937, 0.7412572631835938, 
0.74071728515625, 0.7409677734375, 0.7418569946289062, 0.7414486694335938, 0.7406571655273437, 0.7415848999023438, 0.7412244262695312, 0.741185546875, 0.7410769653320313, 0.7414312744140625, 0.7413309326171875, 0.7411056518554687, 0.7408967895507812, 0.7413881225585938, 0.7412020874023437, 0.7409971313476562, 0.740896240234375]",tokens/s,1.349229513279663,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,2064.257024,2350.77632,0.0,1948.254208,1873.42336,s,1,9.25310546875,9.25310546875,0.0,9.25310546875,9.25310546875,9.25310546875,9.25310546875,[9.25310546875],,kWh,5.775403783332725e-05,6.3630336483403565e-06,1.7529180690006863e-05,8.164625217167446e-05,,MB,2130.157568,2451.439616,0.0,2034.23744,2023.140352,s,10,0.836176383972168,0.08361763839721681,0.00021672177935057457,0.08367342376708985,0.08389248352050781,0.08390043487548829,0.08390679595947266,"[0.08374873352050781, 0.08389071655273438, 0.08335977935791015, 0.08365408325195313, 0.08390838623046876, 0.08369596862792969, 0.08322249603271484, 0.08337766265869141, 0.08369276428222656, 0.08362579345703125]",tokens/s,3061.5550128777722,kWh,2.489160381038207e-06,2.7451202169236834e-07,1.6540926603728455e-06,4.417765063103421e-06,tokens/kWh,57947852.89468594,MB,2137.677824,2451.439616,0.0,2034.23744,2023.142912,s,10,16.631750854492186,1.6631750854492189,0.015191664361568321,1.6658027343750001,1.6784011474609375,1.6837164428710938,1.6879686791992188,"[1.6644376220703125, 1.6566011962890625, 1.671968505859375, 1.6671678466796875, 1.6347437744140625, 1.677219970703125, 1.6432252197265624, 1.68903173828125, 1.6706461181640626, 1.6567088623046875]",tokens/s,37.879355307312025,kWh,4.734042490813028e-05,5.221342144417219e-06,2.449373381342764e-05,7.705550086597512e-05,tokens/kWh,817592.5052979377,,s,630,16.629371109008787,0.026395827157156805,0.000501515563707492,0.02641535949707031,0.026859490776062013,0.02702726402282715,0.028147099323272712,"[0.02702128028869629, 0.026560640335083006, 0.026662815093994142, 0.02679155158996582, 0.026710336685180663, 0.026652736663818358, 0.026639392852783203, 0.027032159805297853, 0.026737024307250976, 0.02692915153503418, 0.02665679931640625, 0.026859487533569336, 0.026899967193603515, 0.026755392074584963, 0.026626016616821287, 0.026417375564575196, 0.026672576904296873, 0.026462783813476564, 0.026402816772460938, 0.02625062370300293, 0.026313343048095704, 0.02746134376525879, 0.026326528549194338, 0.026092256546020508, 0.025923648834228517, 0.025734848022460937, 0.0267609920501709, 0.025940511703491213, 0.027178911209106444, 0.026062944412231445, 0.026066816329956055, 0.025828800201416015, 0.025846527099609374, 0.02569766426086426, 0.025736799240112306, 0.0257873592376709, 0.02601308822631836, 0.025815423965454103, 0.025870367050170897, 0.025839807510375977, 0.025907199859619142, 0.025790464401245116, 0.026060415267944337, 0.025819807052612304, 
0.025943935394287108, 0.02631817626953125, 0.026415615081787108, 0.026289407730102538, 0.026483455657958985, 0.026236480712890625, 0.026700223922729492, 0.026689535140991212, 0.026830848693847657, 0.026569919586181642, 0.02667830467224121, 0.02654595184326172, 0.026666879653930664, 0.026634368896484375, 0.026697471618652345, 0.026619808197021484, 0.026734943389892577, 0.026671104431152344, 0.026835168838500977, 0.027189504623413085, 0.02668137550354004, 0.026657983779907225, 0.02648758316040039, 0.026556415557861326, 0.026572799682617186, 0.02674073600769043, 0.02660147285461426, 0.02646131134033203, 0.02656528091430664, 0.026468576431274413, 0.026466304779052735, 0.026562559127807618, 0.026537984848022462, 0.026564735412597656, 0.026445695877075195, 0.02666694450378418, 0.026646656036376955, 0.026761152267456054, 0.026419200897216798, 0.026582656860351564, 0.02662540817260742, 0.026636575698852537, 0.026454463958740234, 0.026466144561767577, 0.02626323127746582, 0.02626799964904785, 0.025948192596435545, 0.025830047607421875, 0.02565884780883789, 0.025733375549316408, 0.025986400604248047, 0.025808767318725587, 0.025766687393188478, 0.025792192459106446, 0.0257457275390625, 0.025798303604125977, 0.025680479049682618, 0.025708288192749024, 0.02568806457519531, 0.025985023498535157, 0.027039295196533204, 0.029323583602905275, 0.026054367065429688, 0.025940383911132812, 0.02595840072631836, 0.025894304275512696, 0.025823839187622072, 0.025857568740844727, 0.026120895385742186, 0.02612816047668457, 0.025894912719726562, 0.02597068786621094, 0.02595840072631836, 0.026211904525756835, 0.02624870491027832, 0.026284896850585937, 0.026013216018676757, 0.026053184509277343, 0.02617136001586914, 0.026434976577758788, 0.026206560134887695, 0.02630393600463867, 0.02695782470703125, 0.026415103912353514, 0.02636185646057129, 0.026927295684814452, 0.027623231887817384, 0.026721920013427734, 0.0265948486328125, 0.02651571273803711, 0.02642531204223633, 0.026386367797851563, 0.026820959091186522, 0.026560319900512695, 0.026661056518554688, 0.026720832824707032, 0.02678281593322754, 0.026757375717163086, 0.026855871200561522, 0.026749183654785156, 0.026746816635131836, 0.026820640563964843, 0.026798912048339844, 0.02677654457092285, 0.026744096755981446, 0.026482688903808595, 0.026823328018188475, 0.0267174072265625, 0.026824607849121093, 0.027185152053833008, 0.026923967361450196, 0.026875423431396483, 0.026853696823120117, 0.026674495697021485, 0.026876768112182616, 0.02677555274963379, 0.026859519958496093, 0.026818527221679687, 0.026871999740600585, 0.02674799919128418, 0.02659312057495117, 0.026534719467163084, 0.0266810245513916, 0.02640118408203125, 0.026426912307739258, 0.02645449638366699, 0.026352863311767578, 0.02649372863769531, 0.026292287826538085, 0.025980512619018556, 0.025976703643798827, 0.02585036849975586, 0.025865440368652345, 0.02579055976867676, 0.02593040084838867, 0.028993535995483398, 0.025941375732421876, 0.025696224212646484, 0.025746080398559572, 0.02551398468017578, 0.025709951400756836, 0.025559200286865234, 0.025987552642822265, 0.025847808837890625, 0.02607708740234375, 0.026763263702392577, 0.026379295349121094, 0.025922176361083984, 0.025729375839233397, 0.02586134338378906, 0.025836320877075197, 0.0259965763092041, 0.02613897514343262, 0.026374528884887696, 0.02605392074584961, 0.02595846366882324, 0.025874303817749023, 0.026075008392333985, 0.027962272644042968, 0.02602556800842285, 0.026109567642211916, 0.025839872360229492, 0.02589695930480957, 0.02596713638305664, 
0.02633932876586914, 0.026218463897705078, 0.02634979248046875, 0.02656643295288086, 0.026208288192749025, 0.026238975524902345, 0.026001632690429686, 0.026074911117553713, 0.0260446720123291, 0.02614860725402832, 0.026042272567749023, 0.02657084846496582, 0.026466527938842774, 0.026384159088134764, 0.026290176391601562, 0.02649087905883789, 0.026304351806640626, 0.026501279830932617, 0.026451904296875, 0.026709280014038085, 0.026608415603637695, 0.0267775993347168, 0.026611711502075194, 0.02667024040222168, 0.026607776641845705, 0.02675337600708008, 0.026662368774414063, 0.026780479431152342, 0.026711231231689454, 0.026773920059204103, 0.02671664047241211, 0.02696131134033203, 0.02680847930908203, 0.026786239624023437, 0.026884096145629883, 0.026785472869873046, 0.02664054489135742, 0.02678390312194824, 0.02671971130371094, 0.02682681655883789, 0.026612192153930663, 0.0267509765625, 0.026736160278320313, 0.028766687393188477, 0.027199296951293944, 0.026569375991821288, 0.026329120635986327, 0.026175775527954102, 0.026200063705444337, 0.0261079044342041, 0.026017791748046876, 0.02639411163330078, 0.025948352813720703, 0.02574527931213379, 0.025670080184936522, 0.025639007568359375, 0.02572073554992676, 0.02560736083984375, 0.025736000061035155, 0.025705823898315428, 0.025834016799926758, 0.025806175231933594, 0.02590780830383301, 0.025852096557617187, 0.025767936706542968, 0.025804800033569338, 0.025753599166870117, 0.025618303298950196, 0.025821311950683594, 0.026060384750366213, 0.02576630401611328, 0.025833471298217774, 0.02595840072631836, 0.025784320831298828, 0.025773439407348633, 0.025718624114990235, 0.025845983505249023, 0.025641504287719726, 0.025775808334350586, 0.02576345634460449, 0.026755615234375, 0.02569001579284668, 0.02578652763366699, 0.02602406311035156, 0.02649247932434082, 0.0259234561920166, 0.02580873680114746, 0.025618656158447266, 0.025770496368408204, 0.025544992446899416, 0.025757408142089842, 0.025655296325683592, 0.025841663360595703, 0.025985023498535157, 0.02597212791442871, 0.025782304763793944, 0.025788095474243163, 0.02577903938293457, 0.026015840530395507, 0.026259231567382812, 0.026231103897094727, 0.02622435188293457, 0.025985151290893554, 0.02609929656982422, 0.026375808715820313, 0.026020639419555663, 0.02646124839782715, 0.02654585647583008, 0.0263984317779541, 0.026308639526367188, 0.026176063537597657, 0.026250688552856446, 0.02610643196105957, 0.02641299247741699, 0.02633734321594238, 0.02650275230407715, 0.0264682559967041, 0.02664214324951172, 0.02673129653930664, 0.026884096145629883, 0.02670796775817871, 0.02675916862487793, 0.026658815383911134, 0.026961919784545898, 0.026704160690307618, 0.02736083221435547, 0.026788000106811524, 0.026857471466064452, 0.027456703186035155, 0.028009279251098633, 0.02695577621459961, 0.026704896926879884, 0.0267827205657959, 0.026978464126586915, 0.02678175926208496, 0.026776512145996093, 0.026647392272949218, 0.02671001625061035, 0.026474271774291992, 0.02657254409790039, 0.026139104843139648, 0.02623849678039551, 0.026350048065185545, 0.02636956787109375, 0.02662985610961914, 0.02686617660522461, 0.026601728439331056, 0.02675027275085449, 0.026732959747314454, 0.026704160690307618, 0.026699935913085938, 0.026772415161132813, 0.02650409507751465, 0.026709184646606446, 0.026686271667480468, 0.026894336700439454, 0.027478015899658204, 0.026656768798828126, 0.026613759994506835, 0.026691104888916014, 0.026488576889038086, 0.026370784759521485, 0.026307680130004882, 0.02630963134765625, 0.02611542320251465, 
0.026202688217163084, 0.025993215560913087, 0.026152191162109376, 0.026124959945678712, 0.02643120002746582, 0.026364479064941406, 0.025923583984375, 0.02592767906188965, 0.02601087951660156, 0.02577689552307129, 0.025733375549316408, 0.026042335510253905, 0.025890111923217773, 0.025754079818725586, 0.02574460792541504, 0.025891231536865233, 0.025778144836425782, 0.02588688087463379, 0.025799200057983397, 0.025863487243652342, 0.025761920928955077, 0.025823423385620117, 0.02577622413635254, 0.025957536697387696, 0.02572972869873047, 0.02583328056335449, 0.025796960830688477, 0.025821184158325194, 0.02569011116027832, 0.02590924835205078, 0.02775654411315918, 0.02650726318359375, 0.02598297691345215, 0.02632294464111328, 0.02574336051940918, 0.025838623046875, 0.025881120681762695, 0.026015615463256835, 0.02622265625, 0.0265645751953125, 0.026259296417236327, 0.026184383392333983, 0.02596659278869629, 0.02595430374145508, 0.026023935317993165, 0.02628976058959961, 0.02620627212524414, 0.02609190368652344, 0.025900320053100587, 0.026145471572875976, 0.02631484794616699, 0.02624687957763672, 0.026134464263916017, 0.026173696517944336, 0.026459327697753908, 0.02647327995300293, 0.026070400238037108, 0.02621836853027344, 0.02620195198059082, 0.026157983779907225, 0.026187360763549803, 0.026950048446655273, 0.025939327239990234, 0.026169343948364256, 0.025893503189086915, 0.02603753662109375, 0.0263600959777832, 0.026651071548461913, 0.028622079849243164, 0.026647520065307618, 0.026874176025390627, 0.026784896850585938, 0.02692527961730957, 0.026825056076049805, 0.02674278450012207, 0.026816511154174806, 0.026750463485717774, 0.026935840606689455, 0.026747903823852538, 0.026661344528198242, 0.02673843193054199, 0.026704896926879884, 0.02677324867248535, 0.026720479965209962, 0.026771360397338868, 0.028391136169433593, 0.029312383651733397, 0.026923391342163087, 0.02689023971557617, 0.02719968032836914, 0.02691904067993164, 0.027814239501953126, 0.027481311798095702, 0.026848031997680665, 0.02684492874145508, 0.026695680618286134, 0.026725791931152345, 0.026740447998046875, 0.026791807174682616, 0.02678495979309082, 0.026732351303100584, 0.026703872680664063, 0.026715328216552734, 0.026447935104370116, 0.02674508857727051, 0.02701545524597168, 0.02660915184020996, 0.026548959732055663, 0.026564607620239256, 0.02634547233581543, 0.026490720748901367, 0.026285663604736328, 0.026227264404296874, 0.025863679885864257, 0.028203392028808595, 0.027274879455566406, 0.026156639099121092, 0.0259749755859375, 0.026147104263305663, 0.02671561622619629, 0.02712633514404297, 0.026347936630249022, 0.026736640930175783, 0.026218496322631835, 0.026361343383789062, 0.026359296798706053, 0.026765600204467773, 0.0263702392578125, 0.02661814308166504, 0.026364160537719727, 0.026369312286376952, 0.02738604736328125, 0.026737375259399412, 0.026758176803588867, 0.026701791763305664, 0.02673891258239746, 0.026720640182495117, 0.02686787223815918, 0.026802143096923827, 0.027010271072387695, 0.02729385566711426, 0.027193599700927735, 0.026954143524169923, 0.026798080444335938, 0.026497024536132813, 0.026632192611694337, 0.026652671813964843, 0.026656351089477538, 0.026601375579833983, 0.02660403251647949, 0.026629695892333983, 0.026636735916137695, 0.026816287994384767, 0.02653116798400879, 0.0262685432434082, 0.026219903945922853, 0.026092159271240235, 0.02607513618469238, 0.026062944412231445, 0.02609552001953125, 0.026475776672363283, 0.026872575759887694, 0.02660147285461426, 0.026345504760742187, 0.02618329620361328, 
0.02619340705871582, 0.026041120529174806, 0.026037311553955077, 0.026014720916748047, 0.026304672241210938, 0.026101600646972655, 0.026097728729248048, 0.026412544250488282, 0.02648726463317871, 0.026308576583862306, 0.026062847137451172, 0.026021728515625, 0.026633567810058593, 0.026099807739257814, 0.026499807357788088, 0.02627724838256836, 0.0267836799621582, 0.026538463592529298, 0.026480863571166992, 0.026392000198364258, 0.026593856811523438, 0.026617855072021485, 0.026826751708984374, 0.02648031997680664, 0.026545536041259764, 0.026519903182983397, 0.026608224868774413, 0.026368223190307617, 0.02651910400390625, 0.02723075294494629, 0.026702016830444337, 0.02648819160461426, 0.02648739242553711, 0.026427423477172852, 0.02650476837158203, 0.02660393524169922, 0.02659324836730957, 0.026468223571777343, 0.026364288330078124, 0.026342239379882813, 0.027714527130126954, 0.026417152404785156, 0.0265850887298584, 0.026476543426513673, 0.02640480041503906, 0.02646339225769043, 0.026395551681518553, 0.02651747131347656, 0.026546207427978516, 0.026783103942871093, 0.026601343154907225, 0.026695840835571288, 0.02636582374572754, 0.02650534439086914, 0.02641916847229004, 0.02654867172241211, 0.026476736068725585, 0.026615808486938477, 0.02647555160522461, 0.02656480026245117, 0.02679654312133789, 0.02662224006652832, 0.02637811279296875, 0.027129024505615235, 0.026592191696166993, 0.027008064270019533, 0.026114559173583983, 0.026034624099731445, 0.025990367889404297, 0.02592848014831543, 0.025827583312988282, 0.02581679916381836, 0.025991199493408203, 0.02576598358154297, 0.02588252830505371, 0.025741247177124022, 0.025655359268188477, 0.025695327758789063, 0.02592860794067383, 0.025817087173461914, 0.02555084800720215, 0.025699647903442382, 0.025864896774291993, 0.025808895111083984, 0.02595430374145508, 0.02599839973449707, 0.02567171287536621, 0.02578755187988281, 0.025681663513183593, 0.026094655990600586, 0.026168256759643554, 0.02572083282470703]",tokens/s,37.88477603092904,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,7184.30208,7954.366464,0.0,7551.844352,7485.12768,s,1,13.0931357421875,13.0931357421875,0.0,13.0931357421875,13.0931357421875,13.0931357421875,13.0931357421875,[13.0931357421875],,kWh,0.00016620702140002474,1.832654920580597e-05,5.337615381199867e-05,0.0002379097244178294,,MB,2843.389952,8254.2592,0.0,7837.057024,7735.356416,s,10,3.59822216796875,0.359822216796875,0.0006188524955450805,0.35956118774414064,0.36061900634765626,0.36101775512695317,0.3613367541503906,"[0.35951092529296874, 0.35944155883789064, 0.359504150390625, 0.3596110534667969, 0.359662353515625, 0.35972952270507813, 0.3595113220214844, 0.35930438232421874, 0.3605303955078125, 
0.36141650390625]",tokens/s,711.4624613202132,kWh,1.0535466778124677e-05,1.161870909687266e-06,7.020700060999688e-06,1.871803774881163e-05,tokens/kWh,13676647.276568983,MB,2852.888576,8275.23072,0.0,7858.028544,7759.281152,s,10,26.330813964843752,2.633081396484375,0.004968638224754051,2.6331734619140628,2.6393992919921874,2.6398162231445315,2.6401497680664066,"[2.626032958984375, 2.627359130859375, 2.62848974609375, 2.629377197265625, 2.631251708984375, 2.639306640625, 2.635389404296875, 2.63509521484375, 2.63827880859375, 2.640233154296875]",tokens/s,23.92633971897566,kWh,7.71681779802096e-05,8.511764286441459e-06,5.114451313779951e-05,0.00013682445540445058,tokens/kWh,460444.00333093345,,s,630,26.32782842636108,0.0417902038513668,0.0004091423633997235,0.04179265594482422,0.04227153396606445,0.042416332817077636,0.042722499122619634,"[0.04223244857788086, 0.0417729606628418, 0.041580638885498046, 0.04160076904296875, 0.04177695846557617, 0.04157833480834961, 0.04141116714477539, 0.041506816864013675, 0.04139212799072266, 0.04141241455078125, 0.04136569595336914, 0.04118710327148437, 0.04104377746582031, 0.04124611282348633, 0.04145174407958984, 0.041057056427001956, 0.041242401123046876, 0.04109539031982422, 0.04101520156860351, 0.04153318405151367, 0.041793888092041015, 0.04181196975708008, 0.0416890869140625, 0.04156620788574219, 0.041613311767578126, 0.04169481658935547, 0.041682975769042965, 0.04184921646118164, 0.041487998962402343, 0.04179731369018555, 0.041851009368896484, 0.04159955215454102, 0.04156611251831055, 0.041351264953613284, 0.0413941764831543, 0.04170512008666992, 0.0420781135559082, 0.041966014862060544, 0.04179558563232422, 0.04169728088378906, 0.04171366500854492, 0.041794719696044924, 0.04161417770385742, 0.042090015411376955, 0.04193059158325195, 0.04164988708496094, 0.041382816314697264, 0.04193452835083008, 0.041277759552001955, 0.04150271987915039, 0.04194918441772461, 0.04235235214233399, 0.04238297653198242, 0.04219766235351562, 0.042393024444580076, 0.04210892868041992, 0.0419944953918457, 0.0418306884765625, 0.04182223892211914, 0.041869152069091795, 0.04183603286743164, 0.04174300765991211, 0.04190412902832031, 0.041686206817626956, 0.04079289627075195, 0.041011199951171876, 0.0409859504699707, 0.041146305084228514, 0.04164886474609375, 0.041813343048095704, 0.04156892776489258, 0.041633377075195314, 0.041664928436279294, 0.04186111831665039, 0.041654239654541014, 0.041528545379638675, 0.04170166397094727, 0.04148892974853516, 0.0412542724609375, 0.041495166778564456, 0.041336830139160154, 0.04135321426391601, 0.041303169250488284, 0.04102143859863281, 0.04119184112548828, 0.04120419311523438, 0.041180416107177736, 0.04129254531860352, 0.04163993453979492, 0.041957374572753905, 0.041780799865722654, 0.04191209411621094, 0.04194780731201172, 0.041923648834228514, 0.04184944152832031, 0.04187580871582031, 0.04190595245361328, 0.04191664123535156, 0.04175872039794922, 0.04175667190551758, 0.04175641632080078, 0.04152550506591797, 0.041485313415527345, 0.04210790252685547, 0.04222566223144531, 0.042024703979492185, 0.041812030792236325, 0.041801727294921875, 0.04184902572631836, 0.04158054351806641, 0.04183859252929688, 0.041657600402832035, 0.04167756652832031, 0.041516193389892576, 0.04152540969848633, 0.04152799987792969, 0.04161231994628906, 0.04201980972290039, 0.04230144119262695, 0.04241408157348633, 0.04243385696411133, 0.042601150512695314, 0.042296607971191405, 0.04224265670776367, 0.04216640090942383, 0.041977344512939455, 0.04212262344360351, 
0.04116748809814453, 0.04119551849365234, 0.04106444931030274, 0.04098457717895508, 0.04098252868652344, 0.04109107208251953, 0.04127334213256836, 0.04119065475463867, 0.04135193634033203, 0.041358463287353514, 0.041882465362548825, 0.041981502532958986, 0.04224444961547852, 0.04201894378662109, 0.04166860961914062, 0.04159897613525391, 0.041586688995361325, 0.04140851211547852, 0.04160921478271484, 0.041404415130615234, 0.04149452972412109, 0.041441280364990236, 0.04120556640625, 0.04126873779296875, 0.04132320022583008, 0.04126924896240235, 0.04127651214599609, 0.04153139114379883, 0.041845664978027344, 0.04217795181274414, 0.04187948989868164, 0.041830558776855466, 0.0419466552734375, 0.04178230285644531, 0.04193843078613281, 0.04198035049438477, 0.04195641708374023, 0.0419081916809082, 0.04162614440917969, 0.04162809753417969, 0.0415904655456543, 0.041613632202148435, 0.04188310241699219, 0.042197120666503905, 0.04232406234741211, 0.04204745483398437, 0.04186102294921875, 0.0419263687133789, 0.04199497604370117, 0.0417127685546875, 0.04154457473754883, 0.041588737487792966, 0.041764320373535155, 0.041640094757080075, 0.04168947219848633, 0.041918464660644535, 0.042089759826660154, 0.04250492858886719, 0.042503711700439456, 0.04259235382080078, 0.04238169479370117, 0.042325729370117186, 0.042206016540527344, 0.04139212799072266, 0.04148223876953125, 0.041439231872558595, 0.04137779235839844, 0.041256160736083985, 0.041243038177490234, 0.04115068817138672, 0.041277599334716794, 0.04134297561645508, 0.041286785125732424, 0.04127628707885742, 0.041299934387207034, 0.04164611053466797, 0.04206950378417969, 0.041914878845214845, 0.041918464660644535, 0.0417259521484375, 0.04154982376098633, 0.04139622497558594, 0.04125491333007812, 0.04161536026000977, 0.04145971298217774, 0.041381889343261716, 0.0416167984008789, 0.041476608276367184, 0.0415129280090332, 0.04153484725952149, 0.04118796920776367, 0.04140371322631836, 0.041570335388183596, 0.04162025451660156, 0.04195913696289062, 0.042063423156738285, 0.042140384674072266, 0.04190617752075195, 0.041793537139892575, 0.041971710205078124, 0.041995582580566404, 0.04190892791748047, 0.041859073638916014, 0.041672286987304685, 0.041674816131591796, 0.041709918975830075, 0.04157440185546875, 0.041474048614501956, 0.04185081481933594, 0.042186817169189456, 0.04228860855102539, 0.042365470886230466, 0.042073726654052734, 0.04197824096679687, 0.04181196975708008, 0.04199971389770508, 0.04179177474975586, 0.041816097259521484, 0.041713600158691404, 0.04172636795043945, 0.04190003204345703, 0.04239974212646484, 0.04245020675659179, 0.042689247131347655, 0.042468414306640626, 0.042186046600341795, 0.041777599334716795, 0.041552127838134764, 0.04157235336303711, 0.04155187225341797, 0.04149452972412109, 0.04159296035766601, 0.041516929626464846, 0.04120716857910156, 0.04126784133911133, 0.04127289581298828, 0.04106079864501953, 0.04110335922241211, 0.041619457244873044, 0.04166191864013672, 0.04181983947753906, 0.04183647918701172, 0.041904640197753903, 0.04170383834838867, 0.041514305114746096, 0.04152700805664063, 0.04138252639770508, 0.04154812622070313, 0.04136140823364258, 0.041530784606933595, 0.041536094665527344, 0.04146732711791992, 0.0414029426574707, 0.041332736968994144, 0.04153548812866211, 0.041594879150390625, 0.041662464141845705, 0.04210476684570313, 0.04222777557373047, 0.04210217666625977, 0.041939552307128904, 0.041903488159179686, 0.04199209594726563, 0.041925342559814456, 0.041938880920410156, 0.04189936065673828, 
0.041888160705566405, 0.04179759979248047, 0.04149488067626953, 0.04144070434570313, 0.04170809555053711, 0.04195123291015625, 0.042231807708740236, 0.042077312469482424, 0.0421077766418457, 0.04214988708496094, 0.04213497543334961, 0.042076736450195315, 0.041998336791992184, 0.041859073638916014, 0.04185903930664062, 0.04185295867919922, 0.041918464660644535, 0.04189388656616211, 0.04203855895996094, 0.04227964782714844, 0.042461185455322265, 0.042625022888183595, 0.042412513732910155, 0.04181779098510742, 0.041842689514160154, 0.04160953521728516, 0.041594879150390625, 0.041449440002441405, 0.041483966827392575, 0.041774974822998044, 0.04129766464233398, 0.04134355163574219, 0.04119363021850586, 0.04124380874633789, 0.041315006256103515, 0.04149580764770508, 0.04221020889282227, 0.042081825256347655, 0.04187763214111328, 0.04197411346435547, 0.04170694351196289, 0.04157497787475586, 0.04152115249633789, 0.041371105194091796, 0.04130460739135742, 0.04149817657470703, 0.04164243316650391, 0.04184195327758789, 0.041847518920898434, 0.04151033782958984, 0.04152377700805664, 0.041545726776123046, 0.04206339263916015, 0.04206963348388672, 0.04213222503662109, 0.042210529327392575, 0.04200537490844727, 0.045676544189453126, 0.0412119026184082, 0.041801727294921875, 0.04176857757568359, 0.04140233612060547, 0.04146166229248047, 0.04166502380371094, 0.042176513671875, 0.04210217666625977, 0.04187606430053711, 0.04191231918334961, 0.042102783203125, 0.04204719924926758, 0.04205766296386719, 0.04211465454101562, 0.042046207427978516, 0.04174419021606445, 0.041740478515625, 0.04180287933349609, 0.04184492874145508, 0.041787582397460936, 0.042938880920410157, 0.04273152160644531, 0.04256089782714844, 0.04228160095214844, 0.042270721435546874, 0.04231564712524414, 0.042205310821533205, 0.04216844940185547, 0.04145151901245117, 0.04168294525146484, 0.04151091384887695, 0.04124787139892578, 0.041180030822753906, 0.0410335693359375, 0.04105231857299805, 0.04126841735839844, 0.04158687973022461, 0.04188016128540039, 0.04193862533569336, 0.04197001647949219, 0.042188800811767575, 0.04196352005004883, 0.04163900756835937, 0.041503326416015625, 0.04133100891113281, 0.04149187088012695, 0.04138454437255859, 0.04136345672607422, 0.04144537734985351, 0.04138598251342773, 0.041189407348632814, 0.0415846061706543, 0.04145065689086914, 0.04163670349121094, 0.04202867126464844, 0.04203148651123047, 0.04193280029296875, 0.042123264312744144, 0.04204748916625976, 0.041825439453125, 0.04178927993774414, 0.04188387298583984, 0.04199094390869141, 0.041963329315185545, 0.04206406402587891, 0.041902080535888675, 0.041455615997314454, 0.0413675537109375, 0.04165222549438476, 0.04222719955444336, 0.04210265731811524, 0.04207884979248047, 0.04205372619628906, 0.043654239654541016, 0.041900993347167965, 0.042016639709472656, 0.04192665481567383, 0.042011871337890624, 0.041583393096923826, 0.04145971298217774, 0.041524864196777346, 0.04175247955322266, 0.042248672485351565, 0.04255539321899414, 0.04247289657592773, 0.04257759857177734, 0.0425984001159668, 0.04235283279418945, 0.04225904083251953, 0.04215407943725586, 0.04226047897338867, 0.04149248123168945, 0.041390079498291016, 0.04109721755981445, 0.04084121704101563, 0.040971328735351566, 0.0410296630859375, 0.04113091278076172, 0.04122623825073242, 0.04182220840454102, 0.042237438201904294, 0.04228278350830078, 0.042187488555908204, 0.04202809524536133, 0.041718719482421875, 0.0414700813293457, 0.041636894226074216, 0.041581214904785155, 0.04136982345581055, 
0.04133679962158203, 0.042700801849365234, 0.04126310348510742, 0.04144947052001953, 0.04136345672607422, 0.04159065628051758, 0.042014846801757814, 0.042039295196533204, 0.041859073638916014, 0.04171356964111328, 0.042330207824707033, 0.042102783203125, 0.041734142303466795, 0.041651775360107425, 0.04177305603027344, 0.04190604782104492, 0.041835071563720704, 0.041737632751464845, 0.041686912536621094, 0.041965694427490235, 0.042105438232421875, 0.04217036819458008, 0.04249599838256836, 0.042030303955078126, 0.04205196762084961, 0.04200694274902344, 0.04190412902832031, 0.04186675262451172, 0.04162713623046875, 0.04163792037963867, 0.04163267135620117, 0.04155398559570313, 0.04174848175048828, 0.041735359191894535, 0.04228179168701172, 0.04230694580078125, 0.04230601501464844, 0.04234870529174805, 0.042418174743652344, 0.04224409484863281, 0.042286144256591794, 0.04216099166870117, 0.042100833892822265, 0.04194508743286133, 0.04196147155761719, 0.04108697509765625, 0.04118307113647461, 0.04115635299682617, 0.041353633880615234, 0.041750526428222655, 0.04174339294433594, 0.04178633499145508, 0.04182220840454102, 0.041885311126708985, 0.04188166427612305, 0.041906558990478515, 0.04194815826416016, 0.0420994873046875, 0.04168515014648438, 0.04151910400390625, 0.041280513763427736, 0.041366527557373044, 0.04135430526733398, 0.04130502319335937, 0.04132649612426758, 0.041990238189697264, 0.04181196975708008, 0.04170918273925781, 0.04196905517578125, 0.041837440490722654, 0.04185916900634765, 0.041842079162597655, 0.04192931365966797, 0.04191641616821289, 0.04192659378051758, 0.04175468826293945, 0.04175807952880859, 0.04185356903076172, 0.0416993293762207, 0.041606143951416014, 0.0418458251953125, 0.04268230438232422, 0.04219084930419922, 0.042188800811767575, 0.042039295196533204, 0.042040382385253906, 0.042427326202392576, 0.041867263793945314, 0.041858592987060544, 0.04179558563232422, 0.04167625427246094, 0.041672767639160155, 0.041661376953125, 0.04191999816894531, 0.04192716979980469, 0.04230758285522461, 0.04248953628540039, 0.04238943862915039, 0.0422158088684082, 0.04223385620117188, 0.04225827026367188, 0.04222991943359375, 0.042162174224853514, 0.0421396484375, 0.04198121643066406, 0.042310367584228514, 0.04256764984130859, 0.04218931198120117, 0.04143718338012695, 0.04183244705200195, 0.04184473419189453, 0.04166656112670898, 0.04151500701904297, 0.041629695892333986, 0.041619457244873044, 0.04173619079589844, 0.04179945755004883, 0.0417196159362793, 0.04173865509033203, 0.04158230209350586, 0.04155350494384766, 0.04166521453857422, 0.04183859252929688, 0.04190719985961914, 0.04169728088378906, 0.04167987060546875, 0.04191641616821289, 0.04195043182373047, 0.041795902252197266, 0.041561759948730466, 0.04135404968261719, 0.04153276824951172, 0.04151145553588867, 0.04145779037475586, 0.04164799880981445, 0.04148851013183594, 0.041514015197753905, 0.04150985717773437, 0.0416993293762207, 0.042060928344726564, 0.04236710357666015, 0.04247571182250977, 0.04227884674072266, 0.0421525764465332, 0.04189558410644531, 0.041812320709228516, 0.041717758178710936, 0.04181305694580078, 0.0419480972290039, 0.04168294525146484, 0.04158041763305664, 0.041447551727294925, 0.04170908737182617, 0.042264671325683595, 0.04232172775268555, 0.04210704040527344, 0.042234272003173826, 0.04222566223144531, 0.04240793609619141, 0.042231807708740236, 0.04220006561279297, 0.041964511871337894, 0.04187139129638672, 0.041919742584228516, 0.041949951171875, 0.042526016235351564, 0.04299174499511719, 
0.042883647918701175, 0.04273136138916016, 0.04252396774291992]",tokens/s,23.929052932037653,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2050.760704,2155.741184,0.0,1753.219072,1633.407488,s,1,8.9447646484375,8.9447646484375,0.0,8.9447646484375,8.9447646484375,8.9447646484375,8.9447646484375,[8.9447646484375],,kWh,5.217082972501051e-05,5.747039846359296e-06,1.5699457003998107e-05,7.361732657536791e-05,,MB,1642.692608,2241.724416,0.0,1824.52224,1762.836992,s,10,0.820909408569336,0.0820909408569336,0.0003810316196820429,0.08209516906738282,0.08251952972412109,0.08253438873291015,0.0825462759399414,"[0.08139862060546875, 0.08184774780273438, 0.082453857421875, 0.08224979400634766, 0.08173474884033204, 0.08251622772216796, 0.08194054412841797, 0.08254924774169922, 0.08243746948242188, 0.08178115081787109]",tokens/s,3118.4927024548488,kWh,2.431554430034579e-06,2.681564053762577e-07,1.6127975865333025e-06,4.312508421944139e-06,tokens/kWh,59362202.911267966,MB,1653.854208,2430.468096,0.0,2013.26592,1853.210112,s,10,12.950849243164063,1.2950849243164064,0.009816788785648081,1.2957139892578124,1.3078525024414063,1.3093563659667968,1.3105594567871093,"[1.301109375, 1.295614990234375, 1.29581298828125, 1.297327880859375, 1.3108602294921874, 1.307518310546875, 1.292753662109375, 1.290372802734375, 1.2766165771484375, 1.2828624267578126]",tokens/s,48.645458546476185,kWh,3.8523322572047385e-05,4.248749020660395e-06,2.11083872570667e-05,6.388045884977449e-05,tokens/kWh,986217.0863261168,,s,630,12.947786214828492,0.020552041610838873,0.00031355362301329113,0.02051806354522705,0.02090762538909912,0.021052077293395997,0.021661486034393322,"[0.02103696060180664, 0.020948352813720702, 0.020642335891723634, 0.020533599853515626, 0.02046976089477539, 0.02062233543395996, 0.020814847946166993, 0.02072979164123535, 0.02053436851501465, 0.020465791702270506, 0.02118947219848633, 0.020557695388793946, 0.0203633918762207, 0.02027622413635254, 0.020359743118286134, 0.02042025566101074, 0.02031660842895508, 0.020236000061035157, 0.020390527725219727, 0.0204400634765625, 0.02059516716003418, 0.02109494400024414, 0.0204083194732666, 0.020440351486206054, 0.02040070343017578, 0.020439199447631836, 0.020832256317138673, 0.020600032806396485, 0.0205893440246582, 0.020483936309814453, 0.020471839904785155, 0.0204453125, 0.020314111709594726, 0.020407871246337892, 0.020797664642333985, 0.020383968353271484, 0.020346847534179688, 0.020907840728759765, 0.02085500717163086, 0.020536832809448242, 0.020666879653930666, 0.020486143112182616, 0.020516063690185545, 0.020800479888916014, 0.022023551940917967, 0.021747360229492186, 0.020801887512207032, 0.020674400329589844, 0.020625568389892577, 0.02062758445739746, 0.020846912384033203, 0.020520959854125977, 0.020479999542236327, 0.020287488937377928, 0.02047920036315918, 0.020568864822387695, 0.020580352783203124, 0.02064793586730957, 
0.02064384078979492, 0.020805631637573242, 0.021045248031616212, 0.021174272537231444, 0.021020671844482423, 0.02102217674255371, 0.020866016387939453, 0.020649663925170897, 0.020746559143066407, 0.02056387138366699, 0.0204815673828125, 0.020542015075683595, 0.020645151138305663, 0.020678592681884767, 0.020711679458618164, 0.02053945541381836, 0.020306400299072266, 0.020330495834350586, 0.02033452796936035, 0.020326784133911133, 0.020378944396972656, 0.020471744537353516, 0.02050924873352051, 0.020428735733032225, 0.02045120048522949, 0.020430368423461916, 0.020420831680297853, 0.02056438446044922, 0.020372447967529298, 0.020450239181518556, 0.020496383666992187, 0.02063974380493164, 0.02065180778503418, 0.02045180892944336, 0.020286304473876953, 0.020171072006225584, 0.020208223342895508, 0.02043289566040039, 0.020641376495361328, 0.020662687301635743, 0.020505664825439453, 0.021004831314086914, 0.020582656860351562, 0.0205579833984375, 0.0207096004486084, 0.020571935653686525, 0.02061516761779785, 0.020584447860717774, 0.02042265510559082, 0.02049843215942383, 0.020624799728393553, 0.020771135330200197, 0.02057040023803711, 0.02064588737487793, 0.020699136734008788, 0.020494400024414064, 0.020443424224853516, 0.020505695343017577, 0.02049648094177246, 0.020494815826416015, 0.020584447860717774, 0.020686847686767578, 0.02065113639831543, 0.020618112564086914, 0.020649984359741212, 0.020727039337158203, 0.020976383209228514, 0.020757728576660157, 0.021174720764160156, 0.021245344161987305, 0.02102128028869629, 0.021217279434204102, 0.02099404716491699, 0.021436256408691408, 0.02094095993041992, 0.020658464431762696, 0.020688608169555665, 0.020998144149780275, 0.02076233673095703, 0.020577951431274413, 0.020606815338134767, 0.020578399658203125, 0.020661056518554686, 0.020576000213623047, 0.020693119049072266, 0.02081177520751953, 0.020600223541259767, 0.020826719284057618, 0.02045257568359375, 0.02051750373840332, 0.02051862335205078, 0.02052681541442871, 0.02052403259277344, 0.020508384704589842, 0.020373504638671876, 0.020428768157958986, 0.020430240631103515, 0.02034668731689453, 0.020490848541259765, 0.02047407913208008, 0.02030499267578125, 0.02048092842102051, 0.02047113609313965, 0.021745887756347657, 0.020410816192626954, 0.02047385597229004, 0.020417856216430663, 0.02039910316467285, 0.020382528305053712, 0.02026995277404785, 0.02024448013305664, 0.02033772850036621, 0.020260128021240234, 0.020290239334106445, 0.020507200241088867, 0.020244735717773438, 0.020322463989257813, 0.020907615661621092, 0.02028303909301758, 0.020282432556152342, 0.020313312530517578, 0.020471584320068358, 0.020486848831176758, 0.020281663894653322, 0.020104896545410155, 0.020215007781982423, 0.020359968185424803, 0.020227104187011718, 0.02052377510070801, 0.02044745635986328, 0.020397279739379885, 0.020836095809936523, 0.020742464065551757, 0.020508672714233397, 0.020746240615844725, 0.020298816680908202, 0.02020652770996094, 0.02024380874633789, 0.020136192321777345, 0.020191648483276366, 0.02039094352722168, 0.020277919769287108, 0.02020310401916504, 0.020261247634887694, 0.020247936248779297, 0.020355648040771484, 0.020306047439575196, 0.020400415420532225, 0.020297632217407227, 0.02030112075805664, 0.020750656127929687, 0.02059516716003418, 0.02140943908691406, 0.020523359298706054, 0.02048115158081055, 0.020384639739990235, 0.020330528259277343, 0.020244287490844726, 0.020352287292480467, 0.02057219123840332, 0.02035923194885254, 0.020699935913085936, 0.02044927978515625, 0.020587776184082033, 
0.02044595146179199, 0.020369407653808593, 0.0204017276763916, 0.020507072448730467, 0.020493375778198243, 0.020351167678833007, 0.020529024124145506, 0.02043766403198242, 0.020508800506591797, 0.020582496643066408, 0.020625408172607423, 0.020514911651611328, 0.020477855682373047, 0.020609024047851563, 0.020768768310546876, 0.02087116813659668, 0.021124095916748048, 0.021268512725830076, 0.02117068862915039, 0.02108860778808594, 0.021057664871215822, 0.020953088760375976, 0.021000192642211913, 0.02085430335998535, 0.02091801643371582, 0.020925151824951173, 0.020885631561279296, 0.020858272552490235, 0.020906463623046875, 0.020802751541137695, 0.02071583938598633, 0.020875104904174803, 0.020832351684570313, 0.020883520126342772, 0.02072166442871094, 0.020760704040527343, 0.020874399185180664, 0.02068931198120117, 0.02071174430847168, 0.02103811264038086, 0.020685792922973633, 0.02076019287109375, 0.020858591079711913, 0.02191222381591797, 0.021987327575683592, 0.02093164825439453, 0.020803903579711912, 0.020694976806640626, 0.020684959411621094, 0.02078335952758789, 0.02058678436279297, 0.020731903076171874, 0.02063155174255371, 0.020926464080810548, 0.020823808670043947, 0.020670719146728515, 0.02060697555541992, 0.02067580795288086, 0.02066115188598633, 0.02074201583862305, 0.020590368270874022, 0.020690847396850585, 0.02087763214111328, 0.020832191467285155, 0.020838464736938477, 0.020639232635498047, 0.020416255950927734, 0.020678592681884767, 0.020702016830444335, 0.020681888580322265, 0.020808000564575196, 0.020787200927734374, 0.02082815933227539, 0.020855424880981445, 0.020809247970581056, 0.02080169677734375, 0.020914464950561523, 0.020860864639282228, 0.0207869758605957, 0.020889823913574218, 0.02078447914123535, 0.020771488189697266, 0.020760576248168947, 0.020772863388061523, 0.020723552703857423, 0.02062351989746094, 0.0206759033203125, 0.021006240844726562, 0.020742176055908203, 0.020595455169677736, 0.020673856735229493, 0.02106643295288086, 0.02078838348388672, 0.02095734405517578, 0.021004480361938478, 0.020762624740600585, 0.020787200927734374, 0.02072166442871094, 0.020757951736450196, 0.02080415916442871, 0.0207127685546875, 0.020556480407714843, 0.020782304763793946, 0.0206396484375, 0.020701152801513672, 0.02090438461303711, 0.02089369583129883, 0.020913984298706053, 0.020934431076049805, 0.020927583694458008, 0.02086479949951172, 0.020733951568603515, 0.02062745666503906, 0.020692800521850584, 0.020662752151489258, 0.020674591064453126, 0.020618240356445314, 0.020660192489624023, 0.02074684715270996, 0.020675743103027344, 0.02072643280029297, 0.020756383895874024, 0.020775327682495116, 0.020671712875366212, 0.02060163116455078, 0.020568063735961914, 0.020674560546875, 0.02059391975402832, 0.020525760650634765, 0.020611135482788086, 0.020887168884277343, 0.020908416748046874, 0.021141504287719725, 0.02076246452331543, 0.02070047950744629, 0.020812639236450194, 0.020840448379516603, 0.02145484733581543, 0.020786304473876954, 0.02106243133544922, 0.020781152725219725, 0.021020095825195314, 0.020907712936401368, 0.020628639221191406, 0.020498207092285156, 0.020443071365356447, 0.020504640579223632, 0.020522943496704103, 0.02058860778808594, 0.020605920791625976, 0.020460256576538084, 0.020559263229370118, 0.02079420852661133, 0.02072972869873047, 0.02081760025024414, 0.020756927490234375, 0.021157632827758788, 0.020894367218017577, 0.020692991256713866, 0.020768768310546876, 0.020774911880493165, 0.02087321662902832, 0.020759807586669923, 0.02053196716308594, 
0.020602880477905275, 0.020754592895507813, 0.02057161521911621, 0.020451711654663085, 0.020502527236938475, 0.020393983840942383, 0.020302112579345704, 0.02041142463684082, 0.020396928787231445, 0.020442943572998046, 0.020330495834350586, 0.020291296005249024, 0.020465951919555664, 0.02029132843017578, 0.020537599563598633, 0.020158464431762696, 0.020559871673583984, 0.020547391891479493, 0.020400320053100586, 0.020862079620361327, 0.021105056762695314, 0.020568544387817383, 0.020279296875, 0.020198816299438475, 0.02015292739868164, 0.020235488891601563, 0.020111616134643556, 0.020197919845581055, 0.020364927291870116, 0.020340768814086915, 0.02018339157104492, 0.02030182456970215, 0.02012774467468262, 0.020192928314208984, 0.020273120880126953, 0.020296064376831055, 0.020462848663330077, 0.02049510383605957, 0.020393503189086913, 0.020557535171508788, 0.020365760803222655, 0.02049295997619629, 0.020489311218261717, 0.020397727966308593, 0.02038198471069336, 0.022459007263183593, 0.021350400924682617, 0.020568000793457032, 0.020502592086791994, 0.020582399368286132, 0.02046175956726074, 0.02039520072937012, 0.02035366439819336, 0.020776735305786134, 0.020326623916625975, 0.020825279235839843, 0.020764575958251954, 0.0211190071105957, 0.02085340881347656, 0.020918655395507812, 0.021067615509033202, 0.020846336364746094, 0.02070742416381836, 0.020406431198120117, 0.020578304290771485, 0.020729600906372072, 0.02058902359008789, 0.020731231689453126, 0.020548032760620116, 0.020389535903930663, 0.020527360916137695, 0.020406400680541992, 0.020463584899902344, 0.020493696212768555, 0.020388639450073243, 0.02044198417663574, 0.02068134307861328, 0.02043734359741211, 0.02064793586730957, 0.020591840744018555, 0.020681503295898438, 0.02052662467956543, 0.020474271774291994, 0.020321855545043944, 0.020326911926269533, 0.02029529571533203, 0.020360864639282227, 0.020347616195678712, 0.020514144897460937, 0.02029225540161133, 0.02019273567199707, 0.02051535987854004, 0.020256767272949217, 0.020133888244628906, 0.020242143630981445, 0.020094655990600587, 0.02012015914916992, 0.020207616806030275, 0.020084192276000976, 0.020060096740722656, 0.020101728439331053, 0.020153375625610353, 0.020128671646118163, 0.02015558433532715, 0.020209760665893556, 0.02004047966003418, 0.020270624160766602, 0.02021833610534668, 0.021292768478393554, 0.020646175384521483, 0.02047536087036133, 0.020318496704101564, 0.020189279556274413, 0.020050432205200194, 0.020317855834960936, 0.02138492774963379, 0.021387487411499023, 0.02052716827392578, 0.020625024795532226, 0.020492895126342774, 0.020262527465820312, 0.020384159088134766, 0.020330495834350586, 0.02039151954650879, 0.02032271957397461, 0.020254720687866212, 0.020170751571655272, 0.020414432525634765, 0.02024825668334961, 0.020236095428466796, 0.02031216049194336, 0.02030745506286621, 0.020087743759155275, 0.0201474552154541, 0.020140607833862303, 0.02023852729797363, 0.02015804862976074, 0.02001251220703125, 0.0199768009185791, 0.02006051254272461, 0.02004582405090332, 0.02000486373901367, 0.02001263999938965, 0.020183456420898437, 0.020072704315185548, 0.02010905647277832, 0.020084735870361328, 0.02002908706665039, 0.01999260711669922, 0.02006662368774414, 0.020080608367919923, 0.02017616081237793, 0.020085311889648436, 0.020123199462890626, 0.020181631088256834, 0.020336063385009765, 0.020152448654174804, 0.02024822425842285, 0.02021046447753906, 0.020377824783325196, 0.020509920120239257, 0.02025734329223633, 0.020129791259765627, 0.02021785545349121, 
0.020137983322143553, 0.020191167831420897, 0.020156448364257812, 0.020160287857055665, 0.021030399322509767, 0.020345600128173828, 0.020401472091674804, 0.02055423927307129, 0.020398271560668944, 0.020434335708618166, 0.020381471633911134, 0.020415136337280274, 0.020535455703735352, 0.020502527236938475, 0.020486143112182616, 0.02057756805419922, 0.02037583923339844, 0.020788608551025392, 0.02080134391784668, 0.02042665672302246, 0.02034569549560547, 0.02015001678466797, 0.02010758399963379, 0.020158464431762696, 0.020191232681274415, 0.02006768035888672, 0.02024435234069824, 0.020149023056030273, 0.020084735870361328, 0.020121152877807618, 0.020219423294067382, 0.02016659164428711, 0.020292352676391602, 0.020127967834472658, 0.02026825523376465, 0.02040649604797363, 0.0205828800201416, 0.020317279815673828, 0.020576480865478516, 0.020337024688720704, 0.020308000564575195, 0.020321887969970705, 0.020431583404541015, 0.02038755226135254, 0.02030828857421875, 0.020273183822631834, 0.020271104812622072, 0.02021504020690918, 0.020214847564697266, 0.0201060791015625, 0.020124319076538087, 0.020074016571044923, 0.02011734390258789, 0.02012406349182129, 0.020184831619262697, 0.020085216522216797, 0.020280736923217774, 0.020137056350708008, 0.020216608047485353, 0.02020240020751953, 0.020289215087890625, 0.020254783630371094, 0.020336896896362304, 0.020633567810058595, 0.02055740737915039, 0.0205251522064209, 0.020502880096435548, 0.02056959915161133, 0.020681215286254884, 0.020753696441650392, 0.02185420799255371, 0.020554464340209962, 0.02047590446472168, 0.020377599716186523, 0.020600831985473633, 0.02077686309814453, 0.020398208618164063, 0.020363231658935547, 0.020445184707641603, 0.020274911880493164]",tokens/s,48.65696649196221,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1389.297664,1490.944,0.0,1088.421888,1083.532288,s,1,8.9243994140625,8.9243994140625,0.0,8.9243994140625,8.9243994140625,8.9243994140625,8.9243994140625,[8.9243994140625],,kWh,3.613836400418222e-05,3.979057743158005e-06,1.1381397994003617e-05,5.149881974134384e-05,,MB,1561.382912,1608.384512,0.0,1191.182336,1170.870784,s,10,0.5586800003051757,0.055868000030517576,9.41109220710843e-05,0.055865552902221685,0.055997745132446285,0.05599810390472412,0.05599839092254639,"[0.05599846267700195, 0.0558480339050293, 0.055932926177978515, 0.05593750381469727, 0.055883071899414063, 0.055759361267089844, 0.05577097702026367, 0.055997665405273435, 0.05583481597900391, 0.055717182159423825]",tokens/s,4582.2295385580555,kWh,1.6736746631665178e-06,1.8457480615890098e-07,1.1037659623771727e-06,2.9620154317025917e-06,tokens/kWh,86427638.8502301,MB,1605.054464,1650.327552,0.0,1233.125376,1170.873344,s,10,14.828799316406249,1.482879931640625,0.014564010141743305,1.4859351806640624,1.4954535278320313,1.5026887390136718,1.5084769079589844,"[1.4645113525390625, 1.5099239501953126, 1.4931243896484374, 
1.4914649658203125, 1.4682288818359375, 1.485005615234375, 1.493845703125, 1.48686474609375, 1.461979248046875, 1.4738504638671874]",tokens/s,42.48489621833254,kWh,4.3651673298084156e-05,4.814416030957779e-06,1.9266361444823004e-05,6.773245077386493e-05,tokens/kWh,930130.2297525756,,s,630,14.825771121978772,0.023532970034886923,0.0004897955124724398,0.023449696540832517,0.02396088237762451,0.024104425621032714,0.024972771606445315,"[0.024080320358276366, 0.023435359954833986, 0.023489696502685547, 0.023189855575561524, 0.023259584426879882, 0.023294015884399413, 0.023341312408447265, 0.023178815841674805, 0.023238271713256837, 0.023084640502929688, 0.023024608612060547, 0.02316828727722168, 0.02310537528991699, 0.02323744010925293, 0.02314188766479492, 0.023609920501708983, 0.023119680404663084, 0.023242944717407225, 0.02385308837890625, 0.023222240447998047, 0.023427072525024413, 0.023458911895751954, 0.023336896896362303, 0.023318592071533202, 0.0232225284576416, 0.023194047927856447, 0.0231549129486084, 0.023173120498657225, 0.023117023468017577, 0.023096096038818358, 0.02302566337585449, 0.023002559661865235, 0.023007808685302736, 0.02302886390686035, 0.02325606346130371, 0.023048063278198243, 0.02319532775878906, 0.02326323127746582, 0.023002527236938478, 0.02305286407470703, 0.023009567260742186, 0.023084800720214845, 0.023216447830200194, 0.023427072525024413, 0.023339008331298827, 0.023299583435058592, 0.023717632293701173, 0.0232491512298584, 0.02342144012451172, 0.023189504623413085, 0.02333228874206543, 0.023226879119873048, 0.023093311309814454, 0.023064159393310548, 0.02315715217590332, 0.023418880462646483, 0.023175167083740233, 0.023166976928710937, 0.02310758399963379, 0.023219295501708984, 0.0230982723236084, 0.023537792205810548, 0.02305353546142578, 0.02380188751220703, 0.02346188735961914, 0.023433216094970705, 0.023612831115722658, 0.023580352783203126, 0.023612640380859376, 0.02360643196105957, 0.023796255111694337, 0.02369536018371582, 0.02369945526123047, 0.023840768814086914, 0.023757951736450195, 0.023636863708496093, 0.023829824447631837, 0.02387014389038086, 0.023814367294311523, 0.023818016052246094, 0.023924736022949217, 0.024011968612670898, 0.023757152557373047, 0.02384739112854004, 0.0241582088470459, 0.02413315200805664, 0.024144224166870117, 0.0283568000793457, 0.024472927093505858, 0.02386390495300293, 0.023884864807128908, 0.02364633560180664, 0.023538240432739256, 0.02350105667114258, 0.02359212875366211, 0.023781248092651367, 0.02401686477661133, 0.02390934371948242, 0.023858335494995116, 0.024251232147216795, 0.02411520004272461, 0.02409676742553711, 0.024868864059448242, 0.023871488571166992, 0.02415190315246582, 0.023916479110717773, 0.023910303115844727, 0.02405177688598633, 0.023924768447875975, 0.023787744522094728, 0.02387353515625, 0.02396723175048828, 0.023948991775512695, 0.02398201560974121, 0.02405465507507324, 0.023910335540771484, 0.023908416748046876, 0.023788991928100585, 0.023697376251220703, 0.024951391220092774, 0.024123615264892578, 0.023768863677978515, 0.023838720321655273, 0.023836223602294922, 0.02376108741760254, 0.023861503601074217, 0.024375295639038085, 0.02368307113647461, 0.023586048126220702, 0.023480863571166993, 0.02352355194091797, 0.023418880462646483, 0.023400447845458985, 0.02346988868713379, 0.023466304779052736, 0.023365791320800782, 0.023379711151123046, 0.023443424224853515, 0.023480224609375, 0.02358233642578125, 0.023527040481567382, 0.023339872360229493, 0.023357311248779298, 0.02364633560180664, 
0.023586816787719726, 0.023363584518432616, 0.023386112213134767, 0.02343731117248535, 0.023404544830322265, 0.02336131286621094, 0.023422271728515624, 0.02335772705078125, 0.02357302474975586, 0.023727935791015627, 0.023689504623413085, 0.02371379280090332, 0.023811967849731445, 0.023865087509155274, 0.023978368759155273, 0.024102912902832032, 0.023746240615844728, 0.02385708808898926, 0.023726463317871094, 0.023771135330200196, 0.023796960830688475, 0.024208160400390626, 0.023963647842407225, 0.02398748779296875, 0.024105663299560546, 0.02373344039916992, 0.02371900749206543, 0.023780704498291016, 0.024078752517700194, 0.02386124801635742, 0.02375663948059082, 0.02384502410888672, 0.023885696411132813, 0.023734399795532227, 0.023889663696289063, 0.02365056037902832, 0.024160255432128908, 0.02368716812133789, 0.023760831832885743, 0.023681087493896483, 0.023769023895263672, 0.023725568771362306, 0.02374300765991211, 0.023799711227416993, 0.02393052864074707, 0.023983423233032226, 0.023741823196411133, 0.023689504623413085, 0.023703615188598634, 0.023465856552124024, 0.023465471267700197, 0.023297664642333984, 0.023518144607543947, 0.023289344787597657, 0.023584415435791015, 0.02349951934814453, 0.02310563278198242, 0.023395904541015623, 0.023374271392822266, 0.02333286476135254, 0.023340864181518553, 0.02352060890197754, 0.023499616622924803, 0.023513088226318358, 0.023357440948486328, 0.023459840774536132, 0.023398399353027344, 0.023478271484375, 0.023398399353027344, 0.02330009651184082, 0.02319923210144043, 0.023279264450073243, 0.023630399703979493, 0.023262752532958984, 0.023255231857299805, 0.023157312393188478, 0.02350611114501953, 0.023286592483520507, 0.023709632873535155, 0.02350611114501953, 0.023317440032958985, 0.023353216171264648, 0.02323849678039551, 0.023271615982055665, 0.0232644157409668, 0.023219295501708984, 0.023351232528686525, 0.023875423431396484, 0.023771135330200196, 0.024260223388671873, 0.02365683174133301, 0.023642112731933593, 0.024040704727172853, 0.02860700798034668, 0.02451273536682129, 0.023969791412353517, 0.023783424377441405, 0.024262271881103515, 0.023767423629760743, 0.023885568618774413, 0.023904512405395508, 0.02377462387084961, 0.023757408142089844, 0.02449407958984375, 0.023969919204711913, 0.02389900779724121, 0.023960575103759766, 0.023736223220825196, 0.024087072372436524, 0.023568384170532225, 0.02331648063659668, 0.02324224090576172, 0.0234337272644043, 0.023390207290649414, 0.02344960021972656, 0.023292991638183595, 0.023360448837280272, 0.02445516777038574, 0.024715263366699217, 0.02373222351074219, 0.02348646354675293, 0.023272960662841798, 0.02327779197692871, 0.023290399551391602, 0.023088895797729492, 0.02305638313293457, 0.02308710479736328, 0.023378015518188477, 0.02323036766052246, 0.023166976928710937, 0.023119871139526366, 0.023137664794921874, 0.023089792251586912, 0.023398303985595705, 0.02315657615661621, 0.023068895339965822, 0.023322271347045898, 0.023335103988647462, 0.023426368713378908, 0.023847808837890627, 0.02333807945251465, 0.023606176376342772, 0.02345369529724121, 0.02326118469238281, 0.023283519744873048, 0.02323420715332031, 0.023265823364257813, 0.023216127395629883, 0.02327142333984375, 0.02337615966796875, 0.023186208724975586, 0.023255615234375, 0.0233701114654541, 0.02313222312927246, 0.02321401596069336, 0.023107135772705078, 0.02322502326965332, 0.02314396858215332, 0.02298307228088379, 0.02321161651611328, 0.023240928649902345, 0.023408639907836915, 0.02304435157775879, 0.022978303909301757, 
0.023037599563598632, 0.023012704849243164, 0.022985631942749024, 0.022941791534423828, 0.022980607986450196, 0.023013376235961915, 0.02297225570678711, 0.02310918426513672, 0.02302764892578125, 0.02300332832336426, 0.023013311386108397, 0.023155967712402345, 0.02326815986633301, 0.023365631103515624, 0.024041471481323243, 0.023793664932250977, 0.023932640075683593, 0.027072799682617187, 0.02384486389160156, 0.02404310417175293, 0.023908607482910155, 0.023839935302734375, 0.023821279525756835, 0.02406399917602539, 0.02394726371765137, 0.02397161674499512, 0.023857376098632813, 0.024035327911376952, 0.02384486389160156, 0.023866592407226564, 0.0239051513671875, 0.023879552841186525, 0.023517215728759765, 0.023459840774536132, 0.02344304084777832, 0.023664703369140627, 0.024136032104492187, 0.02354380798339844, 0.02344313621520996, 0.02351750373840332, 0.023413951873779298, 0.02340127944946289, 0.0233123836517334, 0.02318505668640137, 0.023220575332641602, 0.023387935638427733, 0.02336073684692383, 0.023219200134277345, 0.023127552032470702, 0.023101951599121092, 0.023093248367309572, 0.02308095932006836, 0.023396352767944335, 0.023528671264648436, 0.02342959976196289, 0.023415103912353515, 0.023556095123291015, 0.023259136199951173, 0.02338159942626953, 0.023474592208862305, 0.02330828857421875, 0.023239967346191406, 0.023319263458251954, 0.02357062339782715, 0.02364201545715332, 0.023572095870971678, 0.02339254379272461, 0.023463935852050782, 0.023297183990478514, 0.02330300712585449, 0.024091743469238282, 0.023700384140014647, 0.023778783798217774, 0.023806400299072265, 0.024061952590942383, 0.025151615142822267, 0.023991296768188477, 0.023989023208618163, 0.023871519088745116, 0.023828447341918944, 0.02377337646484375, 0.023787519454956055, 0.023738143920898437, 0.02370172882080078, 0.023835968017578125, 0.02392518424987793, 0.02386319923400879, 0.023855455398559572, 0.023721439361572266, 0.02394371223449707, 0.023773183822631837, 0.02369923210144043, 0.02360905647277832, 0.02357708740234375, 0.02362710380554199, 0.023601823806762696, 0.023822336196899413, 0.02332851219177246, 0.02320512008666992, 0.023129087448120117, 0.023085056304931642, 0.023202016830444337, 0.023459808349609375, 0.023586879730224608, 0.023824287414550782, 0.02356617546081543, 0.02337926483154297, 0.023364288330078125, 0.02329804801940918, 0.02321379280090332, 0.023105823516845703, 0.02307075119018555, 0.023103456497192382, 0.02314409637451172, 0.023134336471557618, 0.023795936584472658, 0.02367625617980957, 0.023642879486083984, 0.023278944015502928, 0.025416255950927735, 0.023953407287597657, 0.023733375549316406, 0.02371878433227539, 0.023767040252685546, 0.02377334403991699, 0.023987648010253906, 0.02380633544921875, 0.023760992050170897, 0.023776863098144533, 0.023796064376831055, 0.023635871887207033, 0.023786720275878907, 0.024753023147583007, 0.024047584533691407, 0.023824607849121094, 0.023921695709228516, 0.023898719787597656, 0.023440927505493165, 0.02331862449645996, 0.023365440368652343, 0.02333305549621582, 0.023491008758544922, 0.023368000030517578, 0.02349193572998047, 0.02329430389404297, 0.02311404800415039, 0.02332057571411133, 0.023652063369750977, 0.02412348747253418, 0.02337811279296875, 0.02342428779602051, 0.023455968856811525, 0.023310848236083984, 0.02349260711669922, 0.023439359664916993, 0.02347216033935547, 0.02395132827758789, 0.02498150444030762, 0.024293600082397462, 0.023942943572998046, 0.02391244888305664, 0.023897247314453127, 0.02376140785217285, 0.023851360321044922, 
0.023828447341918944, 0.023926399230957032, 0.02376531219482422, 0.023902048110961915, 0.02385536003112793, 0.023965696334838867, 0.023848960876464844, 0.023767295837402343, 0.023872352600097655, 0.023933855056762696, 0.02354899215698242, 0.02338630485534668, 0.023329055786132813, 0.02346416091918945, 0.02365446472167969, 0.023449792861938476, 0.023217824935913085, 0.023310848236083984, 0.023102527618408204, 0.02310223960876465, 0.023491680145263674, 0.023114656448364256, 0.02307481575012207, 0.02357663917541504, 0.02473948860168457, 0.023595327377319335, 0.023483999252319337, 0.023257471084594725, 0.023238143920898437, 0.023150720596313477, 0.02303219223022461, 0.02309529685974121, 0.023267423629760742, 0.02301955223083496, 0.023439327239990235, 0.023102752685546873, 0.023122655868530274, 0.02322332763671875, 0.023008224487304687, 0.02305023956298828, 0.022996992111206056, 0.023177215576171875, 0.023965120315551758, 0.025121023178100586, 0.023283872604370117, 0.023129983901977538, 0.023035743713378905, 0.022979007720947266, 0.022999008178710936, 0.022963680267333985, 0.022929983139038088, 0.023027679443359376, 0.023107616424560547, 0.02311577606201172, 0.023601152420043944, 0.02333273506164551, 0.02321830368041992, 0.023008256912231444, 0.02301644706726074, 0.023007040023803712, 0.023001056671142578, 0.02303388786315918, 0.02315817642211914, 0.02305232048034668, 0.023065343856811523, 0.02311577606201172, 0.02324025535583496, 0.023187904357910155, 0.023088800430297853, 0.02325948715209961, 0.023202911376953125, 0.023098112106323242, 0.023146528244018555, 0.023305887222290038, 0.023224800109863282, 0.023207935333251953, 0.02322172737121582, 0.02349110412597656, 0.02364355278015137, 0.02359561538696289, 0.023424192428588866, 0.02327225685119629, 0.023276992797851562, 0.023273887634277343, 0.023228832244873047, 0.023278656005859374, 0.023146432876586916, 0.023167743682861328, 0.02312918472290039, 0.02309596824645996, 0.02306505584716797, 0.022990367889404298, 0.022966527938842775, 0.02291916847229004, 0.022929407119750975, 0.023265983581542967, 0.023201791763305665, 0.0231092472076416, 0.022880640029907227, 0.02287615966796875, 0.02286534309387207, 0.02319001579284668, 0.02307414436340332, 0.023115743637084962, 0.023077728271484375, 0.0236213436126709, 0.023677183151245118, 0.023699392318725587, 0.023451648712158202, 0.02349660873413086, 0.023119808197021485, 0.023156896591186523, 0.023404544830322265, 0.02343731117248535, 0.02339788818359375, 0.02336614418029785, 0.023832447052001954, 0.023654176712036134, 0.023708000183105468, 0.023727104187011717, 0.02371878433227539, 0.023787647247314452, 0.023842815399169923, 0.023828224182128908, 0.023742719650268553, 0.023815679550170898, 0.023749120712280275, 0.023820287704467775, 0.023816192626953125, 0.023674848556518555, 0.023572032928466796, 0.023394271850585936, 0.023443967819213866, 0.02381430435180664, 0.023717727661132813, 0.023318527221679687, 0.02333695983886719, 0.023259136199951173, 0.023805696487426756, 0.023268863677978514, 0.023199615478515626, 0.02310643196105957, 0.022996992111206056, 0.022966272354125978, 0.023045312881469725, 0.023117984771728516, 0.023462560653686522, 0.02332419204711914, 0.023134239196777345, 0.02318614387512207, 0.023180639266967774, 0.02366873550415039, 0.02361382484436035, 0.023250944137573244, 0.02304537582397461, 0.023057024002075197, 0.023045343399047853, 0.02310163116455078]",tokens/s,42.49357384629012,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1860.182016,2872.967168,0.0,2470.445056,2443.388928,s,1,8.9561875,8.9561875,0.0,8.9561875,8.9561875,8.9561875,8.9561875,[8.9561875],,kWh,5.186940456250113e-05,5.71431315325968e-06,1.674612450799917e-05,7.432984222375998e-05,,MB,1802.481664,3885.891584,0.0,3466.592256,3034.868224,s,10,3.8240326232910156,0.38240326232910155,0.0021314215030286765,0.3829990539550781,0.384252377319336,0.38533383026123047,0.3861989926147461,"[0.37930682373046876, 0.383474609375, 0.38365753173828127, 0.380706787109375, 0.3840120544433594, 0.37941091918945313, 0.3810505065917969, 0.3831781005859375, 0.3828200073242187, 0.386415283203125]",tokens/s,669.4503557338453,kWh,1.1279425504487906e-05,1.2439178864856008e-06,5.1041707500000425e-06,1.762751414097355e-05,tokens/kWh,14522751.078381004,MB,1812.967424,3887.988736,0.0,3468.689408,3034.870784,s,10,23.969672119140622,2.3969672119140624,0.011516231966029456,2.3916727294921873,2.4061346435546875,2.4162810668945314,2.4243982055664066,"[2.38609716796875, 2.40315673828125, 2.38956201171875, 2.4038798828125, 2.391485107421875, 2.388785888671875, 2.3918603515625, 2.399729248046875, 2.388688232421875, 2.426427490234375]",tokens/s,26.283213089799542,kWh,7.061934709509715e-05,7.788946631402764e-06,3.604519550279962e-05,0.00011445348922929958,tokens/kWh,550441.9343108352,,s,630,23.9679157752991,0.03804431075444297,0.0005355317890971716,0.03790851020812988,0.03850929374694824,0.038754190063476565,0.04084539306640625,"[0.03839043045043945, 0.03797980880737305, 0.03808287811279297, 0.03816361618041992, 0.037722721099853515, 0.03772761535644531, 0.03789888000488281, 0.03771744155883789, 0.03757094573974609, 0.03774588775634766, 0.037639007568359376, 0.03825471878051758, 0.03780505752563477, 0.037594112396240234, 0.03768131256103516, 0.03751510238647461, 0.03751321411132812, 0.037748737335205076, 0.03778508758544922, 0.037677566528320314, 0.037823871612548826, 0.03781692886352539, 0.037840255737304686, 0.03841296005249024, 0.037937152862548826, 0.037655742645263675, 0.03762198257446289, 0.037765342712402346, 0.037832992553710934, 0.03792291259765625, 0.037806079864501956, 0.0397946891784668, 0.0380682258605957, 0.03805593490600586, 0.037754878997802735, 0.0380786247253418, 0.0377402229309082, 0.03771408081054688, 0.0381082878112793, 0.03795443344116211, 0.037754878997802735, 0.037664768218994144, 0.0376998405456543, 0.03766368103027344, 0.03775980758666992, 0.03762790298461914, 0.03761318588256836, 0.0382949447631836, 0.03819414520263672, 0.03765248107910156, 0.037757152557373046, 0.037875263214111325, 0.03805353546142578, 0.037939777374267576, 0.037748737335205076, 0.03771561431884766, 0.03799622344970703, 0.03771811294555664, 0.03792544174194336, 0.03773833465576172, 0.03773455810546875, 0.03783391952514648, 0.038050846099853514, 0.03834003067016602, 
0.03791484832763672, 0.03785903930664063, 0.037695999145507815, 0.03781340789794922, 0.03785420989990235, 0.03788595199584961, 0.03767500686645508, 0.03762732696533203, 0.03775164794921875, 0.037529312133789065, 0.03764223861694336, 0.037634048461914066, 0.03775027084350586, 0.03755673599243164, 0.037779457092285154, 0.037601280212402347, 0.03755948638916016, 0.03829033660888672, 0.03768278503417969, 0.03795999908447266, 0.037773311614990236, 0.03773545455932617, 0.037800670623779294, 0.03759452819824219, 0.03765497589111328, 0.03781206512451172, 0.03792902374267578, 0.0380687370300293, 0.03821158218383789, 0.03834470367431641, 0.038629375457763675, 0.038456382751464846, 0.03862419128417969, 0.038209537506103515, 0.038107166290283205, 0.03824777603149414, 0.03818739318847656, 0.03823436737060547, 0.03821363067626953, 0.03830579376220703, 0.03814345550537109, 0.038332672119140626, 0.038410526275634765, 0.03829510498046875, 0.03836972808837891, 0.03845119857788086, 0.03866419219970703, 0.03834009552001953, 0.037992481231689454, 0.04204748916625976, 0.03840252685546875, 0.038340576171875, 0.03825872039794922, 0.03821382522583008, 0.03817657470703125, 0.03832012939453125, 0.03835903930664063, 0.03834467315673828, 0.03841664123535156, 0.03825641632080078, 0.038180545806884764, 0.03910688018798828, 0.03898691177368164, 0.03848809432983399, 0.03802803039550781, 0.03783196640014649, 0.03872163009643555, 0.03790252685546875, 0.03768364715576172, 0.03760086441040039, 0.03773891067504883, 0.03818278503417969, 0.03771920013427734, 0.03784553527832031, 0.0376693115234375, 0.03764825439453125, 0.037542015075683596, 0.03761151885986328, 0.037617664337158206, 0.037959808349609374, 0.03798348617553711, 0.03760585784912109, 0.03770383834838867, 0.037609375, 0.03761097717285156, 0.03763475036621094, 0.03759408187866211, 0.03765756988525391, 0.03758454513549805, 0.03756067276000977, 0.03786137771606445, 0.03780755233764648, 0.04082553482055664, 0.038117023468017576, 0.03780182266235352, 0.03778620910644531, 0.03800451278686524, 0.03796899032592774, 0.03767718505859375, 0.037785888671875, 0.037994590759277344, 0.037758560180664064, 0.03795027160644531, 0.038257823944091794, 0.03791958236694336, 0.03771187210083008, 0.03774259185791016, 0.03789324951171875, 0.03768000030517578, 0.03788595199584961, 0.03789004898071289, 0.03767279815673828, 0.03826704025268555, 0.03781836700439453, 0.038870174407958986, 0.03818796920776367, 0.03770694351196289, 0.03798294448852539, 0.03802447891235351, 0.037816864013671875, 0.03788211059570312, 0.037826496124267577, 0.037773311614990236, 0.03779558563232422, 0.03813401412963867, 0.03842700958251953, 0.038079872131347656, 0.037906494140625, 0.03764409637451172, 0.03788671875, 0.03755011367797852, 0.03774652862548828, 0.03757068634033203, 0.03756032180786133, 0.03774003219604492, 0.03823462295532227, 0.03832627105712891, 0.03784499359130859, 0.03776102447509765, 0.03773833465576172, 0.03782275390625, 0.037652320861816406, 0.042238048553466793, 0.03820041656494141, 0.03779225540161133, 0.037779808044433594, 0.0380211181640625, 0.03780515289306641, 0.037628833770751956, 0.03761971282958984, 0.03763167953491211, 0.03745964813232422, 0.03759574508666992, 0.03785897445678711, 0.038287742614746094, 0.03793875122070312, 0.03810166549682617, 0.038139678955078124, 0.03793625640869141, 0.03792291259765625, 0.037954334259033204, 0.03845315170288086, 0.03838745498657226, 0.038297950744628904, 0.037966110229492187, 0.038047454833984376, 0.03809497451782227, 0.0380558090209961, 
0.03806208038330078, 0.038564990997314454, 0.03839632034301758, 0.0382529296875, 0.03818463897705078, 0.03843078231811523, 0.03849456024169922, 0.03819686508178711, 0.038340991973876956, 0.038457183837890624, 0.038346656799316405, 0.03831644821166992, 0.03842236709594726, 0.03814406585693359, 0.03827705764770508, 0.03858432006835937, 0.03902444839477539, 0.03818924713134766, 0.03795558547973633, 0.04034969711303711, 0.03855974578857422, 0.03909238433837891, 0.039255104064941405, 0.03815840148925781, 0.03810934448242188, 0.03779423904418945, 0.03801331329345703, 0.03793635177612305, 0.03780204772949219, 0.03794992065429687, 0.037844768524169924, 0.03772028732299805, 0.03777536010742188, 0.03770476913452148, 0.037622718811035155, 0.03811455917358399, 0.03768191909790039, 0.037806079864501956, 0.03952640151977539, 0.037994495391845705, 0.03770742416381836, 0.03795798492431641, 0.0376258544921875, 0.0377911376953125, 0.037739105224609375, 0.03792643356323242, 0.03789273452758789, 0.0381376953125, 0.03758883285522461, 0.03762387084960937, 0.03860079956054688, 0.03782656097412109, 0.037910526275634765, 0.03769343948364258, 0.03779379272460937, 0.03770566558837891, 0.03777763366699219, 0.03773014450073242, 0.03781951904296875, 0.03784342575073242, 0.03770185470581055, 0.03767110443115235, 0.03790230560302734, 0.03780326461791992, 0.038190975189208984, 0.0381756477355957, 0.038508544921875, 0.03848806381225586, 0.038016223907470705, 0.037959583282470705, 0.037938079833984374, 0.0378092155456543, 0.03787868881225586, 0.037986305236816405, 0.037754878997802735, 0.0377262077331543, 0.03775619125366211, 0.03803823852539062, 0.03765459060668945, 0.03774848175048828, 0.037625118255615236, 0.03805673599243164, 0.037776958465576174, 0.03882825469970703, 0.03803718566894531, 0.037944160461425784, 0.037760353088378905, 0.03774054336547852, 0.03784771347045898, 0.03774054336547852, 0.037804031372070314, 0.03798825454711914, 0.0383851203918457, 0.037710273742675784, 0.03787795257568359, 0.03772028732299805, 0.03759209442138672, 0.03757040023803711, 0.037597919464111326, 0.037709407806396485, 0.03764080047607422, 0.0376258544921875, 0.03770163345336914, 0.03754985427856445, 0.03776943969726562, 0.04143513488769531, 0.03798979187011719, 0.03785152053833008, 0.03788140869140625, 0.03822393417358398, 0.03817327880859375, 0.03788390350341797, 0.03779174423217774, 0.038100990295410156, 0.03809795379638672, 0.038042335510253905, 0.03812575912475586, 0.03795769500732422, 0.037920768737792966, 0.03793920135498047, 0.03790028762817383, 0.037875297546386716, 0.03791238403320313, 0.03776156616210938, 0.03787782287597656, 0.03770780944824219, 0.03768700790405274, 0.037762847900390625, 0.03768060684204102, 0.03759820938110352, 0.03765862274169922, 0.03779590225219726, 0.03774457550048828, 0.03770473480224609, 0.03759817504882813, 0.03772988891601563, 0.03804127883911133, 0.03799894332885742, 0.037902591705322265, 0.03787993621826172, 0.03805500793457031, 0.037850017547607424, 0.03798220825195313, 0.03775283050537109, 0.037778526306152346, 0.03781929779052735, 0.038332416534423826, 0.04118527984619141, 0.03822387313842773, 0.03794739151000977, 0.03809280014038086, 0.03808451080322266, 0.03818915176391602, 0.037943233489990236, 0.03829676818847656, 0.037989246368408204, 0.03796310424804687, 0.037972640991210935, 0.038010879516601564, 0.038234111785888675, 0.0377322883605957, 0.03763820648193359, 0.037676830291748044, 0.03775484848022461, 0.03789827346801758, 0.03765679931640625, 0.037676414489746096, 0.03771660614013672, 
0.03770556640625, 0.037916000366210935, 0.03773676681518555, 0.03769712066650391, 0.03770460891723633, 0.03772358322143555, 0.03776160049438477, 0.03774054336547852, 0.03766672134399414, 0.037854976654052734, 0.038174846649169925, 0.03784521484375, 0.037709312438964845, 0.03786092758178711, 0.03811628723144531, 0.037924415588378904, 0.038172832489013674, 0.03818892669677734, 0.03803334426879883, 0.03790233612060547, 0.03789033508300781, 0.03778559875488281, 0.037800128936767576, 0.03782022476196289, 0.037740734100341795, 0.03774054336547852, 0.037781505584716796, 0.037789310455322266, 0.037633407592773435, 0.03766579055786133, 0.03762697601318359, 0.0377861442565918, 0.037668704986572266, 0.03766912078857422, 0.03758687973022461, 0.037709537506103515, 0.03794393539428711, 0.0381214714050293, 0.037765121459960936, 0.04014080047607422, 0.03836073684692383, 0.03891766357421875, 0.038283744812011716, 0.03819683074951172, 0.03812393569946289, 0.03811942291259766, 0.03798425674438476, 0.038044830322265626, 0.038050846099853514, 0.037902145385742186, 0.037900062561035154, 0.037825759887695314, 0.03819548797607422, 0.0379475212097168, 0.03811324691772461, 0.03808492660522461, 0.03806035232543945, 0.038109184265136715, 0.03854288101196289, 0.038349281311035155, 0.03855519866943359, 0.03813625717163086, 0.03804355239868164, 0.03818300628662109, 0.03800070571899414, 0.03811894226074219, 0.038072769165039065, 0.03806943893432617, 0.03776985549926758, 0.03773436737060547, 0.03796192169189453, 0.03786751937866211, 0.037986305236816405, 0.037797889709472655, 0.03771551895141602, 0.03782080078125, 0.037851200103759766, 0.037860671997070314, 0.03783891296386719, 0.03893337631225586, 0.0380211181640625, 0.037882785797119144, 0.03784486389160156, 0.03797030258178711, 0.03802582550048828, 0.03804774475097656, 0.03891404724121094, 0.040853504180908204, 0.03826483154296875, 0.03795558547973633, 0.0377957763671875, 0.037899520874023436, 0.037782337188720705, 0.03771097564697266, 0.03813260650634766, 0.03783174514770508, 0.037655487060546874, 0.03782035064697266, 0.03759929656982422, 0.03802521514892578, 0.03799039840698242, 0.037718017578125, 0.037649951934814456, 0.03909475326538086, 0.03865193557739258, 0.03821363067626953, 0.0378152961730957, 0.03786783981323242, 0.037882305145263674, 0.03773382568359375, 0.03787443161010742, 0.03783382415771484, 0.03779068756103516, 0.03782963180541992, 0.037667839050292966, 0.03767910385131836, 0.0375594253540039, 0.037817214965820314, 0.037668319702148435, 0.03767523193359375, 0.03763756942749023, 0.0379150390625, 0.037921249389648436, 0.03791872024536133, 0.03784089660644531, 0.037754878997802735, 0.038010879516601564, 0.037954753875732425, 0.03784128189086914, 0.03844755172729492, 0.03834470367431641, 0.03791238403320313, 0.03833465576171875, 0.03775833511352539, 0.03799513626098633, 0.03786342239379883, 0.03834265518188477, 0.03793920135498047, 0.03789542388916016, 0.03779660797119141, 0.0378711051940918, 0.03766732788085937, 0.037754878997802735, 0.03771587371826172, 0.03785855865478516, 0.03776598358154297, 0.03771350479125977, 0.038696800231933594, 0.03786371231079102, 0.037824798583984375, 0.03782656097412109, 0.03788508987426758, 0.037870433807373045, 0.037949119567871094, 0.03775686264038086, 0.03805763244628906, 0.03792969512939453, 0.03843411254882813, 0.038111934661865236, 0.03793305587768555, 0.03808444976806641, 0.03779190444946289, 0.03810665512084961, 0.03776681518554687, 0.03771648025512695, 0.03789241409301758, 0.037684608459472656, 0.03855791854858399, 
0.03827248001098633, 0.038127582550048826, 0.038111808776855466, 0.03827878570556641, 0.03826847839355469, 0.03806700897216797, 0.038323265075683594, 0.038130207061767576, 0.037999008178710936, 0.03808870315551758, 0.0379634895324707, 0.03824166488647461, 0.03796780776977539, 0.037853534698486326, 0.03795772933959961, 0.03791712188720703, 0.03789424133300781, 0.03798614501953125, 0.037699680328369144, 0.03779590225219726, 0.037720062255859374, 0.03828556823730469, 0.03812736129760742, 0.03872358322143555, 0.03851603317260742, 0.04226323318481445, 0.03857107162475586, 0.03835798263549805, 0.038250240325927734, 0.038260574340820315, 0.03845935821533203, 0.03869353485107422, 0.038853534698486326, 0.03899407958984375, 0.03863622283935547, 0.03883827209472656, 0.038346561431884765, 0.03858015823364258, 0.038371135711669925, 0.03854713439941406, 0.03868953704833984, 0.03826240158081055, 0.038445568084716795, 0.04101516723632812, 0.039337982177734376, 0.03895296096801758, 0.03858227157592774, 0.038610145568847655, 0.03871619033813477, 0.038825984954833984, 0.03852262496948242, 0.038555904388427736, 0.038420417785644534, 0.038590526580810545, 0.03837247848510742, 0.03868147277832031, 0.03866624069213867, 0.038787071228027346, 0.03870684814453125, 0.038779232025146486, 0.039241729736328126, 0.03856320190429687]",tokens/s,26.28513909621075,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3809.091584,4375.642112,0.0,3997.171712,3878.257152,s,1,10.1610673828125,10.1610673828125,0.0,10.1610673828125,10.1610673828125,10.1610673828125,10.1610673828125,[10.1610673828125],,kWh,9.453367005832356e-05,1.0420346157089993e-05,3.048863550200731e-05,0.00013544265171742086,,MB,2068.639744,4862.181376,0.0,4454.350848,4371.844096,s,10,6.675686889648437,0.6675686889648438,0.001182260049307542,0.6671465148925781,0.6692501159667968,0.6694332855224608,0.6695798211669921,"[0.6663990478515625, 0.665687744140625, 0.668204345703125, 0.6683736572265625, 0.669616455078125, 0.6692094116210937, 0.66723583984375, 0.666953125, 0.6670571899414063, 0.6669500732421875]",tokens/s,383.48113719497974,kWh,1.942514973437639e-05,2.142278729064239e-06,1.2911815885000184e-05,3.4479244348440813e-05,tokens/kWh,7424756.6858748915,MB,2082.807808,5063.507968,0.0,4655.67744,4530.328576,s,10,392.17560546875006,39.217560546875,0.04674525619346836,39.191431640625,39.287291015625,39.2940673828125,39.2994884765625,"[39.161171875, 39.1854765625, 39.26026171875, 39.28578515625, 39.30084375, 39.2348046875, 39.185828125, 39.19703515625, 39.18309375, 39.1813046875]",tokens/s,1.6064232226963457,kWh,0.0011423082741539557,0.00012600511666804526,0.0007596047187943987,0.0020279181096164,tokens/kWh,31066.343212407653,,s,630,392.1716875610349,0.6224947421603733,0.000999651843105283,0.6221973571777344,0.6240720092773437,0.6245301025390625,0.6251899475097656,"[0.6208662719726562, 0.6207017211914062, 0.6207979125976563, 
0.620652587890625, 0.6211030883789063, 0.6210949096679688, 0.6213795776367188, 0.6215086059570313, 0.6209710693359375, 0.620970947265625, 0.6212689208984375, 0.6219121704101562, 0.621631103515625, 0.6217035522460937, 0.6210741577148438, 0.6216338500976563, 0.621465576171875, 0.6212767944335937, 0.6213320922851563, 0.6221156005859375, 0.621531005859375, 0.6213934326171875, 0.6221089477539062, 0.6209555053710938, 0.6218368530273437, 0.6214320068359375, 0.6214171142578125, 0.621451416015625, 0.6215410766601562, 0.6220205688476562, 0.6226742553710938, 0.6227169189453124, 0.621549560546875, 0.6219837646484375, 0.6218383178710938, 0.6212990112304687, 0.621251220703125, 0.6218731689453125, 0.6219625244140625, 0.6210096435546875, 0.6220963745117187, 0.6212274169921875, 0.6215418090820313, 0.6216337890625, 0.6213214111328125, 0.6221744384765625, 0.6212382202148438, 0.6219413452148438, 0.6216642456054687, 0.6220770263671875, 0.6223819580078125, 0.621633544921875, 0.6220308227539062, 0.6218731079101563, 0.621556884765625, 0.6221239624023438, 0.6228521118164062, 0.6217460327148437, 0.6216721801757813, 0.6218858642578124, 0.6213114013671875, 0.6218358764648437, 0.6216796264648438, 0.6216312255859375, 0.6215479125976563, 0.62104296875, 0.6209993896484375, 0.6224806518554687, 0.6214332275390625, 0.6217445068359375, 0.6213316040039063, 0.6214786376953125, 0.6217033081054687, 0.6214041748046875, 0.621117431640625, 0.62139599609375, 0.6212894897460938, 0.6211517944335937, 0.6212304077148437, 0.6213383178710937, 0.6229478149414063, 0.62127783203125, 0.6217813110351562, 0.6214799194335937, 0.621645751953125, 0.6212383422851563, 0.6216724243164062, 0.622635009765625, 0.621991943359375, 0.62181298828125, 0.6215087890625, 0.621041748046875, 0.6216663818359375, 0.622186279296875, 0.6211631469726563, 0.6220943603515625, 0.621770751953125, 0.6214201049804687, 0.6217911376953125, 0.6219002685546875, 0.6219304809570313, 0.6224404296875, 0.6215733642578125, 0.622050048828125, 0.622199951171875, 0.6227362670898438, 0.6235402221679688, 0.621139404296875, 0.6225966796875, 0.6220348510742187, 0.6215162963867188, 0.62206005859375, 0.6221367797851562, 0.6229017944335937, 0.6219788818359375, 0.6231944580078125, 0.622035400390625, 0.6224629516601563, 0.6234024047851563, 0.62279736328125, 0.62261865234375, 0.6231654663085937, 0.6234412841796875, 0.6231921997070312, 0.6231817626953124, 0.6234131469726563, 0.6228252563476563, 0.623083740234375, 0.62229296875, 0.6221814575195312, 0.6223729858398438, 0.6235595092773437, 0.6238056030273438, 0.6229471435546875, 0.6226575317382812, 0.6221455078125, 0.6221781005859375, 0.621883056640625, 0.6221412353515625, 0.6218841552734375, 0.6221474609375, 0.6222430419921875, 0.6224573974609375, 0.6244620361328125, 0.6231836547851562, 0.6232293090820312, 0.6236917724609375, 0.623435546875, 0.6235765991210938, 0.621744873046875, 0.6233309326171875, 0.6228955078125, 0.6224219360351563, 0.6232003173828125, 0.6229783935546875, 0.6228689575195312, 0.6229219970703125, 0.62275927734375, 0.62302685546875, 0.6236508178710938, 0.6232855834960938, 0.62368017578125, 0.6235176391601562, 0.6234476928710937, 0.62329248046875, 0.6232743530273438, 0.6232473754882812, 0.6228970336914063, 0.6231710815429687, 0.6233646850585938, 0.6227291870117188, 0.623120361328125, 0.6228684692382812, 0.6239575805664063, 0.6243536987304688, 0.6232473754882812, 0.624051513671875, 0.6230247192382813, 0.6234602661132812, 0.6252095947265625, 0.6237081298828125, 0.6246498413085938, 0.6240689697265625, 0.6241747436523437, 
0.6232557983398438, 0.6239718627929688, 0.6235609741210938, 0.623882568359375, 0.6232124633789062, 0.6231897583007813, 0.6230674438476562, 0.6230609741210937, 0.624166748046875, 0.6237717895507813, 0.6243942260742188, 0.6245888061523438, 0.625301513671875, 0.6247157592773438, 0.6230240478515625, 0.625141845703125, 0.6228869018554688, 0.6232366943359375, 0.62269873046875, 0.6231851806640625, 0.6220228271484375, 0.622017333984375, 0.6219386596679688, 0.6229503784179687, 0.6238351440429688, 0.62455810546875, 0.62429736328125, 0.6225447387695312, 0.623547119140625, 0.6232158203125, 0.6232850341796875, 0.6230805053710937, 0.6226461791992187, 0.62251171875, 0.62471533203125, 0.6247340698242188, 0.6254151000976562, 0.623763427734375, 0.6232801513671875, 0.62427685546875, 0.6225496826171875, 0.6223196411132812, 0.6223701171875, 0.6233524780273437, 0.6226534423828125, 0.6228392944335938, 0.6217301635742187, 0.62282568359375, 0.6233374633789063, 0.6248344116210938, 0.624234619140625, 0.6246707153320312, 0.624437255859375, 0.6261248168945313, 0.624300048828125, 0.6247915649414062, 0.6247709350585937, 0.623346923828125, 0.62327490234375, 0.6231838989257813, 0.6232733764648437, 0.624319091796875, 0.623203369140625, 0.6235957641601563, 0.6232378540039063, 0.6230384521484374, 0.6235934448242187, 0.62410546875, 0.624552734375, 0.622970703125, 0.6232434692382812, 0.6235320434570313, 0.6216826171875, 0.6229218139648437, 0.6228759765625, 0.622852783203125, 0.6244249267578125, 0.6225879516601562, 0.6232387084960938, 0.6226347045898437, 0.622448974609375, 0.6230695190429687, 0.6228805541992187, 0.62324755859375, 0.6231512451171874, 0.6230609741210937, 0.623164794921875, 0.6216956176757813, 0.6223012084960937, 0.623686767578125, 0.6242169189453125, 0.624099365234375, 0.623435791015625, 0.6225490112304688, 0.6222274780273438, 0.6235869140625, 0.623337890625, 0.6240316772460938, 0.6234563598632813, 0.62336962890625, 0.6241322021484375, 0.6243677978515625, 0.6238907470703124, 0.6246154174804688, 0.62384130859375, 0.6250864868164062, 0.6254507446289063, 0.6248143920898438, 0.6251089477539062, 0.6250797119140625, 0.6246277465820312, 0.6248864135742187, 0.6241300659179687, 0.6247028198242187, 0.62443994140625, 0.62450244140625, 0.6245584106445312, 0.6252871704101562, 0.6252933349609375, 0.6248130493164062, 0.6243065185546876, 0.6248900756835938, 0.6244520263671876, 0.624332275390625, 0.624459716796875, 0.6245643920898437, 0.6243065185546876, 0.6236464233398438, 0.6242077026367188, 0.624173583984375, 0.622940185546875, 0.6232531127929688, 0.623549072265625, 0.6243225708007812, 0.6241682739257812, 0.623501708984375, 0.6244286499023437, 0.6234568481445313, 0.6228500366210937, 0.62369189453125, 0.622315185546875, 0.6235504760742188, 0.6238941650390625, 0.622547607421875, 0.6224956665039062, 0.6225897216796875, 0.6231893920898437, 0.6223553466796875, 0.6224359130859375, 0.6219324951171875, 0.6216434326171875, 0.6220637817382813, 0.622115478515625, 0.621849609375, 0.6217793579101563, 0.6230022583007813, 0.622055419921875, 0.6230033569335938, 0.6232311401367188, 0.6235029296875, 0.6232695922851562, 0.6238994750976562, 0.6235864868164063, 0.6242169189453125, 0.62344384765625, 0.62461962890625, 0.6235402221679688, 0.6238945922851562, 0.6230337524414062, 0.6217464599609375, 0.6219135131835938, 0.6227423706054688, 0.6214451293945312, 0.6217722778320313, 0.6224737548828125, 0.6223883666992187, 0.6219784545898438, 0.6218731689453125, 0.6218629150390625, 0.6218421020507813, 0.6229135131835938, 0.6222031860351562, 
0.6221757202148438, 0.6219984130859375, 0.6235321044921875, 0.6232987670898438, 0.62225, 0.6229113159179688, 0.6220924682617187, 0.6214716186523438, 0.6222839965820313, 0.6220870971679687, 0.622716552734375, 0.6221619262695313, 0.62134326171875, 0.6216268920898438, 0.62130224609375, 0.6210475463867188, 0.621955322265625, 0.6211614990234375, 0.6220830688476563, 0.6220595092773438, 0.62343359375, 0.62227880859375, 0.6221425170898438, 0.6217224731445312, 0.6216438598632813, 0.6211522827148438, 0.62152294921875, 0.6214039916992188, 0.62175146484375, 0.6217625732421875, 0.6218528442382812, 0.6212329711914063, 0.6213324584960938, 0.6222576904296875, 0.6224789428710937, 0.6215234985351562, 0.6221717529296875, 0.6216543579101562, 0.6217588500976563, 0.62298876953125, 0.621654541015625, 0.6225194091796875, 0.621765380859375, 0.6218440551757812, 0.6219963989257813, 0.621807861328125, 0.6218424072265625, 0.621813232421875, 0.6216232299804687, 0.6219638671875, 0.6222356567382813, 0.6216294555664063, 0.622065673828125, 0.6220779418945312, 0.6213734130859375, 0.621981689453125, 0.6218458862304688, 0.6219042358398438, 0.6228216552734375, 0.62259814453125, 0.622703857421875, 0.6227461547851563, 0.6227069702148438, 0.6219407348632813, 0.6222471313476563, 0.6224263305664063, 0.6222280883789062, 0.6221859130859375, 0.6219306640625, 0.62283349609375, 0.621927001953125, 0.6228889770507813, 0.6222005615234375, 0.6224120483398438, 0.6220709228515625, 0.62156201171875, 0.6224703979492188, 0.6221766357421875, 0.622147216796875, 0.6220294799804688, 0.6219849243164063, 0.6213472900390625, 0.6218675537109375, 0.6220467529296875, 0.6216909790039062, 0.6217915649414063, 0.6212394409179688, 0.62157080078125, 0.6216124877929687, 0.6215706176757813, 0.62171923828125, 0.6212115478515625, 0.6219024047851562, 0.6219141235351563, 0.6221842041015625, 0.6226741333007813, 0.6221475219726562, 0.62283984375, 0.6219939575195312, 0.621854736328125, 0.6222540893554688, 0.621770263671875, 0.6217395629882813, 0.6221505737304688, 0.6216661987304688, 0.6218499145507812, 0.6217726440429687, 0.6218966674804688, 0.622606201171875, 0.6219019775390625, 0.6216640625, 0.6225096435546875, 0.622348876953125, 0.6223441772460937, 0.62247119140625, 0.622202880859375, 0.6227476196289062, 0.6222908935546875, 0.6219326171875, 0.6220206298828125, 0.6229647216796875, 0.6225469360351562, 0.62230322265625, 0.6233429565429688, 0.6229756469726563, 0.62355859375, 0.6229109497070312, 0.6216934814453124, 0.62272265625, 0.622436767578125, 0.6221782836914063, 0.621923828125, 0.6228751831054687, 0.6234577026367187, 0.6220641479492187, 0.6225791015625, 0.6223378295898437, 0.6220719604492188, 0.6216929321289062, 0.6215291137695312, 0.6213193359375, 0.62173681640625, 0.62162255859375, 0.621828857421875, 0.6211678466796875, 0.621646240234375, 0.6214249877929687, 0.6218397827148437, 0.621078857421875, 0.621701171875, 0.6215374755859375, 0.6221947631835938, 0.6217788696289063, 0.6222756958007812, 0.6222693481445313, 0.6219939575195312, 0.6229188232421875, 0.6223552856445312, 0.6213916625976562, 0.6219757690429687, 0.6217498168945312, 0.6219063720703125, 0.6214553833007812, 0.6221741943359375, 0.6214533081054687, 0.6217052001953125, 0.621414306640625, 0.621573486328125, 0.6216115112304688, 0.6217853393554688, 0.623085205078125, 0.6220038452148438, 0.6223834228515625, 0.622569091796875, 0.6228565673828125, 0.6226044311523438, 0.6217865600585938, 0.622486328125, 0.6222801513671875, 0.6225289306640625, 0.6220162963867187, 0.6221029052734375, 0.6229155883789063, 
0.62178271484375, 0.6214864501953125, 0.6217922973632812, 0.6217655639648437, 0.6219755249023438, 0.6215924072265625, 0.6230631103515625, 0.6217892456054688, 0.6223544311523438, 0.62274755859375, 0.6217227783203125, 0.621640625, 0.6221250610351563, 0.6218157958984375, 0.621573486328125, 0.6218594360351563, 0.6215670166015625, 0.6223014526367188, 0.6217172241210938, 0.6220453491210938, 0.6212354736328125, 0.621681396484375, 0.621613037109375, 0.6219480590820312, 0.6224996948242187, 0.6217942504882813, 0.6217050170898437, 0.6220220336914063, 0.6217483520507813, 0.6217632446289062, 0.6217789306640625, 0.6213734130859375, 0.6209495239257813, 0.6216151123046875, 0.6211727294921875, 0.6215557250976562, 0.6225057373046875, 0.6211812133789063, 0.6214102783203125, 0.6219301147460937, 0.6214495849609375, 0.6215593872070313, 0.6216724243164062, 0.6226577758789062, 0.6225548706054688, 0.6222279663085938, 0.6219957885742188, 0.6224058837890625, 0.6217394409179687, 0.6216561889648438, 0.6219207153320313, 0.6218521118164062, 0.6216215209960938, 0.621865234375, 0.621643798828125, 0.6220712890625, 0.6220040283203125, 0.6222424926757812, 0.6220428466796875, 0.6224735107421875, 0.6216294555664063, 0.622002197265625, 0.6223499145507813, 0.6226825561523438, 0.6219522705078125, 0.622004150390625, 0.6220866088867187, 0.6224613647460937, 0.621686767578125, 0.6215733642578125, 0.6223180541992187, 0.6216582641601562, 0.6230263061523438, 0.6226472778320312, 0.6219386596679688, 0.6220728759765625, 0.622066650390625, 0.62205517578125, 0.6220638427734375, 0.6219235229492187, 0.62180224609375]",tokens/s,1.6064392713253954,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1410.727936,1946.025984,0.0,1543.503872,1510.249472,s,1,8.721248046875,8.721248046875,0.0,8.721248046875,8.721248046875,8.721248046875,8.721248046875,[8.721248046875],,kWh,4.485959648335817e-05,4.9342942396451895e-06,1.3138343844004696e-05,6.293223456700806e-05,,MB,1490.833408,2042.494976,0.0,1625.2928,1592.853504,s,10,0.4803630332946777,0.048036303329467775,0.00012787688710240224,0.047997215270996096,0.04820678634643555,0.048257920837402345,0.04829882843017578,"[0.04830905532836914, 0.0479502067565918, 0.04809017562866211, 0.04788422393798828, 0.04800815963745117, 0.0479537582397461, 0.047986270904541016, 0.04808367919921875, 0.0481954231262207, 0.04790208053588867]",tokens/s,5329.30267852142,kWh,1.4369705882149175e-06,1.584725435294021e-07,9.477472178921889e-07,2.5431903496365085e-06,tokens/kWh,100660967.0552538,MB,1528.410112,2084.438016,0.0,1667.23584,1592.856064,s,10,13.672127807617187,1.3672127807617187,0.007722113292489644,1.3676487426757813,1.3759423706054688,1.3759516540527343,1.3759590808105469,"[1.3552559814453125, 1.3682822265625, 1.3518692626953126, 1.3730107421875, 1.3670152587890625, 1.366090576171875, 1.3759403076171874, 1.3759609375, 1.3661876220703124, 
1.372514892578125]",tokens/s,46.07914794718394,kWh,3.9735018854280264e-05,4.382363489621787e-06,1.8604490646108987e-05,6.272187299001105e-05,tokens/kWh,1004434.2905709025,,s,630,13.669891460418699,0.021698240413363018,0.0004934869294607424,0.02163785648345947,0.022094307899475096,0.022199946880340576,0.023149786453247077,"[0.021654176712036132, 0.021330207824707032, 0.021317632675170898, 0.021249248504638673, 0.021142303466796877, 0.021131296157836914, 0.02127395248413086, 0.02114419174194336, 0.02117344093322754, 0.021250656127929687, 0.021202688217163087, 0.02143465614318848, 0.021280672073364256, 0.021315872192382814, 0.02169856071472168, 0.028588031768798827, 0.02164121627807617, 0.021290592193603516, 0.021453216552734376, 0.02124297523498535, 0.021515167236328125, 0.021110784530639647, 0.021442176818847657, 0.021465471267700195, 0.021184511184692383, 0.02104534339904785, 0.021147552490234374, 0.021104576110839844, 0.02115590476989746, 0.021354015350341798, 0.0216124153137207, 0.021758560180664063, 0.0214835205078125, 0.021379039764404296, 0.021260223388671874, 0.021223520278930662, 0.021148704528808595, 0.021277568817138673, 0.02160758399963379, 0.021256799697875976, 0.021341920852661133, 0.02124006462097168, 0.02139379119873047, 0.021350400924682617, 0.021225088119506835, 0.021471456527709962, 0.02141302490234375, 0.021349376678466796, 0.021341535568237306, 0.022024959564208985, 0.023199647903442384, 0.021556320190429686, 0.02139638328552246, 0.022073280334472655, 0.02114771270751953, 0.021235807418823242, 0.02180454444885254, 0.021282655715942383, 0.02129977607727051, 0.021397504806518555, 0.021415935516357423, 0.021428224563598632, 0.021317632675170898, 0.022245376586914063, 0.02199996757507324, 0.02180396842956543, 0.02154364776611328, 0.021388799667358398, 0.021369344711303712, 0.021352447509765626, 0.0212327995300293, 0.021230432510375978, 0.021341983795166015, 0.021516511917114258, 0.02163302421569824, 0.02135558319091797, 0.02147203254699707, 0.021231327056884765, 0.021143808364868164, 0.021264575958251954, 0.021372928619384765, 0.02146281623840332, 0.021613983154296874, 0.0215948486328125, 0.021436704635620116, 0.021425792694091797, 0.02164089584350586, 0.021654176712036132, 0.02132316780090332, 0.021221824645996094, 0.02127680015563965, 0.021276544570922852, 0.021311487197875977, 0.021499456405639647, 0.02122979164123535, 0.021365087509155275, 0.02123904037475586, 0.021330144882202147, 0.02145916748046875, 0.022245664596557618, 0.023752607345581055, 0.021733375549316408, 0.022083103179931642, 0.021963232040405272, 0.021828960418701172, 0.021973663330078125, 0.021952384948730468, 0.022001792907714843, 0.02185420799255371, 0.021942272186279296, 0.021910816192626952, 0.02203251266479492, 0.02198358345031738, 0.022015743255615235, 0.02215782356262207, 0.02201190376281738, 0.02205695915222168, 0.022095775604248045, 0.022167871475219727, 0.022122272491455076, 0.022054912567138672, 0.022163040161132814, 0.022133152008056642, 0.022106111526489256, 0.02192777633666992, 0.021897375106811525, 0.02251571273803711, 0.02200399971008301, 0.021755168914794922, 0.02155142402648926, 0.021719167709350586, 0.021702655792236326, 0.021673856735229494, 0.02165772819519043, 0.021470495223999023, 0.02134480094909668, 0.021393600463867186, 0.02128895950317383, 0.02146303939819336, 0.021360063552856447, 0.021301824569702147, 0.021407743453979493, 0.021459199905395507, 0.02148531150817871, 0.021395200729370116, 0.02142188835144043, 0.02128486442565918, 0.021201343536376954, 0.021295103073120117, 
0.021209087371826172, 0.02120297622680664, 0.02139155197143555, 0.02168502426147461, 0.02145996856689453, 0.021266143798828126, 0.02117967987060547, 0.02121120071411133, 0.02116294479370117, 0.02140889549255371, 0.02126323127746582, 0.02126233673095703, 0.02125779151916504, 0.021255935668945312, 0.021336959838867187, 0.021335872650146484, 0.02139481544494629, 0.021493951797485353, 0.021444959640502928, 0.021466815948486328, 0.021371007919311524, 0.021334304809570312, 0.02128691291809082, 0.021270336151123045, 0.021495136260986328, 0.02153558349609375, 0.021472991943359374, 0.02156163215637207, 0.021482784271240233, 0.021715072631835936, 0.021702655792236326, 0.0216246395111084, 0.021541023254394533, 0.021439104080200194, 0.021757951736450197, 0.021514240264892577, 0.021476863861083984, 0.02143283271789551, 0.02142207908630371, 0.021372896194458008, 0.02208745574951172, 0.021801855087280272, 0.02151219177246094, 0.021331071853637695, 0.021414751052856444, 0.021331647872924804, 0.021456415176391602, 0.021216064453125, 0.02145280075073242, 0.021551103591918946, 0.02151628875732422, 0.02142617607116699, 0.02141606330871582, 0.02145631980895996, 0.021537216186523437, 0.021331968307495116, 0.021553247451782227, 0.02144246482849121, 0.02142617607116699, 0.021405536651611327, 0.021629152297973634, 0.022271936416625976, 0.021663200378417968, 0.021428768157958984, 0.021370048522949218, 0.02133475112915039, 0.02165155220031738, 0.021612543106079102, 0.021946367263793946, 0.021673824310302733, 0.02199567985534668, 0.022714368820190428, 0.02210371208190918, 0.021997856140136718, 0.02187884712219238, 0.021755008697509765, 0.02195750427246094, 0.02189926338195801, 0.021749759674072267, 0.02199724769592285, 0.022042943954467775, 0.022013952255249023, 0.021942272186279296, 0.021984895706176757, 0.022043264389038086, 0.02198486328125, 0.021834175109863282, 0.021900896072387696, 0.021907392501831054, 0.021806272506713867, 0.021873632431030274, 0.02201398468017578, 0.022015520095825195, 0.021976543426513673, 0.022006784439086914, 0.022114303588867186, 0.02219375991821289, 0.0227291202545166, 0.02205081558227539, 0.022124479293823242, 0.02218204879760742, 0.021923744201660156, 0.021819679260253907, 0.022615680694580077, 0.02197462463378906, 0.021805856704711916, 0.02145686340332031, 0.021387199401855468, 0.021520191192626954, 0.021594400405883788, 0.02116758346557617, 0.021249568939208985, 0.021262399673461913, 0.021294015884399414, 0.021362688064575194, 0.02127408027648926, 0.021290719985961912, 0.021242687225341797, 0.021423648834228516, 0.021279199600219727, 0.021382720947265624, 0.021520832061767577, 0.02141939163208008, 0.02132646369934082, 0.021355520248413085, 0.02149216079711914, 0.02164384078979492, 0.021735424041748046, 0.021606399536132814, 0.021620256423950195, 0.021809312820434572, 0.02177465629577637, 0.021794815063476563, 0.0217324161529541, 0.021846975326538086, 0.021835199356079103, 0.021889087677001953, 0.021676544189453126, 0.02145075225830078, 0.02189107131958008, 0.021808448791503905, 0.021785280227661134, 0.021936128616333008, 0.022120447158813478, 0.02199737548828125, 0.02169398307800293, 0.02165817642211914, 0.021620832443237304, 0.02141360092163086, 0.0215382080078125, 0.021748607635498046, 0.02215711975097656, 0.02195884895324707, 0.022001056671142577, 0.022514272689819335, 0.023840768814086914, 0.021972768783569335, 0.021833152770996095, 0.021981887817382813, 0.021811264038085938, 0.02177027130126953, 0.0216944637298584, 0.021460960388183594, 0.021512224197387696, 
0.021432319641113282, 0.021526527404785157, 0.02224947166442871, 0.021680416107177733, 0.021489376068115233, 0.02145280075073242, 0.021424127578735352, 0.02151219177246094, 0.02160166358947754, 0.021459392547607422, 0.02150828742980957, 0.021525760650634766, 0.021449472427368162, 0.021405696868896484, 0.021282943725585937, 0.021401472091674804, 0.021609535217285158, 0.022145984649658203, 0.02214240074157715, 0.021989280700683594, 0.021957279205322266, 0.021747711181640626, 0.021747711181640626, 0.021796768188476562, 0.021756000518798828, 0.022068479537963866, 0.022166271209716797, 0.02207916831970215, 0.02194803237915039, 0.02184448051452637, 0.02173151969909668, 0.021587839126586916, 0.021497087478637697, 0.021434944152832033, 0.02126620864868164, 0.02167862319946289, 0.021964799880981444, 0.02183782386779785, 0.02178611183166504, 0.021678144454956055, 0.02178233528137207, 0.02172377586364746, 0.021635072708129883, 0.02179692840576172, 0.021852096557617186, 0.021855680465698243, 0.021690719604492186, 0.02163324737548828, 0.021506048202514647, 0.021504032135009767, 0.02186031913757324, 0.02202796745300293, 0.02190572738647461, 0.021888256072998047, 0.021648128509521483, 0.021557344436645507, 0.021730560302734375, 0.021516000747680664, 0.021681087493896484, 0.021364736557006835, 0.02144576072692871, 0.02130828857421875, 0.0213832950592041, 0.021337984085083007, 0.021342208862304687, 0.022567232131958007, 0.022031999588012694, 0.02203107261657715, 0.02195612716674805, 0.02185878372192383, 0.02166169548034668, 0.02151030349731445, 0.02161667251586914, 0.021550655364990234, 0.02158729553222656, 0.02153878402709961, 0.021390111923217773, 0.021392736434936523, 0.02154489517211914, 0.021547903060913087, 0.021465087890625, 0.021590015411376954, 0.021695552825927736, 0.021735584259033203, 0.021740320205688477, 0.021780384063720702, 0.021796960830688477, 0.021964128494262696, 0.022015647888183595, 0.022027040481567384, 0.021948640823364257, 0.02209769630432129, 0.02197068786621094, 0.021920000076293945, 0.02186057662963867, 0.02186854362487793, 0.02191923141479492, 0.02218035125732422, 0.021790815353393556, 0.021741695404052734, 0.021769119262695313, 0.021906400680541994, 0.021860479354858398, 0.021770015716552734, 0.021644384384155273, 0.021723583221435548, 0.021670368194580077, 0.021585344314575195, 0.021542816162109374, 0.02193401527404785, 0.022026975631713866, 0.022054752349853515, 0.022188192367553712, 0.022038528442382813, 0.02202787208557129, 0.022013824462890626, 0.021919967651367188, 0.0219036808013916, 0.021950464248657226, 0.02192086410522461, 0.022211391448974608, 0.022123903274536134, 0.021938911437988283, 0.02183888053894043, 0.021783519744873046, 0.021917695999145507, 0.021702655792236326, 0.021843967437744142, 0.02270310401916504, 0.02209414482116699, 0.022120384216308592, 0.02191334342956543, 0.021888288497924804, 0.02194710350036621, 0.02199318313598633, 0.022109600067138673, 0.022075872421264648, 0.02210652732849121, 0.022027679443359375, 0.022046943664550782, 0.0222457275390625, 0.022065183639526368, 0.022112255096435548, 0.021929344177246093, 0.021901727676391602, 0.021747200012207032, 0.02233568000793457, 0.022583040237426757, 0.022702880859375, 0.021765151977539064, 0.02169046401977539, 0.021594879150390624, 0.021716352462768554, 0.021692575454711913, 0.021553760528564454, 0.021518335342407227, 0.021439552307128906, 0.02134521675109863, 0.021403648376464843, 0.021368831634521485, 0.02147532844543457, 0.021710847854614256, 0.021946367263793946, 0.021995519638061522, 
0.02190540885925293, 0.021960704803466798, 0.021729280471801758, 0.02229452705383301, 0.022672447204589843, 0.021525440216064454, 0.021379072189331053, 0.021519807815551757, 0.021415552139282226, 0.0212488956451416, 0.021262399673461913, 0.021282239913940428, 0.021399711608886717, 0.021469600677490236, 0.021509792327880858, 0.021733728408813477, 0.02142207908630371, 0.02214297676086426, 0.023027711868286133, 0.021462656021118163, 0.021450912475585938, 0.021397344589233397, 0.021637504577636718, 0.021845375061035156, 0.022023807525634764, 0.022049728393554686, 0.022079519271850586, 0.02258892822265625, 0.021837696075439453, 0.021550176620483398, 0.021425695419311525, 0.02141596794128418, 0.021321760177612305, 0.021368831634521485, 0.02142211151123047, 0.02132592010498047, 0.02127244758605957, 0.021274112701416017, 0.021363199234008787, 0.021303295135498047, 0.021346080780029298, 0.02129737663269043, 0.02124777603149414, 0.021700767517089842, 0.02190505599975586, 0.02152899169921875, 0.021365823745727538, 0.021406944274902345, 0.021628639221191407, 0.021413888931274414, 0.02152038383483887, 0.021809152603149414, 0.021432319641113282, 0.02146656036376953, 0.021716928482055663, 0.02178521537780762, 0.021807104110717773, 0.021704288482666017, 0.021784704208374025, 0.02193337631225586, 0.022106111526489256, 0.021987520217895507, 0.02212124824523926, 0.021638208389282227, 0.021812223434448243, 0.02155731201171875, 0.02164297676086426, 0.021266592025756835, 0.021123071670532227, 0.02390239906311035, 0.02443766403198242, 0.021527456283569335, 0.021164031982421876, 0.021329919815063478, 0.02125209617614746, 0.021336063385009766, 0.02163046455383301, 0.021893823623657226, 0.021915456771850587, 0.021903232574462892, 0.02170844841003418, 0.02183830451965332, 0.02175935935974121, 0.021650144577026367, 0.021669792175292968, 0.02177199935913086, 0.021796607971191408, 0.02169500732421875, 0.0215402889251709, 0.021713056564331056, 0.022343679428100584, 0.021749631881713867, 0.02166796875, 0.021380319595336914, 0.02127712059020996, 0.02154640007019043, 0.02153071975708008, 0.021488479614257813, 0.021480607986450195, 0.021569696426391602, 0.02180780792236328, 0.02168422317504883, 0.02168627166748047, 0.02167807960510254, 0.02180415916442871, 0.02163382339477539, 0.021428319931030275, 0.021515520095825195, 0.02158569526672363, 0.021665983200073242, 0.02139625549316406, 0.021219327926635743, 0.021204992294311522, 0.021215232849121093, 0.02147737693786621, 0.02574515151977539, 0.02159231948852539, 0.021347711563110352, 0.021256832122802733, 0.021249759674072267, 0.021551008224487304, 0.02132211112976074, 0.021295103073120117, 0.021378976821899414, 0.021430368423461913, 0.021493247985839844, 0.021501983642578125, 0.021885408401489257, 0.022052608489990234, 0.02202582359313965, 0.022200191497802734, 0.02223593521118164, 0.022403135299682617, 0.022089664459228515, 0.02210767936706543, 0.02205446434020996, 0.022045600891113282, 0.022071296691894532, 0.02202732849121094, 0.022016960144042967, 0.022199647903442383, 0.022276384353637695, 0.022112831115722657, 0.021864255905151366, 0.021922943115234374, 0.02193292808532715, 0.021869600296020506, 0.021890016555786134, 0.021802688598632814, 0.021797183990478516, 0.02201513671875, 0.021670719146728516, 0.02153500747680664]",tokens/s,46.08668633720838,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4338.884608,6253.576192,0.0,5851.05408,5850.293248,s,1,11.434525390625,11.434525390625,0.0,11.434525390625,11.434525390625,11.434525390625,11.434525390625,[11.434525390625],,kWh,0.00011455061884583605,1.2628530022672326e-05,3.4625861034004446e-05,0.0001618050099025128,,MB,4178.751488,6368.919552,0.0,5951.717376,5923.050496,s,10,2.072653274536133,0.20726532745361329,0.0005867602319000441,0.20732881927490235,0.20794956665039063,0.20821638336181642,0.20842983673095705,"[0.20665962219238282, 0.20729696655273439, 0.20692486572265625, 0.2065278778076172, 0.2074384002685547, 0.20748121643066406, 0.2073606719970703, 0.20659017944335936, 0.2084832000732422, 0.20789027404785157]",tokens/s,1235.1318145930304,kWh,6.0735320458336766e-06,6.698080405887437e-07,4.04691933730615e-06,1.079025942372857e-05,tokens/kWh,23725101.496358585,MB,4193.959936,6383.599616,0.0,5966.39744,5923.053056,s,10,22.874246826171873,2.2874246826171873,0.009563957335335372,2.2867103271484375,2.2999686767578127,2.301418542480469,2.302578435058594,"[2.29654541015625, 2.299646484375, 2.28665380859375, 2.27372119140625, 2.274565673828125, 2.28653564453125, 2.302868408203125, 2.286766845703125, 2.28882958984375, 2.27811376953125]",tokens/s,27.54189044070195,kWh,6.651318113333398e-05,7.33630002136346e-06,4.393795238389416e-05,0.00011778743353859156,tokens/kWh,534861.8108684645,,s,630,22.871278537750285,0.03630361672658769,0.0006036691324640971,0.03620489501953125,0.036610309600830074,0.03691748790740967,0.038162448654174806,"[0.03737276840209961, 0.03637443161010742, 0.03654860687255859, 0.036364639282226566, 0.03638236618041992, 0.036492992401123046, 0.03636598587036133, 0.03653036880493164, 0.03627260971069336, 0.03645436859130859, 0.03626342391967773, 0.03638700866699219, 0.04085590362548828, 0.03635763168334961, 0.03622732925415039, 0.03595289611816406, 0.0361451530456543, 0.036190208435058595, 0.03606521606445313, 0.03631520080566406, 0.03595468902587891, 0.03603209686279297, 0.03624832153320313, 0.036222625732421875, 0.03599481582641602, 0.03608198547363281, 0.036022785186767575, 0.03638902282714844, 0.036474464416503906, 0.03643212890625, 0.03640524673461914, 0.036334815979003905, 0.03653244781494141, 0.03627679824829102, 0.036050945281982424, 0.036091167449951174, 0.03639072036743164, 0.036350879669189456, 0.036170753479003906, 0.03625584030151367, 0.036249790191650394, 0.03622127914428711, 0.03609843063354492, 0.03625164794921875, 0.03648819351196289, 0.03630387115478516, 0.0362105598449707, 0.03603779220581055, 0.03609884643554687, 0.036347648620605466, 0.036261695861816406, 0.03630553436279297, 0.036294654846191404, 0.03753753662109375, 0.03677824020385742, 0.03616153717041016, 0.036278270721435545, 0.036544513702392575, 0.036198143005371095, 0.036556865692138674, 0.03655699157714844, 0.03941718292236328, 0.036415775299072264, 0.03729459381103516, 0.03665545654296875, 0.03673657608032226, 0.036599361419677734, 0.03668630218505859, 0.03646915054321289, 0.03648716735839844, 0.03644732666015625, 0.036366622924804685, 0.03641417694091797, 0.036920799255371096, 0.036598209381103516, 0.0364576301574707, 0.03670908737182617, 0.03746828842163086, 0.03642367935180664, 0.036478977203369144, 0.036208641052246096, 0.03645999908447266, 0.036490848541259766, 0.036590526580810544, 0.036552318572998045, 0.03661452865600586, 0.03652959823608398, 0.03657990264892578, 0.03659939193725586, 0.03649987030029297, 0.03670012664794922, 0.0366162223815918, 0.036790271759033204, 
0.03738553619384766, 0.03680435180664063, 0.036999168395996096, 0.03740358352661133, 0.03634995269775391, 0.03634783935546875, 0.03661011123657226, 0.03621478271484375, 0.03625910568237305, 0.03647151947021485, 0.036362239837646484, 0.036519935607910156, 0.03637596893310547, 0.036597953796386716, 0.03637059020996094, 0.03640956878662109, 0.036913185119628905, 0.036505569458007814, 0.03634588623046875, 0.03611177444458008, 0.03630275344848633, 0.03640377426147461, 0.03639056015014648, 0.036227550506591794, 0.036122528076171875, 0.03599574279785156, 0.03595673751831055, 0.03611443328857422, 0.036067329406738284, 0.03600384140014649, 0.03604803085327148, 0.03594326400756836, 0.035985408782958986, 0.03706265640258789, 0.03609775924682617, 0.03598771286010742, 0.03582774353027344, 0.03593011093139648, 0.03651737594604492, 0.036088321685791014, 0.03611222457885742, 0.03658972930908203, 0.03610214233398437, 0.036165119171142575, 0.036170238494873046, 0.03602227020263672, 0.03603839874267578, 0.03588848114013672, 0.03596585464477539, 0.0362632942199707, 0.03668380737304688, 0.03610889434814453, 0.03611264038085937, 0.03600563049316406, 0.03582156753540039, 0.036026016235351566, 0.03592230224609375, 0.035923553466796876, 0.03637286376953125, 0.0359890251159668, 0.03624393463134765, 0.03628003311157227, 0.036181663513183596, 0.03604703903198242, 0.036104000091552735, 0.036310943603515625, 0.036498207092285156, 0.036466625213623045, 0.03619366455078125, 0.03784310531616211, 0.036958656311035155, 0.03641347122192383, 0.03615654373168945, 0.036292896270751954, 0.03618057632446289, 0.036189952850341794, 0.03605939102172852, 0.036036510467529294, 0.03630268859863281, 0.03649766540527344, 0.036208641052246096, 0.03637177658081055, 0.03606393432617187, 0.03614310455322266, 0.035969024658203126, 0.03585433578491211, 0.03571712112426758, 0.03772124862670898, 0.03705123138427734, 0.036445472717285154, 0.036181888580322265, 0.036047710418701175, 0.036122623443603515, 0.03661209487915039, 0.03673907089233398, 0.038067615509033204, 0.03667270278930664, 0.03591382217407227, 0.03583855819702148, 0.03655027389526367, 0.0359530258178711, 0.03592761611938477, 0.03653078460693359, 0.036259777069091795, 0.03598735809326172, 0.03604060745239258, 0.036020320892333986, 0.03607961654663086, 0.03590134429931641, 0.036009281158447266, 0.038201183319091794, 0.036207553863525394, 0.03615507125854492, 0.035753822326660155, 0.036147678375244144, 0.036345760345458986, 0.03635580825805664, 0.036178302764892575, 0.0361082878112793, 0.03591977691650391, 0.03595222473144531, 0.0359183349609375, 0.035769760131835936, 0.03584675216674805, 0.0360832633972168, 0.03599200057983398, 0.0360318717956543, 0.036450942993164065, 0.035934207916259765, 0.03589120101928711, 0.03592547225952149, 0.03596886444091797, 0.0359532470703125, 0.03583395385742188, 0.035915584564208985, 0.03608355331420898, 0.03578505706787109, 0.03612646484375, 0.0362663688659668, 0.03570470428466797, 0.035802398681640625, 0.03579948806762695, 0.03591513442993164, 0.03580815887451172, 0.03613695907592773, 0.036055038452148434, 0.036065502166748045, 0.03595600128173828, 0.03600230407714844, 0.03608707046508789, 0.03607830429077148, 0.03619385528564453, 0.0361701774597168, 0.03598937606811523, 0.036246814727783204, 0.03634175872802734, 0.03604892730712891, 0.036106849670410154, 0.0361453742980957, 0.036759201049804686, 0.03606966400146484, 0.03615049743652344, 0.03602000045776367, 0.03606425476074219, 0.03622019195556641, 0.035962944030761716, 0.03654108810424805, 
0.03612284851074219, 0.03638227081298828, 0.036133087158203125, 0.036063041687011715, 0.036001983642578124, 0.036609569549560544, 0.035969505310058596, 0.035883007049560545, 0.035833854675292966, 0.035931583404541015, 0.0363372802734375, 0.03620070266723633, 0.036141761779785155, 0.03598153686523437, 0.03603638458251953, 0.036184127807617185, 0.036141151428222655, 0.036257633209228514, 0.036067329406738284, 0.03599078369140625, 0.03606195068359375, 0.03657932662963867, 0.03591936111450195, 0.035975006103515624, 0.03601996612548828, 0.03611856079101562, 0.03587161636352539, 0.03642704010009765, 0.035977535247802735, 0.03623772811889649, 0.036197887420654294, 0.03594905471801758, 0.036157440185546875, 0.03568592071533203, 0.03568268966674805, 0.03577439880371094, 0.036044960021972654, 0.03671449661254883, 0.03599769592285156, 0.03583504104614258, 0.03579580688476563, 0.03590668869018555, 0.035971969604492185, 0.0361512336730957, 0.0360055046081543, 0.03606719970703125, 0.03775955200195313, 0.035901054382324216, 0.035760513305664064, 0.03606528091430664, 0.03583990478515625, 0.03595478439331055, 0.03577036666870117, 0.03604406356811524, 0.036006622314453125, 0.03658083343505859, 0.036073566436767575, 0.03603094482421875, 0.036297889709472654, 0.03630707168579102, 0.03606195068359375, 0.03595785522460938, 0.035973983764648436, 0.03592166519165039, 0.03587916946411133, 0.03597721481323242, 0.03605299377441406, 0.0361451530456543, 0.03606118392944336, 0.03605644989013672, 0.036055072784423825, 0.036303455352783204, 0.03579404830932617, 0.036063968658447264, 0.03604288101196289, 0.03600387191772461, 0.0361187858581543, 0.03651968002319336, 0.03619839859008789, 0.03620457458496094, 0.03622444915771485, 0.036225566864013674, 0.03621014404296875, 0.036276126861572264, 0.03619903945922852, 0.036354049682617184, 0.03615129470825195, 0.03662361526489258, 0.03645481491088867, 0.03773382568359375, 0.03681923294067383, 0.03677452850341797, 0.03652403259277344, 0.03627763366699219, 0.0365082893371582, 0.036439456939697266, 0.03637478256225586, 0.03648956680297852, 0.03626803207397461, 0.036173824310302735, 0.03613491058349609, 0.03615129470825195, 0.03632310485839844, 0.0363276481628418, 0.03619839859008789, 0.03679846572875976, 0.03661004638671875, 0.03641753768920898, 0.036446208953857424, 0.0364769287109375, 0.036285793304443356, 0.03620521545410156, 0.03634175872802734, 0.03643612670898438, 0.03670819091796875, 0.03618406295776367, 0.03615510559082031, 0.0362367057800293, 0.03683590316772461, 0.03611228942871094, 0.03603260803222656, 0.035923839569091794, 0.03588892745971679, 0.03601878356933594, 0.03611827087402344, 0.03616358566284179, 0.03606038284301758, 0.03676224136352539, 0.036038814544677736, 0.03594211196899414, 0.035899681091308595, 0.03590342330932617, 0.03607516860961914, 0.03607183837890625, 0.035876033782958984, 0.03590636825561523, 0.036353439331054685, 0.03630339050292969, 0.03620614242553711, 0.03620915222167969, 0.03626803207397461, 0.036211902618408204, 0.03611321640014648, 0.036466686248779294, 0.03630284881591797, 0.03726335906982422, 0.03645439910888672, 0.03617996978759765, 0.03664393615722656, 0.03623974227905274, 0.036355968475341796, 0.03649603271484375, 0.03667763137817383, 0.036259647369384765, 0.03636207962036133, 0.03669436645507813, 0.036245502471923825, 0.03659366226196289, 0.03633561706542969, 0.0358744010925293, 0.03587836837768555, 0.03613177490234375, 0.035966976165771485, 0.03628985595703125, 0.03592393493652344, 0.03610076904296875, 0.03598531341552735, 
0.03617331314086914, 0.036160160064697265, 0.03611609649658203, 0.03599603271484375, 0.03647619247436523, 0.03838025665283203, 0.03629260635375976, 0.04167679977416992, 0.04527260971069336, 0.036280799865722656, 0.03595430374145508, 0.03612710571289063, 0.04012236785888672, 0.036511745452880856, 0.03688249588012695, 0.03630313491821289, 0.036055038452148434, 0.03613081741333008, 0.03653555297851562, 0.0362841911315918, 0.03624560165405273, 0.0360736312866211, 0.03607398223876953, 0.036012256622314456, 0.03596063995361328, 0.03603036880493164, 0.036192543029785154, 0.03611852645874023, 0.036226398468017576, 0.03770025634765625, 0.03651372909545898, 0.03634291076660156, 0.03621104049682617, 0.03634236907958984, 0.036206592559814454, 0.03607756805419922, 0.03615334320068359, 0.03640115356445312, 0.036378623962402344, 0.03623526382446289, 0.036148990631103516, 0.03603481674194336, 0.03625369644165039, 0.036206592559814454, 0.03620454406738281, 0.03602150344848633, 0.036913440704345706, 0.035952606201171876, 0.03616159820556641, 0.036416126251220704, 0.036187808990478514, 0.03623952102661133, 0.03616089630126953, 0.036211326599121095, 0.036359169006347655, 0.03613183975219727, 0.03635609436035156, 0.036103168487548826, 0.03622195053100586, 0.03646182250976562, 0.03603238296508789, 0.036207073211669924, 0.03614883041381836, 0.036237918853759765, 0.03611590576171875, 0.03659036636352539, 0.03644732666015625, 0.036268062591552734, 0.03659251022338867, 0.036232833862304685, 0.036280704498291017, 0.036157440185546875, 0.036274177551269535, 0.03696422576904297, 0.03644838333129883, 0.036431873321533206, 0.03657727813720703, 0.037107551574707034, 0.03643795013427734, 0.03629471969604492, 0.036467937469482424, 0.036571937561035155, 0.03640115356445312, 0.03636646270751953, 0.036179744720458984, 0.03617782211303711, 0.03622931289672852, 0.03638614273071289, 0.03658351898193359, 0.03627206420898438, 0.037009471893310546, 0.0363823356628418, 0.03617007827758789, 0.036044864654541015, 0.036099967956542967, 0.03643664169311524, 0.03623731231689453, 0.03608982467651367, 0.036087520599365236, 0.03613222503662109, 0.036125537872314456, 0.03623535919189453, 0.03623731231689453, 0.0361451530456543, 0.03598521423339844, 0.03749430465698242, 0.03603113555908203, 0.03633347320556641, 0.036199935913085936, 0.03634236907958984, 0.03630284881591797, 0.03639091110229492, 0.03634515380859375, 0.03598201751708984, 0.036063232421875, 0.03652780914306641, 0.03622943878173828, 0.036251102447509766, 0.036219425201416015, 0.036212734222412106, 0.036337665557861325, 0.03644416046142578, 0.036653118133544924, 0.036420928955078126, 0.036241214752197264, 0.03616262435913086, 0.036429824829101565, 0.036595455169677736, 0.03628851318359375, 0.03611177444458008, 0.03613955307006836, 0.037437503814697265, 0.0364661750793457, 0.036204319000244144, 0.03602710342407227, 0.03609500885009766, 0.03627721786499023, 0.03596492767333984, 0.03634995269775391, 0.03605657577514648, 0.036956161499023435, 0.03634175872802734, 0.03608575820922852, 0.03599113464355469, 0.03649577713012695, 0.03619622421264648, 0.03601353454589844, 0.036045120239257815, 0.03583011245727539, 0.036038654327392575, 0.036229118347167966, 0.03597107315063477, 0.036036544799804685, 0.03580096054077148, 0.03602163314819336, 0.035990016937255856, 0.03588876724243164, 0.03613708877563476, 0.03610038375854492, 0.036030750274658206, 0.03627964782714844, 0.036192928314208984, 0.036009376525878906, 0.03609849548339844, 0.035929790496826174, 0.03591145706176758, 
0.035934913635253904, 0.0359354248046875, 0.035839935302734376, 0.036104705810546874, 0.036090335845947265, 0.03598303985595703, 0.036167903900146486, 0.03583552169799804, 0.036931934356689455, 0.036507678985595704, 0.035800735473632814, 0.03611273574829101, 0.03589839935302734, 0.03618505477905273, 0.03611238479614258, 0.03588675308227539, 0.036304542541503906, 0.03603696060180664, 0.03586492919921875, 0.0361776008605957, 0.03612303924560547, 0.036327327728271484, 0.03640956878662109, 0.03625571060180664, 0.03633951950073242, 0.03632537460327148, 0.036345855712890625, 0.03634719848632813, 0.036444511413574215, 0.03643632125854492, 0.03665100860595703, 0.03625571060180664, 0.036138206481933596, 0.03640198516845703, 0.03628441619873047, 0.03628646469116211, 0.03611356735229492]",tokens/s,27.545464892141993,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8214.26176,11251.089408,0.0,10848.567296,10616.027648,s,1,14.6757431640625,14.6757431640625,0.0,14.6757431640625,14.6757431640625,14.6757431640625,14.6757431640625,[14.6757431640625],,kWh,0.0002211354205666661,2.438576929504324e-05,6.723922045799927e-05,0.0003127604103197086,,MB,3991.093248,11683.10272,0.0,11265.900544,11070.470656,s,10,3.806494598388672,0.38064945983886717,0.0010613886177521343,0.38066357421874997,0.3817038726806641,0.381945915222168,0.3821395492553711,"[0.378303466796875, 0.3804141540527344, 0.37995913696289063, 0.38218795776367187, 0.37997836303710936, 0.3803094482421875, 0.3811336669921875, 0.38165008544921875, 0.3816453247070313, 0.3809129943847656]",tokens/s,672.5347780826155,kWh,1.1155931145679019e-05,1.2303034757801454e-06,7.3734421127407835e-06,1.975967673419995e-05,tokens/kWh,12955677.536814986,MB,3995.324416,11685.199872,0.0,11267.997696,11070.473216,s,10,29.121740234375,2.9121740234375,0.006231435576901955,2.911367431640625,2.918307275390625,2.9211138427734378,2.923359096679688,"[2.91715771484375, 2.92392041015625, 2.916811767578125, 2.904305908203125, 2.907447509765625, 2.909410400390625, 2.90612353515625, 2.905554931640625, 2.913324462890625, 2.91768359375]",tokens/s,21.633322560042426,kWh,8.509224432765374e-05,9.385862275273755e-06,5.658166460685896e-05,0.0001510597712097864,tokens/kWh,417053.45834602026,,s,630,29.118463600158663,0.04621978349231538,0.000731681437829562,0.04609744071960449,0.0466427547454834,0.047004865455627445,0.04981674510955813,"[0.048306495666503906, 0.04608147048950195, 0.0490912971496582, 0.045331806182861326, 0.045789825439453126, 0.045467681884765625, 0.04558428955078125, 0.045733985900878904, 0.04556595230102539, 0.045686016082763674, 0.04591494369506836, 0.046354366302490235, 0.046356159210205077, 0.04577926254272461, 0.04579103851318359, 0.04587558364868164, 0.04597724914550781, 0.0456091194152832, 0.04601036834716797, 0.04616527938842773, 0.04610326385498047, 0.046677566528320315, 0.04650166320800781, 0.04614825439453125, 
0.045612640380859375, 0.04608041763305664, 0.045862911224365234, 0.04588544082641602, 0.045817089080810544, 0.046007038116455075, 0.04601388931274414, 0.04580524826049805, 0.04653366470336914, 0.046349342346191404, 0.0459989128112793, 0.04895334243774414, 0.04558415985107422, 0.04602041625976563, 0.045728160858154294, 0.04588297653198242, 0.04630684661865234, 0.046302238464355466, 0.04599123382568359, 0.04659868621826172, 0.04621468734741211, 0.046137825012207034, 0.04668323135375976, 0.046492095947265624, 0.045862945556640625, 0.04608659362792969, 0.04638719940185547, 0.04628275299072265, 0.046241790771484374, 0.046682113647460936, 0.04680851364135742, 0.04630176162719726, 0.04629913711547851, 0.04667145538330078, 0.0465882568359375, 0.046204990386962894, 0.046450687408447267, 0.046669822692871094, 0.05049139022827148, 0.048349342346191405, 0.04609820938110352, 0.045762367248535156, 0.04571142578125, 0.04586140823364258, 0.045623294830322264, 0.04595507049560547, 0.04582175827026367, 0.04560915374755859, 0.045623294830322264, 0.045766494750976563, 0.04618387222290039, 0.04646371078491211, 0.050405376434326174, 0.04571305465698242, 0.045650272369384765, 0.04584640121459961, 0.04629884719848633, 0.0464469108581543, 0.045866401672363284, 0.04615033721923828, 0.04657561492919922, 0.04627865600585938, 0.045674495697021485, 0.04614892959594727, 0.04599059295654297, 0.04607385635375977, 0.04598374557495117, 0.04605708694458008, 0.04607849502563476, 0.04591756820678711, 0.046163585662841795, 0.046801761627197264, 0.04599193572998047, 0.045932544708251956, 0.046342144012451174, 0.04874409484863281, 0.045867454528808596, 0.04657756805419922, 0.046343360900878906, 0.04600444793701172, 0.04631539154052734, 0.04673199844360352, 0.04895948791503906, 0.0462088623046875, 0.046471328735351564, 0.04598556900024414, 0.04625430297851563, 0.04649311828613281, 0.046426689147949216, 0.04612505722045898, 0.04619247817993164, 0.0463361587524414, 0.046104576110839846, 0.04633625411987305, 0.046626209259033206, 0.04642985534667969, 0.04635923385620117, 0.05069311904907227, 0.0459048957824707, 0.04639744186401367, 0.04669440078735351, 0.04674361419677735, 0.0478521614074707, 0.046235649108886716, 0.045776416778564456, 0.04552758407592773, 0.04540169525146484, 0.04568105697631836, 0.04584454345703125, 0.04567801666259766, 0.045660350799560545, 0.05007795333862305, 0.04538364791870117, 0.04607798385620117, 0.04576665496826172, 0.045714847564697264, 0.04570377731323242, 0.045897247314453125, 0.04602518463134766, 0.04589567947387695, 0.045852352142333984, 0.04621343994140625, 0.04632985687255859, 0.046020606994628906, 0.04637465667724609, 0.04627072143554688, 0.045864479064941406, 0.045858432769775394, 0.04615248107910156, 0.04585603332519531, 0.04593743896484375, 0.04620675277709961, 0.04600048065185547, 0.04800614547729492, 0.04620377731323242, 0.04617216110229492, 0.045765697479248045, 0.0459826545715332, 0.046150848388671874, 0.04615865707397461, 0.046059326171875, 0.046123199462890625, 0.04664115142822266, 0.046344287872314455, 0.04620483016967773, 0.04659404754638672, 0.046292991638183595, 0.04602812957763672, 0.04613391876220703, 0.04877721786499024, 0.04580352020263672, 0.04605952072143555, 0.046376705169677734, 0.04634368133544922, 0.04580188751220703, 0.04620640182495117, 0.050380897521972653, 0.045757247924804685, 0.04640873718261719, 0.04659471893310547, 0.04600428771972656, 0.04631167984008789, 0.0466770248413086, 0.046502208709716795, 0.046418590545654295, 0.048699966430664064, 0.046046497344970704, 
0.045543296813964844, 0.04567859268188477, 0.04542345428466797, 0.04560076904296875, 0.045520896911621096, 0.04588748931884765, 0.04561651229858398, 0.045705856323242186, 0.045795326232910154, 0.0456888313293457, 0.04560076904296875, 0.04570873641967774, 0.046252609252929684, 0.046047233581542966, 0.046071807861328126, 0.04572979354858398, 0.04574323272705078, 0.04581600189208984, 0.04604179382324219, 0.04627775955200195, 0.04622608184814453, 0.04593686294555664, 0.045762367248535156, 0.045762847900390625, 0.045899486541748045, 0.04567782211303711, 0.04581228637695312, 0.04610483169555664, 0.04573516845703125, 0.0458474235534668, 0.045991134643554685, 0.045945343017578126, 0.04609667205810547, 0.04644406509399414, 0.046631393432617185, 0.0463092155456543, 0.046207134246826174, 0.0462371826171875, 0.04627711868286133, 0.04621052932739258, 0.04654092788696289, 0.04641628646850586, 0.04618035125732422, 0.04613516616821289, 0.046569534301757816, 0.04629305648803711, 0.045848575592041016, 0.046020606994628906, 0.04635193634033203, 0.04602627182006836, 0.04595804977416992, 0.04611072158813476, 0.046309375762939455, 0.04612300872802735, 0.04656742477416992, 0.04684799957275391, 0.046467071533203126, 0.046255359649658205, 0.04670745468139648, 0.0464989128112793, 0.04613961410522461, 0.04817919921875, 0.046137344360351565, 0.045578369140625, 0.045260608673095705, 0.045588321685791015, 0.045887649536132814, 0.045889854431152344, 0.04582380676269531, 0.04554336166381836, 0.04584447860717773, 0.04566016006469727, 0.04575360107421875, 0.04586572647094726, 0.04596905517578125, 0.046174560546875, 0.04583385467529297, 0.046171680450439456, 0.045814624786376955, 0.045706558227539065, 0.04598444747924805, 0.04621871948242187, 0.046085758209228514, 0.045835041046142576, 0.045879425048828124, 0.046186496734619144, 0.045871105194091794, 0.04599398422241211, 0.046015777587890626, 0.046117599487304685, 0.04593388748168945, 0.04607984161376953, 0.04601737594604492, 0.045895454406738284, 0.04607305526733398, 0.04627276611328125, 0.04605414581298828, 0.04592230224609375, 0.046415870666503906, 0.04617830276489258, 0.04596556854248047, 0.04608383941650391, 0.04655292892456055, 0.04793561553955078, 0.04597948837280273, 0.04624303817749023, 0.04700675201416016, 0.045950912475585935, 0.04628070449829102, 0.04629094314575195, 0.045959232330322265, 0.04628009414672852, 0.04644713592529297, 0.04609024047851563, 0.04597747039794922, 0.046319198608398435, 0.046407806396484376, 0.04608451080322266, 0.04647932815551758, 0.04685609436035156, 0.04662694549560547, 0.04635446548461914, 0.04687459182739258, 0.04638105773925781, 0.04757833480834961, 0.04757379150390625, 0.04547174453735352, 0.045352222442626954, 0.0463326416015625, 0.04560076904296875, 0.04566835021972656, 0.045649921417236325, 0.04601974487304687, 0.04592438507080078, 0.045935009002685545, 0.045706817626953125, 0.04557295989990234, 0.04582928085327148, 0.04592521667480469, 0.04552908706665039, 0.045739551544189454, 0.04603094482421875, 0.04598604965209961, 0.04573388671875, 0.04621932983398438, 0.04622687911987305, 0.04586969757080078, 0.04583161544799805, 0.04600070571899414, 0.045780990600585936, 0.04570236968994141, 0.04637916946411133, 0.046283393859863284, 0.046241790771484374, 0.046458881378173826, 0.046180320739746095, 0.04591360092163086, 0.04580611038208008, 0.046226753234863284, 0.046262977600097656, 0.04597555160522461, 0.04613129425048828, 0.04659526443481445, 0.04610086441040039, 0.04581814575195312, 0.04651193618774414, 0.046316993713378905, 
0.04599388885498047, 0.046202014923095704, 0.04651391983032226, 0.04630527877807617, 0.04593824005126953, 0.04670064163208008, 0.04640937423706055, 0.04625888061523437, 0.04811494445800781, 0.04641971206665039, 0.046158241271972655, 0.04612156677246094, 0.046526561737060546, 0.046282657623291014, 0.046069759368896485, 0.04678860855102539, 0.0468388786315918, 0.04646572875976562, 0.046434528350830076, 0.04655513763427734, 0.047927745819091795, 0.04596121597290039, 0.04549836730957031, 0.0454389762878418, 0.04530995178222656, 0.04559360122680664, 0.04581071853637695, 0.0455761604309082, 0.045460990905761715, 0.04580812835693359, 0.04600831985473633, 0.04595264053344727, 0.045566337585449215, 0.04564352035522461, 0.04604524612426758, 0.04614524841308594, 0.04611529541015625, 0.046045185089111325, 0.04570707321166992, 0.04562963104248047, 0.0460307502746582, 0.046507614135742184, 0.046004737854003906, 0.045770206451416016, 0.04588934326171875, 0.046176990509033206, 0.04583203125, 0.04598799896240234, 0.04597760009765625, 0.04592435073852539, 0.04611072158813476, 0.045963264465332034, 0.04609395217895508, 0.04587353515625, 0.046015968322753904, 0.04641836929321289, 0.04613452911376953, 0.04597356796264648, 0.04648220825195312, 0.04640563201904297, 0.04670627212524414, 0.04624835205078125, 0.04641715240478515, 0.04622835159301758, 0.04614467239379883, 0.04656579208374023, 0.04630963134765625, 0.04595308685302734, 0.045956478118896485, 0.04656771087646484, 0.046284961700439456, 0.04628412628173828, 0.046483585357666016, 0.04639398574829102, 0.04603862380981445, 0.04630176162719726, 0.046354366302490235, 0.046147167205810545, 0.04628499221801758, 0.04663113784790039, 0.04645916748046875, 0.04668182373046875, 0.047568225860595705, 0.04812019348144531, 0.04596038436889648, 0.04576339340209961, 0.04548812866210938, 0.045385726928710936, 0.04557164764404297, 0.045916576385498044, 0.045501983642578125, 0.04526131057739258, 0.04566220855712891, 0.046427295684814456, 0.04619760131835938, 0.045600990295410156, 0.045639678955078124, 0.04592617416381836, 0.04574617767333984, 0.045897632598876956, 0.046418014526367186, 0.04600940704345703, 0.046029247283935544, 0.04609913635253906, 0.04643616104125976, 0.046157855987548825, 0.04570265579223633, 0.04593420791625977, 0.04606447982788086, 0.045795326232910154, 0.0458158073425293, 0.04628591918945312, 0.046121406555175784, 0.04571583938598633, 0.0461640625, 0.04648905563354492, 0.046125598907470707, 0.04601036834716797, 0.04641996765136719, 0.04597555160522461, 0.04584777450561523, 0.04634860610961914, 0.04613488006591797, 0.046008865356445314, 0.0461308479309082, 0.046550846099853514, 0.04641827011108399, 0.04611126327514648, 0.04618979263305664, 0.046107425689697265, 0.04597555160522461, 0.046202880859375, 0.04656899261474609, 0.04627264022827148, 0.04612745666503906, 0.046548545837402346, 0.046354110717773435, 0.04608287811279297, 0.046010303497314456, 0.04660745620727539, 0.04643299102783203, 0.04639148712158203, 0.046630912780761716, 0.04645632171630859, 0.04626278305053711, 0.04665718460083008, 0.04813619232177734, 0.045727680206298825, 0.04564096069335938, 0.045577022552490236, 0.04644659042358398, 0.04549647903442383, 0.04537737655639648, 0.0458054084777832, 0.04569718551635742, 0.04581171035766601, 0.04595097732543945, 0.04590387344360351, 0.045604480743408206, 0.0458202896118164, 0.04585827255249023, 0.04558492660522461, 0.04580684661865234, 0.04607257461547851, 0.04593414306640625, 0.045945121765136716, 0.04648771286010742, 0.046534366607666015, 
0.04615737533569336, 0.04561993789672852, 0.045848224639892576, 0.04600457763671875, 0.0458076171875, 0.04576265716552735, 0.04592819213867187, 0.046100639343261716, 0.049219585418701174, 0.04568880081176758, 0.045994014739990235, 0.04589158248901367, 0.04576870346069336, 0.045991744995117184, 0.04622560119628906, 0.046170112609863284, 0.04630876922607422, 0.046295486450195315, 0.046104736328125, 0.046205951690673826, 0.046564319610595706, 0.046384449005126956, 0.04591705703735351, 0.046145374298095704, 0.04618415832519531, 0.045953311920166016, 0.0459931526184082, 0.04650476837158203, 0.04641513442993164, 0.04607664108276367, 0.04696092987060547, 0.047002559661865236, 0.0529846076965332, 0.04500848007202148, 0.04603126525878906, 0.046343841552734376, 0.04637958526611328, 0.046392608642578125, 0.046207489013671874, 0.04663296127319336, 0.04665958404541016, 0.04810163116455078, 0.04579030227661133, 0.045601696014404294, 0.0477957763671875, 0.04543024063110351, 0.04544960021972656, 0.04565033721923828, 0.045792991638183594, 0.04554927825927734, 0.045589248657226564, 0.045846431732177735, 0.045944862365722657, 0.04559881591796875, 0.0455577278137207, 0.04561920166015625, 0.045725696563720705, 0.046188480377197264, 0.04613347244262695, 0.04641062545776367, 0.045962207794189455, 0.04636671829223633, 0.04627260971069336, 0.04633795166015625, 0.04600201416015625, 0.04582166290283203, 0.04593094253540039, 0.049423553466796874, 0.04548691177368164, 0.046096446990966794, 0.045931903839111325, 0.045711936950683596, 0.04606284713745117, 0.046381824493408205, 0.045868385314941404, 0.04576675033569336, 0.045948638916015624, 0.046107486724853514, 0.045914112091064455, 0.04639920043945313, 0.046506145477294925, 0.04638937759399414, 0.046004318237304685, 0.046497695922851565, 0.04753606414794922, 0.04642601776123047, 0.0466596794128418, 0.04673926544189453, 0.0461212158203125, 0.049977344512939455, 0.04592758560180664, 0.04597436904907227, 0.04649075317382813, 0.046700736999511716, 0.046432960510253904, 0.04630313491821289, 0.0464851188659668, 0.046746078491210936, 0.046413951873779294, 0.046607776641845705, 0.0468070068359375, 0.04645119857788086, 0.04671500778198242, 0.04684598541259766]",tokens/s,21.63575690842997,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = 
worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 56915 has 14.69 GiB memory in use. Of the allocated memory 14.29 GiB is allocated by PyTorch, and 313.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,853.331968,556.72832,0.0,178.25792,176.52224,s,1,7.511060546875,7.511060546875,0.0,7.511060546875,7.511060546875,7.511060546875,7.511060546875,[7.511060546875],,kWh,2.013667340417366e-05,2.214166881086207e-06,6.45611627600462e-06,2.8806956561264485e-05,,MB,1176.674304,669.974528,0.0,262.144,221.118976,s,10,0.2373123188018799,0.02373123188018799,7.72173514862464e-05,0.023694639205932615,0.023846604537963865,0.023847382164001465,0.023848004264831545,"[0.023846431732177733, 0.023769792556762696, 0.023693119049072266, 0.02362953567504883, 0.023674528121948243, 0.023655296325683594, 0.023848159790039063, 0.02381488037109375, 0.023684415817260742, 0.023696159362792967]",tokens/s,10787.472023891078,kWh,6.970796484486802e-07,7.687053776317147e-08,4.0972453886394427e-07,1.183674725075796e-06,tokens/kWh,216275632.63514575,MB,1211.006976,684.654592,0.0,276.824064,221.271552,s,10,13.6217958984375,1.3621795898437499,0.005329277517955597,1.3617802124023437,1.3692102783203126,1.3709354614257812,1.3723156079101562,"[1.3575599365234374, 1.3600166015625, 1.3583939208984375, 1.368826904296875, 1.3543009033203126, 1.37266064453125, 1.3644005126953125, 1.3580308837890624, 1.364061767578125, 1.3635438232421875]",tokens/s,46.24940827899681,kWh,3.945427498654954e-05,4.3513940986203914e-06,2.053856999753807e-05,6.4344239082708e-05,tokens/kWh,979108.6334709761,,s,630,13.616925313949594,0.02161416716499934,0.00044554585507595816,0.0215066556930542,0.021869215393066407,0.0222325496673584,0.023323972034454347,"[0.021236480712890624, 0.021513376235961914, 0.021559551239013673, 0.021807424545288084, 0.021418655395507812, 0.021406335830688475, 0.021258880615234375, 0.021897600173950194, 0.021334016799926758, 0.02127462387084961, 0.021506048202514647, 0.021445632934570313, 0.021560319900512694, 0.021444896697998046, 0.021406431198120118, 0.021496511459350585, 0.021620447158813477, 0.02152262306213379, 0.021377023696899415, 0.021294591903686523, 0.02125686454772949, 0.021400192260742187, 0.021376031875610352, 0.021254976272583007, 0.021309215545654295, 0.02146099281311035, 0.02182143974304199, 0.021465087890625, 0.02154105567932129, 0.02138297653198242, 0.0215285758972168, 0.021423744201660155, 0.021451135635375977, 0.021352767944335938, 0.021373823165893556, 0.0214653434753418, 0.021315488815307617, 0.021363168716430663, 0.021469600677490236, 0.024030271530151366, 0.0217542724609375, 0.021818784713745116, 0.021697439193725587, 0.021587167739868164, 0.02165635108947754, 0.021979455947875978, 0.021725215911865235, 0.02145987129211426, 0.02147439956665039, 0.021526176452636717, 0.022349855422973634, 0.021488832473754882, 
0.021376895904541015, 0.02139638328552246, 0.02154911994934082, 0.02137696075439453, 0.021604352951049805, 0.0214769287109375, 0.021598175048828126, 0.02154745674133301, 0.021344287872314453, 0.021366783142089844, 0.021515615463256837, 0.02103910446166992, 0.021428096771240236, 0.021523616790771485, 0.02137392044067383, 0.021559551239013673, 0.02133580780029297, 0.021301248550415038, 0.021358591079711914, 0.021294240951538087, 0.021968000411987303, 0.02149087905883789, 0.02148828887939453, 0.021571424484252928, 0.021542783737182616, 0.021405759811401366, 0.021499839782714844, 0.02141404724121094, 0.021464672088623047, 0.022016416549682616, 0.021377023696899415, 0.021331968307495116, 0.02183782386779785, 0.021456159591674805, 0.021559616088867188, 0.021505983352661132, 0.021764575958251955, 0.021630975723266603, 0.02229574394226074, 0.02196361541748047, 0.02179836845397949, 0.021625343322753905, 0.021481472015380858, 0.022567264556884764, 0.021362079620361327, 0.021455488204956054, 0.021685888290405273, 0.02166783905029297, 0.0214334716796875, 0.02133046340942383, 0.021445247650146486, 0.021576704025268553, 0.021385951995849608, 0.02145075225830078, 0.02139468765258789, 0.021360992431640625, 0.02156716728210449, 0.021318368911743164, 0.021366304397583007, 0.02145280075073242, 0.021376703262329103, 0.021602815628051757, 0.021485855102539062, 0.0213090877532959, 0.0218767032623291, 0.02401113510131836, 0.021763200759887694, 0.02183660888671875, 0.02154435157775879, 0.02144937515258789, 0.021519519805908202, 0.021488447189331055, 0.021364063262939454, 0.02138591957092285, 0.021095584869384766, 0.02143539237976074, 0.021462879180908202, 0.02152681541442871, 0.021454111099243164, 0.021505823135375978, 0.021439456939697267, 0.021528255462646483, 0.0215284481048584, 0.021602399826049806, 0.02222457695007324, 0.021737695693969727, 0.02164339256286621, 0.021497856140136717, 0.02161782455444336, 0.022481760025024413, 0.021597471237182617, 0.021271263122558594, 0.02153267288208008, 0.02141798400878906, 0.021300928115844726, 0.021309440612792968, 0.02148080062866211, 0.02167087936401367, 0.02307891273498535, 0.023346271514892578, 0.021427104949951172, 0.021526527404785157, 0.02138966369628906, 0.021448352813720702, 0.021378175735473633, 0.021322175979614257, 0.021395904541015625, 0.021366336822509765, 0.021342655181884766, 0.021215232849121093, 0.021369888305664064, 0.02134934425354004, 0.021639232635498048, 0.021739200592041017, 0.02140166473388672, 0.021434560775756836, 0.021565216064453125, 0.02146892738342285, 0.02161712074279785, 0.02147020721435547, 0.021308095932006835, 0.02142585563659668, 0.021338367462158205, 0.021242176055908203, 0.021329631805419923, 0.02126268768310547, 0.02137049674987793, 0.021432479858398436, 0.02135001564025879, 0.02145955276489258, 0.021856607437133788, 0.02143824005126953, 0.02148543930053711, 0.021528863906860353, 0.021448768615722657, 0.02254198455810547, 0.021444608688354492, 0.021233823776245116, 0.02158470344543457, 0.02138092803955078, 0.021612543106079102, 0.021438432693481446, 0.021710847854614256, 0.02139548873901367, 0.02126233673095703, 0.021428224563598632, 0.021377023696899415, 0.021393312454223632, 0.02146633529663086, 0.021441408157348633, 0.021370880126953123, 0.022338655471801756, 0.02152332878112793, 0.021620927810668947, 0.021360479354858398, 0.021301248550415038, 0.021393407821655275, 0.021346303939819337, 0.021821760177612306, 0.021613279342651368, 0.021425119400024416, 0.02141798400878906, 0.021368831634521485, 0.021389312744140625, 
0.021393184661865235, 0.021822975158691405, 0.028106592178344728, 0.023689855575561525, 0.02150614356994629, 0.021493919372558595, 0.02158415985107422, 0.021788543701171875, 0.021432159423828125, 0.02142972755432129, 0.021537151336669922, 0.02193833541870117, 0.021755903244018555, 0.021513856887817383, 0.02152662467956543, 0.02160691261291504, 0.022642656326293944, 0.02157548713684082, 0.02149996757507324, 0.021469120025634766, 0.021436416625976562, 0.0213602237701416, 0.021483936309814454, 0.02154003143310547, 0.021692480087280273, 0.022926080703735353, 0.021868383407592774, 0.021767936706542968, 0.021552799224853515, 0.02137785530090332, 0.021379072189331053, 0.021987552642822265, 0.02208745574951172, 0.021632160186767578, 0.02144540786743164, 0.021427871704101563, 0.02114784049987793, 0.021381919860839843, 0.021385215759277345, 0.021882688522338867, 0.021602495193481445, 0.021585376739501953, 0.02145510482788086, 0.02143052864074707, 0.021573663711547852, 0.02146892738342285, 0.021491552352905275, 0.021446815490722658, 0.021419296264648436, 0.021679296493530273, 0.021521663665771483, 0.021440927505493163, 0.02150003242492676, 0.02126790428161621, 0.02143289566040039, 0.021368928909301758, 0.02165670394897461, 0.021641151428222656, 0.02160111999511719, 0.02149190330505371, 0.021603488922119142, 0.021748384475708007, 0.022570112228393554, 0.021510208129882812, 0.02151299285888672, 0.021689952850341795, 0.02138912010192871, 0.021396095275878907, 0.021436256408691408, 0.02161680030822754, 0.021418176651000976, 0.02135379219055176, 0.021361152648925782, 0.02136787223815918, 0.021292255401611327, 0.02136649513244629, 0.021348512649536133, 0.021396928787231446, 0.021399423599243163, 0.02142892837524414, 0.021361759185791016, 0.02150716781616211, 0.02141276741027832, 0.021352575302124022, 0.021504447937011718, 0.021362016677856446, 0.021740352630615235, 0.02172480010986328, 0.021442592620849608, 0.021463424682617187, 0.02154412841796875, 0.021480255126953125, 0.021506048202514647, 0.02141798400878906, 0.021415935516357423, 0.021483295440673827, 0.021291231155395506, 0.021380447387695314, 0.021363391876220703, 0.021203104019165038, 0.021699424743652343, 0.021726112365722656, 0.022511199951171876, 0.021791231155395507, 0.021799999237060545, 0.021648319244384765, 0.021608448028564452, 0.022517759323120116, 0.02173734474182129, 0.02153913688659668, 0.021912895202636718, 0.02168502426147461, 0.021475040435791015, 0.021719039916992186, 0.02185215950012207, 0.021931615829467774, 0.021713312149047852, 0.02166374397277832, 0.021796863555908205, 0.021571584701538086, 0.021753856658935547, 0.021683584213256835, 0.021701248168945312, 0.021776384353637695, 0.02178665542602539, 0.021830751419067384, 0.02181193542480469, 0.021757120132446288, 0.0218239688873291, 0.021856767654418945, 0.0216760311126709, 0.021751808166503905, 0.022245376586914063, 0.021845760345458983, 0.021792831420898436, 0.021808544158935548, 0.021952384948730468, 0.021726112365722656, 0.021707839965820312, 0.021689504623413087, 0.02170217514038086, 0.02162099266052246, 0.021502208709716798, 0.021669408798217774, 0.021523040771484377, 0.021489280700683594, 0.021467424392700194, 0.02176585578918457, 0.021926048278808594, 0.02217763137817383, 0.02186412811279297, 0.021649696350097655, 0.021668224334716796, 0.022607519149780275, 0.021911104202270507, 0.021637920379638673, 0.021941919326782227, 0.021501920700073243, 0.02174569511413574, 0.021964319229125978, 0.022018207550048827, 0.02172934341430664, 0.021215103149414063, 0.021654848098754884, 
0.021450944900512695, 0.021463680267333984, 0.021476831436157227, 0.021384832382202148, 0.021523584365844728, 0.021538591384887694, 0.021530176162719728, 0.021807552337646485, 0.02147942352294922, 0.021550592422485353, 0.021504608154296875, 0.021501663208007813, 0.021645824432373048, 0.021761375427246092, 0.022051359176635744, 0.021648256301879883, 0.021705663681030274, 0.021592063903808592, 0.02143779182434082, 0.021566112518310546, 0.021395776748657228, 0.023150527954101562, 0.021544704437255858, 0.021659872055053712, 0.02168832015991211, 0.02164486312866211, 0.021528255462646483, 0.021572128295898437, 0.021745920181274414, 0.021972736358642577, 0.021752864837646484, 0.02178761672973633, 0.021981472015380858, 0.021750848770141603, 0.022692512512207032, 0.021782527923583983, 0.021788288116455078, 0.02184185600280762, 0.02171129608154297, 0.02167807960510254, 0.02164735984802246, 0.021727455139160155, 0.021724832534790038, 0.02165977668762207, 0.02154300880432129, 0.02133907127380371, 0.0214619197845459, 0.021438207626342774, 0.02154521560668945, 0.02156675148010254, 0.021545759201049806, 0.022007871627807617, 0.02141779136657715, 0.0214836483001709, 0.021448703765869142, 0.021372991561889647, 0.021419904708862306, 0.021506111145019532, 0.021420095443725588, 0.02153392028808594, 0.02195884895324707, 0.021135200500488283, 0.02152448081970215, 0.021420032501220702, 0.021526432037353514, 0.021352319717407225, 0.021341888427734376, 0.02133660888671875, 0.021372095108032226, 0.02150275230407715, 0.021373151779174804, 0.02140550422668457, 0.021366783142089844, 0.021950464248657226, 0.021575679779052736, 0.02169241523742676, 0.02153171157836914, 0.02156844711303711, 0.02156915283203125, 0.02149603271484375, 0.022657344818115235, 0.021507904052734374, 0.021608320236206055, 0.021773632049560548, 0.021376895904541015, 0.02155404853820801, 0.021467231750488282, 0.02162892723083496, 0.021494943618774413, 0.02142207908630371, 0.02140598487854004, 0.021397600173950194, 0.021281343460083008, 0.021360864639282228, 0.02146886444091797, 0.021516223907470704, 0.02133852767944336, 0.021405471801757812, 0.021343488693237305, 0.02146588706970215, 0.021483360290527345, 0.02125388717651367, 0.021575935363769533, 0.021437503814697265, 0.021586143493652343, 0.021332704544067382, 0.02195043182373047, 0.02157904052734375, 0.021408096313476562, 0.02149043273925781, 0.021531679153442382, 0.021385568618774414, 0.021508544921875, 0.021430112838745116, 0.02132371139526367, 0.023760351181030273, 0.022306880950927734, 0.02149190330505371, 0.02148387145996094, 0.02141321563720703, 0.02173766326904297, 0.021580255508422852, 0.021537952423095703, 0.021437280654907225, 0.02126233673095703, 0.022544384002685547, 0.02185625648498535, 0.021630975723266603, 0.021977088928222657, 0.021675519943237305, 0.021795232772827147, 0.021745792388916017, 0.0215817928314209, 0.021440511703491212, 0.021544223785400392, 0.0216375675201416, 0.02151775932312012, 0.0219451847076416, 0.021585119247436522, 0.021475263595581055, 0.021744543075561524, 0.02140889549255371, 0.021465919494628907, 0.02141916847229004, 0.02146828842163086, 0.02176358413696289, 0.021487775802612304, 0.02141539192199707, 0.021592863082885744, 0.02155939292907715, 0.0215629768371582, 0.021763872146606446, 0.021606752395629883, 0.021607744216918946, 0.0216746883392334, 0.021718175888061523, 0.02150079917907715, 0.021529823303222655, 0.021543872833251952, 0.0215163516998291, 0.021417728424072267, 0.021360767364501952, 0.021480960845947264, 0.02174835205078125, 
0.021801887512207033, 0.02326937675476074, 0.02180624008178711, 0.021788511276245117, 0.021608287811279298, 0.021486719131469725, 0.0214619197845459, 0.02257302474975586, 0.02166169548034668, 0.021725343704223632, 0.021858144760131835, 0.021579519271850586, 0.021688255310058593, 0.02163030433654785, 0.021555583953857423, 0.02142064094543457, 0.021446016311645506, 0.021441375732421875, 0.02157779121398926, 0.02131318473815918, 0.021421344757080078, 0.021473440170288086, 0.021369216918945312, 0.02115167999267578, 0.021372480392456053, 0.0214021110534668, 0.024878175735473632, 0.022911903381347656, 0.022460735321044922, 0.02148748779296875, 0.021491519927978514, 0.02141766357421875, 0.02157756805419922, 0.022239072799682617, 0.021631616592407227, 0.021651456832885742, 0.02146895980834961, 0.021473056793212892, 0.02152284812927246, 0.021473087310791016, 0.0215568962097168, 0.021619264602661132, 0.02139571189880371, 0.021346303939819337, 0.021520063400268553, 0.021483232498168945, 0.02184409523010254, 0.021637088775634767, 0.021454912185668945, 0.02143657684326172, 0.0214998722076416, 0.02136604881286621, 0.02233622360229492, 0.021698688507080077, 0.02163443183898926, 0.022107967376708983, 0.02165017509460449, 0.021835775375366212, 0.021809152603149414, 0.02177129554748535, 0.021502944946289064, 0.0214400634765625, 0.02154854393005371, 0.021502431869506837, 0.021457376480102538, 0.021403167724609377, 0.021461471557617188, 0.021397504806518555, 0.021450239181518553, 0.021422880172729492, 0.021454559326171876, 0.021368831634521485, 0.021394464492797853, 0.021459936141967773, 0.02132809638977051, 0.021341344833374024, 0.02138175964355469, 0.021444896697998046, 0.02145587158203125, 0.021709440231323242, 0.021944063186645508, 0.02167030334472656, 0.021497695922851563, 0.021485183715820314, 0.02149737548828125, 0.021431520462036134]",tokens/s,46.2659510480394,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1141.280768,1113.45664,0.0,710.934528,695.312896,s,1,8.30471875,8.30471875,0.0,8.30471875,8.30471875,8.30471875,8.30471875,[8.30471875],,kWh,2.6004952604148457e-05,2.8613328436554627e-06,8.22833991600408e-06,3.7094625363808e-05,,MB,1550.123008,1444.806656,0.0,1027.60448,994.274816,s,10,0.2531176338195801,0.025311763381958008,0.0006235929483128865,0.025140864372253418,0.025658957481384276,0.02638175058364868,0.026959985065460206,"[0.025498336791992187, 0.025018463134765623, 0.02520627212524414, 0.024891136169433593, 0.025244960784912108, 0.025137504577636718, 0.027104543685913085, 0.02503753662109375, 0.02483465576171875, 
0.025144224166870118]",tokens/s,10113.874570369699,kWh,7.316929707602552e-07,8.069194602120069e-08,4.8296683026065e-07,1.295351747042106e-06,tokens/kWh,197629717.63041797,MB,1583.689728,1490.944,0.0,1073.741824,994.277376,s,10,10.863289794921874,1.0863289794921875,0.00733338133485121,1.0868092041015625,1.0927218139648438,1.0963896545410157,1.0993239270019532,"[1.085389404296875, 1.083471435546875, 1.09190673828125, 1.09041162109375, 1.1000574951171875, 1.0889669189453124, 1.087889404296875, 1.08572900390625, 1.0721033935546875, 1.0773643798828125]",tokens/s,57.99348189113929,kWh,3.179406308632532e-05,3.506378564830596e-06,1.440068396554019e-05,4.97011256166961e-05,tokens/kWh,1267576.9254376085,,s,630,10.860648006439206,0.017239123819744778,0.00030783701395145,0.017205663681030273,0.017521799278259278,0.01771261577606201,0.018445625324249273,"[0.01732294464111328, 0.01755721664428711, 0.017332511901855467, 0.01722368049621582, 0.01721660804748535, 0.01738640022277832, 0.017352703094482422, 0.017745344161987305, 0.017385120391845702, 0.017465728759765625, 0.0173055362701416, 0.017255008697509764, 0.017295360565185547, 0.017431936264038085, 0.0174147834777832, 0.017348608016967772, 0.017384511947631836, 0.017404287338256837, 0.017705535888671874, 0.017475423812866212, 0.017283071517944337, 0.017443296432495117, 0.017369951248168945, 0.017554271697998048, 0.01747100830078125, 0.017129791259765624, 0.01718320083618164, 0.017190143585205077, 0.017092159271240234, 0.01705459213256836, 0.017168384552001953, 0.017362943649291994, 0.017076223373413087, 0.01723187255859375, 0.017145856857299805, 0.01715113639831543, 0.017054559707641602, 0.017127424240112304, 0.017055744171142577, 0.017036800384521485, 0.01703068733215332, 0.017034208297729492, 0.017868799209594728, 0.01701888084411621, 0.01699795150756836, 0.016896448135375976, 0.01701683235168457, 0.016895999908447267, 0.016990207672119142, 0.017340415954589843, 0.017268320083618165, 0.017135007858276367, 0.01725651168823242, 0.017025983810424805, 0.016953344345092772, 0.01696076774597168, 0.017114912033081055, 0.017066976547241212, 0.017250335693359375, 0.017004512786865236, 0.01703286361694336, 0.016865631103515626, 0.016936960220336913, 0.016932863235473633, 0.01699839973449707, 0.016912160873413087, 0.01782192039489746, 0.018691328048706053, 0.01748396873474121, 0.01703993606567383, 0.01699430465698242, 0.017063295364379883, 0.017011327743530272, 0.017125375747680666, 0.016977407455444335, 0.017058303833007812, 0.016922527313232422, 0.016909568786621094, 0.017015647888183594, 0.017098751068115235, 0.017114431381225585, 0.017023679733276367, 0.016977664947509765, 0.017016544342041015, 0.016867488861083985, 0.017060224533081054, 0.017084287643432616, 0.016975839614868163, 0.017035423278808595, 0.01699635124206543, 0.01729516792297363, 0.017205568313598634, 0.01701465606689453, 0.016971872329711913, 0.017039264678955078, 0.01702697563171387, 0.0170883846282959, 0.017006208419799804, 0.016994911193847655, 0.016925695419311524, 0.01687228775024414, 0.01695497512817383, 0.01737936019897461, 0.0199616641998291, 0.017322719573974608, 0.017217536926269532, 0.01733987236022949, 0.01718899154663086, 0.01728348731994629, 0.017295360565185547, 0.01719465637207031, 0.01701260757446289, 0.01710867118835449, 0.01713363265991211, 0.017590656280517578, 0.017012895584106444, 0.01753107261657715, 0.017112607955932616, 0.017084991455078125, 0.017273855209350587, 0.017154655456542968, 0.017176607131958007, 0.017164575576782228, 0.01705779266357422, 
0.017682432174682617, 0.017302848815917968, 0.017150304794311524, 0.01741619110107422, 0.017327423095703124, 0.0172794246673584, 0.01728879928588867, 0.017351520538330077, 0.017320959091186524, 0.017193632125854494, 0.017254560470581055, 0.01723904037475586, 0.017342464447021484, 0.017083391189575196, 0.017153120040893553, 0.01717091178894043, 0.01720569610595703, 0.017313119888305663, 0.017320575714111327, 0.01730089569091797, 0.017457792282104492, 0.01740947151184082, 0.017542816162109374, 0.01757049560546875, 0.017322208404541014, 0.017283071517944337, 0.017385536193847657, 0.017469375610351563, 0.017270784378051757, 0.01742959976196289, 0.017273408889770508, 0.01723583984375, 0.017131999969482423, 0.017228960037231445, 0.01721651268005371, 0.017213279724121094, 0.017188447952270508, 0.017406368255615236, 0.01722368049621582, 0.017188671112060547, 0.017166528701782226, 0.01736403274536133, 0.017336544036865235, 0.017612735748291017, 0.01739241600036621, 0.017423839569091798, 0.017463232040405275, 0.017459808349609376, 0.017352703094482422, 0.01733603286743164, 0.01730793571472168, 0.017296415328979492, 0.017592416763305665, 0.01739049530029297, 0.017628671646118164, 0.017378847122192384, 0.01756870460510254, 0.01730668830871582, 0.017441728591918945, 0.01721548843383789, 0.01729030418395996, 0.017459232330322264, 0.017287935256958008, 0.01734671974182129, 0.01707827186584473, 0.017017215728759767, 0.017096704483032226, 0.017088031768798827, 0.01698454475402832, 0.017038976669311524, 0.0169619197845459, 0.0172728328704834, 0.016950624465942383, 0.016923295974731446, 0.016887807846069337, 0.01693110466003418, 0.016879487991333007, 0.017186239242553712, 0.01714771270751953, 0.017054304122924805, 0.017055744171142577, 0.016889280319213867, 0.01700716781616211, 0.016957599639892577, 0.01689894485473633, 0.017078943252563476, 0.017019552230834963, 0.016985055923461913, 0.01698681640625, 0.016912448883056642, 0.017045440673828124, 0.01704960060119629, 0.01704115104675293, 0.017123584747314454, 0.017356800079345702, 0.017307647705078123, 0.0172359676361084, 0.018808832168579103, 0.01864089584350586, 0.0174202880859375, 0.017334432601928712, 0.017653919219970702, 0.017404735565185545, 0.01766080093383789, 0.017346431732177734, 0.017328256607055663, 0.017276927947998046, 0.019802112579345704, 0.017579839706420897, 0.017711296081542968, 0.017426719665527345, 0.017366527557373047, 0.017293535232543945, 0.017258432388305663, 0.01729724884033203, 0.017361120223999025, 0.01736240005493164, 0.017515039443969725, 0.017524736404418945, 0.0174202880859375, 0.01748908805847168, 0.017521472930908204, 0.017342464447021484, 0.01739072036743164, 0.017350624084472657, 0.017390655517578124, 0.017225568771362304, 0.01726051139831543, 0.017211999893188477, 0.01722972869873047, 0.01722694396972656, 0.01734543991088867, 0.017332223892211913, 0.01790086364746094, 0.017289920806884764, 0.017249759674072267, 0.017264352798461915, 0.017251136779785157, 0.017373184204101562, 0.017352703094482422, 0.017128992080688476, 0.01720163154602051, 0.017289215087890625, 0.01715328025817871, 0.017206016540527343, 0.017125247955322265, 0.0171910400390625, 0.01714348793029785, 0.01738947105407715, 0.017355167388916015, 0.017383424758911133, 0.017356800079345702, 0.017310720443725586, 0.017329151153564454, 0.017235679626464842, 0.01726848030090332, 0.01730614471435547, 0.017337823867797853, 0.017533472061157226, 0.017750015258789064, 0.017442815780639647, 0.017357919692993166, 0.017503007888793946, 0.017262399673461912, 
0.0172193603515625, 0.017340959548950194, 0.017354751586914064, 0.01743667221069336, 0.017739776611328126, 0.017778688430786133, 0.017768447875976562, 0.01772979164123535, 0.017689823150634765, 0.017713695526123046, 0.017872831344604493, 0.017803327560424805, 0.01796505546569824, 0.01773695945739746, 0.01773583984375, 0.017793632507324218, 0.01777187156677246, 0.01767286491394043, 0.017671199798583986, 0.01786774444580078, 0.01760665512084961, 0.01821446418762207, 0.01763167953491211, 0.017425727844238282, 0.017273536682128908, 0.017106943130493164, 0.017254400253295898, 0.017340415954589843, 0.017459360122680664, 0.017325279235839843, 0.01732371139526367, 0.01712428855895996, 0.017187936782836914, 0.017288095474243165, 0.01719705581665039, 0.017059839248657227, 0.01720319938659668, 0.017323936462402344, 0.017153888702392577, 0.0170100154876709, 0.017276159286499025, 0.01723971176147461, 0.01708233642578125, 0.017063871383666992, 0.017426591873168945, 0.01728268814086914, 0.017235904693603515, 0.017076736450195314, 0.017262399673461912, 0.017122432708740233, 0.01717753601074219, 0.017007839202880858, 0.017945375442504883, 0.017272096633911133, 0.017350528717041017, 0.017176448822021486, 0.017368032455444337, 0.017201152801513672, 0.01701091194152832, 0.01700022315979004, 0.01703424072265625, 0.017027135848999023, 0.016935487747192383, 0.01703536033630371, 0.017027360916137695, 0.01699635124206543, 0.016948863983154296, 0.0183536319732666, 0.018162399291992187, 0.017209535598754884, 0.018173952102661133, 0.017246208190917968, 0.017325408935546877, 0.01737286376953125, 0.017308639526367187, 0.017231327056884765, 0.017195552825927735, 0.017214559555053712, 0.01719548797607422, 0.017347200393676758, 0.0172740478515625, 0.01794316864013672, 0.017336320877075196, 0.017651872634887697, 0.017164384841918946, 0.01712441635131836, 0.01723257637023926, 0.017235391616821288, 0.017164447784423827, 0.01763983917236328, 0.017457855224609374, 0.017530399322509764, 0.0174289608001709, 0.017571840286254883, 0.01738319969177246, 0.01729318428039551, 0.017252191543579102, 0.01714227294921875, 0.01721548843383789, 0.017343711853027344, 0.01728499221801758, 0.017238943099975586, 0.017238016128540038, 0.017352672576904298, 0.01733180809020996, 0.01760915184020996, 0.017735679626464843, 0.017276832580566406, 0.017260480880737304, 0.01730156707763672, 0.017165952682495118, 0.017314271926879884, 0.01744895935058594, 0.017448095321655272, 0.017025888442993162, 0.0169881591796875, 0.017061344146728517, 0.017131391525268554, 0.017065792083740233, 0.017228639602661133, 0.01728233528137207, 0.01711177635192871, 0.01704547119140625, 0.017125408172607423, 0.017258495330810548, 0.01717862319946289, 0.01721958351135254, 0.017213119506835937, 0.017180992126464845, 0.017042560577392576, 0.01718671989440918, 0.01730227279663086, 0.01739516830444336, 0.017314271926879884, 0.017248544692993164, 0.017337568283081056, 0.017277727127075194, 0.017267839431762695, 0.01721798324584961, 0.017142208099365234, 0.017293312072753905, 0.017159616470336914, 0.017162912368774413, 0.017184415817260743, 0.017328256607055663, 0.017287296295166017, 0.017289535522460937, 0.017191808700561525, 0.01717849540710449, 0.017269535064697264, 0.01725971221923828, 0.0172838077545166, 0.017251712799072266, 0.0172807674407959, 0.017295616149902344, 0.017262144088745116, 0.017391263961791994, 0.017310367584228516, 0.01736716842651367, 0.01723391914367676, 0.017256063461303713, 0.017205631256103516, 0.01755340766906738, 0.01728102493286133, 
0.017485408782958983, 0.01746073532104492, 0.017414272308349608, 0.017459423065185546, 0.017392192840576172, 0.017342367172241212, 0.017438144683837892, 0.017257120132446287, 0.017193248748779297, 0.017280736923217774, 0.017347999572753906, 0.017299104690551757, 0.01759267234802246, 0.017250911712646484, 0.01720966339111328, 0.017448640823364257, 0.017327775955200197, 0.017469152450561524, 0.017377920150756836, 0.017284383773803712, 0.01719923210144043, 0.017207679748535157, 0.017453279495239258, 0.017510400772094727, 0.017557504653930665, 0.017516544342041016, 0.017512351989746093, 0.01725859260559082, 0.017133567810058595, 0.017258495330810548, 0.017005792617797853, 0.01745587158203125, 0.017063488006591798, 0.017028703689575195, 0.017101951599121094, 0.016923648834228516, 0.016992095947265626, 0.016972063064575195, 0.0169532470703125, 0.01711087989807129, 0.01697011184692383, 0.01690880012512207, 0.016986080169677734, 0.016994655609130858, 0.016954656600952148, 0.016961952209472657, 0.016889440536499024, 0.01702057647705078, 0.016892671585083008, 0.016982015609741212, 0.01695260810852051, 0.016984800338745117, 0.016875520706176757, 0.01701478385925293, 0.016910528182983397, 0.01696486473083496, 0.01694572830200195, 0.017039360046386717, 0.016891103744506836, 0.01699510383605957, 0.016955392837524414, 0.016955392837524414, 0.01702195167541504, 0.017031551361083985, 0.016992895126342774, 0.017113088607788086, 0.01706390380859375, 0.016963264465332032, 0.016983776092529296, 0.01696143913269043, 0.016952032089233397, 0.01692006492614746, 0.016895551681518555, 0.01720217514038086, 0.01703753662109375, 0.017006080627441408, 0.017031391143798827, 0.01739571189880371, 0.017019935607910156, 0.016952287673950194, 0.017130912780761717, 0.01712585639953613, 0.017035135269165037, 0.017116607666015624, 0.017072959899902342, 0.017051231384277343, 0.016942880630493165, 0.016943744659423828, 0.01700249671936035, 0.016950624465942383, 0.016984319686889647, 0.016883935928344727, 0.017194591522216796, 0.01696588706970215, 0.016890207290649415, 0.017022272109985352, 0.016951168060302734, 0.0169967041015625, 0.016968128204345703, 0.01695884895324707, 0.017019231796264647, 0.016976192474365236, 0.017043455123901367, 0.01702092742919922, 0.01704960060119629, 0.01716633605957031, 0.01708380889892578, 0.017098623275756834, 0.01711756706237793, 0.016929119110107423, 0.01717862319946289, 0.016969247817993163, 0.017013280868530274, 0.016985536575317383, 0.01693155288696289, 0.01704140853881836, 0.017083999633789062, 0.017010623931884766, 0.01700649642944336, 0.016982175827026366, 0.01681407928466797, 0.01694553565979004, 0.01696156883239746, 0.016960800170898436, 0.01707491111755371, 0.017102848052978514, 0.0170250244140625, 0.016986112594604492, 0.016986112594604492, 0.016963327407836914, 0.01702707290649414, 0.017031423568725584, 0.0172271671295166, 0.017051584243774415, 0.017043296813964843, 0.01708755111694336, 0.01707596778869629, 0.016990144729614257, 0.017031232833862306, 0.016955392837524414, 0.016861503601074218, 0.01699398422241211, 0.01702092742919922, 0.016920576095581053, 0.016904191970825197, 0.01699839973449707, 0.017352703094482422, 0.017223295211791993, 0.017012672424316408, 0.017033088684082032, 0.01699488067626953, 0.017027040481567383, 0.01694723129272461, 0.016904191970825197, 0.01696767997741699, 0.01696796798706055, 0.01703286361694336, 0.01696134376525879, 0.01701715278625488, 0.016959423065185546, 0.01697177505493164, 0.017088512420654296, 0.01848320007324219, 0.01926553535461426, 
0.01709459114074707, 0.017179744720458984, 0.01708336067199707, 0.01703638458251953, 0.01704569625854492, 0.01701545524597168, 0.017094720840454103, 0.017452384948730467, 0.016976800918579102, 0.017136703491210936, 0.017140256881713868, 0.017165632247924806, 0.017077056884765626, 0.017219615936279298]",tokens/s,58.00758846308958,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4055.81824,4705.878016,0.0,4303.355904,4034.388992,s,1,10.38453125,10.38453125,0.0,10.38453125,10.38453125,10.38453125,10.38453125,[10.38453125],,kWh,9.091571402081703e-05,1.002120531231178e-05,2.8903912011990496e-05,0.0001298408313451193,,MB,3953.811456,4764.598272,0.0,4347.396096,4202.697728,s,10,2.0142627105712894,0.20142627105712893,0.0008618269899018308,0.20163979339599608,0.2023385238647461,0.20247959518432618,0.20259245223999023,"[0.1994179229736328, 0.2017224884033203, 0.20169357299804688, 0.20163494873046875, 0.20163352966308593, 0.20057228088378906, 0.20101548767089844, 0.2023071746826172, 0.20262066650390625, 0.20164463806152344]",tokens/s,1270.936500271073,kWh,5.87879450491664e-06,6.483278221077813e-07,3.886230886760156e-06,1.0413353213784578e-05,tokens/kWh,24583819.903575577,MB,3960.147968,4764.598272,0.0,4347.396096,4213.085184,s,10,19.398732788085937,1.9398732788085937,0.015244832768237111,1.9365691528320315,1.9624701171875,1.9669422119140625,1.9705198876953125,"[1.9352606201171876, 1.9199698486328125, 1.9284493408203125, 1.9463487548828124, 1.971414306640625, 1.9241138916015625, 1.9371800537109376, 1.961476318359375, 1.9385614013671875, 1.935958251953125]",tokens/s,32.47634816573819,kWh,5.6353572819251444e-05,6.213953703678509e-06,3.750088555623921e-05,0.00010006841207916916,tokens/kWh,629569.2985530492,,s,630,19.396289432525627,0.030787761004008943,0.0004868354313175797,0.030673439979553224,0.0312884801864624,0.03165604944229126,0.03234010257720948,"[0.03231536102294922, 0.03248540878295898, 0.0317066879272461, 0.031237663269042967, 0.030626720428466796, 0.030557695388793944, 0.03089206314086914, 0.03042300796508789, 0.03067136001586914, 0.03189139175415039, 0.030785600662231447, 0.030821504592895507, 0.030433631896972655, 0.030530080795288086, 0.03080998420715332, 0.030303871154785156, 0.03031907272338867, 0.03076918411254883, 0.030695423126220703, 0.030375328063964844, 0.03053219223022461, 0.030537727355957032, 0.030310304641723632, 0.0304354248046875, 0.03154447937011719, 0.030664608001708983, 0.030451648712158202, 0.03060633659362793, 0.030579967498779295, 0.03074905586242676, 0.030454143524169922, 0.030498815536499024, 0.030312000274658205, 0.03156563186645508, 0.03083737564086914, 0.030659616470336912, 0.030556543350219727, 0.030587360382080077, 0.03070579147338867, 0.030510719299316407, 0.030847423553466796, 0.030588863372802734, 0.03052441596984863, 0.030357791900634767, 0.031828384399414066, 0.03041676712036133, 0.03057459259033203, 0.03039900779724121, 0.030361343383789062, 0.030539072036743165, 0.030550880432128905, 0.030525440216064452, 0.030566207885742186, 0.03055536079406738, 0.030531744003295898, 0.030436159133911133, 
0.030480384826660156, 0.030529151916503905, 0.03066304016113281, 0.030459903717041017, 0.03061065673828125, 0.030517536163330077, 0.03042355155944824, 0.03150534439086914, 0.030778751373291016, 0.030601791381835938, 0.030424640655517577, 0.030503360748291016, 0.03025868797302246, 0.030320512771606446, 0.03045849609375, 0.030463327407836915, 0.03014214324951172, 0.030484928131103515, 0.030097503662109375, 0.030443967819213866, 0.03042918395996094, 0.030393888473510742, 0.030230079650878906, 0.03032156753540039, 0.03050092887878418, 0.030197696685791017, 0.030219520568847656, 0.03017804718017578, 0.030207456588745116, 0.030451776504516602, 0.030353759765625, 0.03053379249572754, 0.030545888900756837, 0.03039232063293457, 0.030220287322998047, 0.0304005126953125, 0.030309600830078123, 0.030286624908447267, 0.030203392028808593, 0.030180896759033203, 0.03024380874633789, 0.030316543579101563, 0.03038412857055664, 0.030401695251464845, 0.030309152603149415, 0.0303125114440918, 0.03013164710998535, 0.030378559112548827, 0.030244863510131836, 0.03010089683532715, 0.030296672821044923, 0.030232576370239257, 0.03042099189758301, 0.030431232452392577, 0.03036908721923828, 0.030300863265991212, 0.030283008575439453, 0.030506975173950197, 0.03033782386779785, 0.030621696472167968, 0.030643871307373047, 0.03067843246459961, 0.03066771125793457, 0.031068159103393556, 0.03117465591430664, 0.030973215103149414, 0.031032032012939453, 0.030863040924072264, 0.032022144317626955, 0.03092959976196289, 0.03189132881164551, 0.03096575927734375, 0.03096540832519531, 0.03073420715332031, 0.030724576950073242, 0.030660608291625976, 0.03079167938232422, 0.03055615997314453, 0.03057254409790039, 0.031764287948608395, 0.030802112579345703, 0.030418943405151368, 0.030379104614257812, 0.030491455078125, 0.030466144561767577, 0.030304256439208983, 0.03042099189758301, 0.030379968643188475, 0.030338623046875, 0.030882303237915038, 0.030609151840209962, 0.030396671295166017, 0.030623743057250977, 0.030568288803100585, 0.030752767562866212, 0.030642175674438478, 0.0306910400390625, 0.030447999954223634, 0.030379135131835936, 0.030404544830322264, 0.030507328033447266, 0.030407360076904297, 0.03067398452758789, 0.030548927307128906, 0.030644447326660156, 0.030507808685302736, 0.03054080009460449, 0.030501888275146483, 0.03040563201904297, 0.03039190483093262, 0.03031395149230957, 0.03041391944885254, 0.030531423568725586, 0.030742528915405274, 0.030525440216064452, 0.030531744003295898, 0.0306911678314209, 0.03042508888244629, 0.030345375061035157, 0.0304486083984375, 0.03137379264831543, 0.031553951263427735, 0.030646272659301758, 0.030406656265258788, 0.030502304077148438, 0.03039811134338379, 0.030378976821899415, 0.030558176040649414, 0.03036774444580078, 0.03037593650817871, 0.030545663833618165, 0.030451135635375978, 0.030534463882446287, 0.03159577560424805, 0.03088262367248535, 0.03077494430541992, 0.030703968048095703, 0.0306362247467041, 0.03105513572692871, 0.03080633544921875, 0.03061577606201172, 0.030683135986328124, 0.030490016937255858, 0.03308108901977539, 0.03156899261474609, 0.032389022827148437, 0.030749696731567383, 0.03051193618774414, 0.030720191955566405, 0.030612543106079103, 0.030760959625244142, 0.03089795112609863, 0.030788864135742187, 0.030768896102905275, 0.031031295776367186, 0.0310435848236084, 0.030928543090820312, 0.030867103576660157, 0.030961631774902344, 0.030972192764282227, 0.03079648017883301, 0.03081564712524414, 0.03104947280883789, 0.031098560333251955, 0.031200159072875978, 
0.031049247741699218, 0.031244768142700195, 0.03119673538208008, 0.03089151954650879, 0.030809024810791015, 0.030623743057250977, 0.030674943923950194, 0.030445568084716795, 0.030592512130737305, 0.03028646469116211, 0.030377855300903322, 0.030315776824951172, 0.03042790412902832, 0.03047769546508789, 0.03055059242248535, 0.030756128311157226, 0.030653215408325194, 0.03055955123901367, 0.030575231552124025, 0.03053984069824219, 0.030665952682495116, 0.03047715187072754, 0.03131385612487793, 0.032350208282470705, 0.031109119415283205, 0.030926847457885744, 0.031155967712402345, 0.030793983459472655, 0.03084067153930664, 0.030795040130615233, 0.030757759094238283, 0.03208806228637695, 0.03100057601928711, 0.03113369560241699, 0.031090688705444337, 0.031082271575927734, 0.03085772705078125, 0.031077632904052733, 0.031091167449951173, 0.031045696258544923, 0.03098415946960449, 0.03105788803100586, 0.030850271224975585, 0.031009567260742187, 0.031096832275390625, 0.031139839172363282, 0.03107369613647461, 0.03139641571044922, 0.03114396858215332, 0.03146969604492188, 0.031119007110595703, 0.031244543075561522, 0.031233695983886717, 0.03125279998779297, 0.03120047950744629, 0.03114473533630371, 0.031320064544677735, 0.03146342468261719, 0.03137094306945801, 0.031473087310791015, 0.03639936065673828, 0.0318239688873291, 0.031367712020874024, 0.03144732856750488, 0.03109244728088379, 0.031117311477661135, 0.031047679901123046, 0.031084543228149415, 0.031053056716918947, 0.03122662353515625, 0.031103200912475586, 0.031249856948852538, 0.03101705551147461, 0.03138528060913086, 0.03118342399597168, 0.03134668731689453, 0.03122150421142578, 0.03112166404724121, 0.03174300765991211, 0.031004735946655274, 0.031196063995361328, 0.03130067253112793, 0.030997184753417967, 0.03129913520812988, 0.031204032897949218, 0.030971904754638672, 0.03137862396240235, 0.03240633773803711, 0.031008384704589845, 0.031226240158081054, 0.030975488662719725, 0.03088025665283203, 0.03093654441833496, 0.03083523178100586, 0.031766368865966794, 0.031311744689941405, 0.030665632247924804, 0.030426784515380858, 0.03049648094177246, 0.030496864318847655, 0.030402496337890626, 0.030272064208984376, 0.030433311462402343, 0.030517248153686522, 0.03217382431030273, 0.03048054313659668, 0.030339168548583983, 0.030383264541625977, 0.03047715187072754, 0.030222047805786134, 0.030417184829711914, 0.030525440216064452, 0.030353279113769532, 0.030509183883666992, 0.03072982406616211, 0.03087606430053711, 0.031127552032470703, 0.030793439865112304, 0.030752031326293946, 0.030518272399902343, 0.030672895431518556, 0.030325023651123047, 0.030407455444335936, 0.030468191146850586, 0.030505823135375976, 0.03042918395996094, 0.030316543579101563, 0.030519296646118164, 0.03041423988342285, 0.03026495933532715, 0.030327199935913086, 0.03046816062927246, 0.03029827117919922, 0.030583135604858397, 0.03052899169921875, 0.03065888023376465, 0.030626016616821287, 0.03040176010131836, 0.03034982490539551, 0.030240543365478517, 0.03043612861633301, 0.030252767562866212, 0.03057459259033203, 0.030245952606201172, 0.030389408111572265, 0.03058787155151367, 0.030306688308715822, 0.03038662338256836, 0.030369792938232422, 0.03033497619628906, 0.030435199737548827, 0.03062339210510254, 0.03037628746032715, 0.03044473648071289, 0.030853567123413087, 0.030587392807006834, 0.030406656265258788, 0.03181376075744629, 0.030916479110717772, 0.03144908714294434, 0.030520704269409178, 0.030597951889038084, 0.030506559371948242, 0.03049087905883789, 
0.030472192764282226, 0.03039161682128906, 0.03041756820678711, 0.030248224258422853, 0.03055686378479004, 0.03070368003845215, 0.030906368255615234, 0.030526559829711915, 0.03034000015258789, 0.03073023986816406, 0.030648319244384766, 0.03041279983520508, 0.03042835235595703, 0.03039427185058594, 0.030323328018188475, 0.030417280197143556, 0.030371744155883788, 0.030590208053588867, 0.030523231506347656, 0.03053785514831543, 0.030587135314941408, 0.030416479110717775, 0.030573280334472656, 0.03042736053466797, 0.03040460777282715, 0.030583040237426758, 0.03095871925354004, 0.030853759765625, 0.031025152206420898, 0.03075494384765625, 0.030764511108398437, 0.03109062385559082, 0.030832191467285157, 0.030925216674804686, 0.030767616271972657, 0.030881792068481444, 0.03082342338562012, 0.03094425582885742, 0.030676992416381835, 0.030910463333129884, 0.030883840560913086, 0.030910463333129884, 0.031033344268798828, 0.03134873580932617, 0.030969791412353516, 0.03101702308654785, 0.030848928451538086, 0.03085312080383301, 0.030953567504882814, 0.03102902412414551, 0.031224031448364258, 0.031152128219604492, 0.031246175765991212, 0.030851232528686524, 0.030650367736816408, 0.030528608322143554, 0.03185478401184082, 0.031157119750976563, 0.03105683135986328, 0.03187667274475098, 0.03153145599365234, 0.031098880767822266, 0.031123455047607423, 0.031006719589233397, 0.030857215881347655, 0.03079782485961914, 0.030823999404907227, 0.030800319671630858, 0.0308874568939209, 0.03136355209350586, 0.03088800048828125, 0.030729536056518555, 0.031024864196777344, 0.03076598358154297, 0.031102975845336913, 0.030920703887939452, 0.030924480438232423, 0.030993919372558593, 0.031092927932739257, 0.030764896392822264, 0.030591327667236327, 0.030742975234985353, 0.03087343978881836, 0.032000030517578125, 0.030941247940063477, 0.03118467140197754, 0.030931232452392578, 0.031053823471069338, 0.03080396842956543, 0.031160320281982422, 0.031129600524902344, 0.03120319938659668, 0.031180383682250977, 0.031086719512939454, 0.03123651123046875, 0.03077939224243164, 0.031178655624389647, 0.030795871734619142, 0.030827552795410156, 0.031036384582519533, 0.031250303268432617, 0.030881919860839845, 0.031115039825439453, 0.03111903953552246, 0.03083318328857422, 0.031053823471069338, 0.03082646369934082, 0.03159641647338867, 0.031147903442382812, 0.030938528060913087, 0.031382400512695315, 0.031287296295166016, 0.030859264373779297, 0.031088640213012695, 0.03338396835327148, 0.03162364768981934, 0.03118694305419922, 0.03174604797363281, 0.031696128845214847, 0.031807071685791014, 0.03124675178527832, 0.031055871963500976, 0.03117465591430664, 0.031064064025878906, 0.03076633644104004, 0.031095552444458007, 0.031086591720581053, 0.03097804832458496, 0.030932992935180665, 0.031272800445556644, 0.030854496002197265, 0.031005088806152343, 0.030679456710815428, 0.0307457275390625, 0.03066969680786133, 0.03055820846557617, 0.030310400009155275, 0.03071392059326172, 0.030441408157348634, 0.030645824432373046, 0.030495168685913086, 0.030424671173095705, 0.030788000106811524, 0.03061759948730469, 0.0305930233001709, 0.03076300811767578, 0.030478336334228515, 0.03073023986816406, 0.030506336212158203, 0.03171599960327148, 0.031682559967041016, 0.030877695083618165, 0.03083673667907715, 0.03090752029418945, 0.030714752197265625, 0.030533632278442382, 0.030719615936279296, 0.03051523208618164, 0.03058230400085449, 0.03053366470336914, 0.030451967239379884, 0.030577184677124024, 0.0305930233001709, 0.030352991104125978, 
0.030386592864990233, 0.0307128963470459, 0.030548927307128906, 0.030671871185302735, 0.030608383178710938, 0.0314654712677002, 0.031859935760498045, 0.03088982391357422, 0.03051206398010254, 0.030734336853027344, 0.030416095733642578, 0.030849632263183595, 0.0304268798828125, 0.030329280853271485, 0.03044940757751465, 0.030529632568359374, 0.03050499153137207, 0.030332704544067383, 0.03161935997009278, 0.030924863815307617, 0.030482368469238283, 0.03058639907836914, 0.030501344680786132, 0.030517248153686522, 0.03062579154968262, 0.030611679077148436, 0.030463775634765624, 0.030502912521362304, 0.030484352111816406, 0.030379295349121094, 0.030401376724243163, 0.030521120071411133, 0.0305830078125, 0.03044272041320801, 0.030777952194213868, 0.030582975387573243, 0.030567968368530273, 0.03077395248413086, 0.030668256759643554, 0.03069705581665039, 0.030575328826904297, 0.031065311431884766, 0.030842720031738283, 0.03070867156982422, 0.030744575500488282, 0.03050716781616211, 0.030619487762451172, 0.03078348731994629, 0.030776512145996093, 0.030824575424194336, 0.03105788803100586, 0.03193657684326172, 0.03226505661010742, 0.03065363121032715, 0.030564992904663087, 0.030703712463378906, 0.030900127410888673, 0.03089955139160156, 0.030574848175048828, 0.031183263778686524, 0.03077939224243164, 0.03057663917541504, 0.030441471099853516, 0.03042099189758301, 0.030398624420166016, 0.03041878318786621, 0.030393375396728515, 0.030698463439941405, 0.030498655319213867, 0.030758527755737303, 0.0308940486907959, 0.030700096130371092, 0.03080556869506836, 0.030865856170654297, 0.03048841667175293, 0.03191996765136719, 0.03069491195678711, 0.030603103637695313, 0.030544448852539062, 0.030370208740234376, 0.030532800674438476]",tokens/s,32.48043921965575,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 
492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1472.057344,1326.383104,0.0,947.912704,945.250304,s,1,8.178099609375,8.178099609375,0.0,8.178099609375,8.178099609375,8.178099609375,8.178099609375,[8.178099609375],,kWh,4.011619602085451e-05,4.417830343113296e-06,1.32619550540225e-05,5.7795981417990304e-05,,MB,1331.990528,1519.321088,0.0,1111.49056,1098.82368,s,10,1.6119050292968748,0.16119050292968748,0.0005511915126697848,0.16121893310546875,0.16174463500976563,0.16185521240234377,0.16194367431640624,"[0.1598438720703125, 0.16109616088867187, 0.16131968688964843, 0.16081129455566406, 0.1617200622558594, 0.161114013671875, 0.16135888671875, 0.16111817932128905, 0.16196578979492188, 0.16155708312988282]",tokens/s,1588.1828975474389,kWh,4.707219859811589e-06,5.191210867039741e-07,3.1204326038710095e-06,8.346773550386572e-06,tokens/kWh,30670533.764288314,MB,1345.765376,1653.538816,0.0,1245.708288,1164.242432,s,10,89.03407812500001,8.903407812500001,0.010136610228676235,8.90321484375,8.9131953125,8.9178447265625,8.9215642578125,"[8.887240234375, 8.8903544921875, 8.8948603515625, 8.9013642578125, 8.90372265625, 8.90270703125, 8.9090263671875, 8.910146484375, 8.912162109375, 8.922494140625]",tokens/s,7.075942305097012,kWh,0.000260164826486438,2.8697575803232348e-05,0.0001730542361135283,0.00046191663840319856,tokens/kWh,136388.24576180012,,s,630,89.03100794982909,0.14131906023782398,0.0002831820943250541,0.14131715393066407,0.14165845794677734,0.1417887367248535,0.14212909545898436,"[0.14145948791503907, 0.14063404846191407, 0.14118077087402345, 0.14077389526367187, 0.14100070190429687, 0.14090165710449218, 0.1408253173828125, 0.1409248046875, 0.1407427215576172, 0.1410826873779297, 0.14081024169921874, 0.14104165649414063, 0.14084259033203125, 0.14099270629882812, 0.14092218017578126, 0.14089222717285158, 0.1408658905029297, 0.1405506591796875, 0.14191206359863281, 0.14092825317382812, 0.14114483642578124, 0.1407093505859375, 0.14122364807128907, 0.1407639617919922, 0.1410618896484375, 0.14123388671875, 0.14120109558105468, 0.14125914001464843, 0.14094154357910157, 0.14102940368652345, 0.14082275390625, 0.14112265014648437, 0.1410016326904297, 0.14109405517578125, 0.14125535583496093, 0.14095747375488282, 0.14095747375488282, 0.14070387268066406, 0.14129808044433595, 0.141316162109375, 0.14115635681152344, 
0.14103961181640626, 0.14105599975585936, 0.14100070190429687, 0.1411903076171875, 0.14090675354003906, 0.14128182983398438, 0.1409761962890625, 0.1414606475830078, 0.14112582397460938, 0.14143350219726564, 0.14087484741210937, 0.1413939208984375, 0.14115933227539063, 0.14100405883789063, 0.14126153564453126, 0.14099430847167968, 0.14152114868164062, 0.14120072937011718, 0.14130181884765625, 0.1408948211669922, 0.14096989440917967, 0.14135836791992187, 0.14088572692871093, 0.1407283172607422, 0.140797607421875, 0.14076358032226563, 0.14064128112792967, 0.14102015686035158, 0.14123622131347657, 0.14100889587402343, 0.14116864013671876, 0.14103347778320313, 0.14112733459472657, 0.140813720703125, 0.14087420654296876, 0.14126124572753906, 0.1410738525390625, 0.1411774444580078, 0.14085894775390626, 0.14095199584960938, 0.14083482360839844, 0.14101298522949218, 0.1409863739013672, 0.1407665557861328, 0.1413351287841797, 0.14102125549316405, 0.14103961181640626, 0.1418887939453125, 0.14091746520996093, 0.14107565307617187, 0.14160263061523437, 0.1410345001220703, 0.14137957763671874, 0.14093927001953124, 0.14087577819824218, 0.14114163208007813, 0.14073663330078126, 0.14112159729003906, 0.14095936584472657, 0.141291259765625, 0.1413738250732422, 0.1412646026611328, 0.14135165405273437, 0.14123622131347657, 0.1409944610595703, 0.14084719848632812, 0.14152703857421875, 0.14121778869628906, 0.14128742980957032, 0.14146697998046875, 0.14108303833007813, 0.14127714538574218, 0.14101123046875, 0.14129766845703126, 0.14123826599121095, 0.1412954559326172, 0.1415836181640625, 0.14099539184570312, 0.14113095092773437, 0.14120643615722656, 0.1411658935546875, 0.14150930786132812, 0.14132838439941406, 0.14111279296875, 0.14087632751464843, 0.14100889587402343, 0.14088534545898437, 0.140742431640625, 0.14105279541015625, 0.1409410858154297, 0.14057455444335937, 0.140972412109375, 0.1408675842285156, 0.14127529907226563, 0.140922119140625, 0.14108230590820312, 0.14098728942871094, 0.14095881652832032, 0.14126991271972655, 0.14094070434570313, 0.1412122497558594, 0.14120140075683593, 0.14098335266113282, 0.14122285461425782, 0.14086537170410157, 0.14103363037109376, 0.14096588134765625, 0.14114405822753906, 0.1411051483154297, 0.14105804443359374, 0.14094950866699218, 0.14117478942871095, 0.14093516540527343, 0.14135910034179688, 0.14086697387695313, 0.14140617370605468, 0.1413658905029297, 0.14137753295898436, 0.14078073120117188, 0.14133740234375, 0.14125430297851563, 0.14134716796875, 0.1413137969970703, 0.14140786743164063, 0.1412918701171875, 0.14121603393554688, 0.14110086059570312, 0.14103570556640624, 0.14148403930664064, 0.14160896301269532, 0.14127923583984375, 0.1415755157470703, 0.14136131286621093, 0.14128134155273436, 0.14130630493164062, 0.1414983673095703, 0.14163885498046874, 0.14148236083984375, 0.14129930114746095, 0.14142684936523436, 0.14081878662109376, 0.14149375915527343, 0.14150743103027344, 0.14132838439941406, 0.14153114318847657, 0.1415167999267578, 0.1410723876953125, 0.1412458953857422, 0.1413120574951172, 0.14130975341796875, 0.14153952026367186, 0.14124761962890625, 0.14087033081054687, 0.14102537536621093, 0.14111549377441407, 0.1412437744140625, 0.1412491455078125, 0.14141798400878905, 0.14094140625, 0.14138575744628906, 0.14074844360351563, 0.14118576049804688, 0.141338623046875, 0.14120889282226562, 0.14171820068359375, 0.14092652893066407, 0.14119923400878906, 0.14101356506347656, 0.14121165466308594, 0.14130995178222655, 0.1415244140625, 0.14148637390136717, 
0.14099017333984376, 0.14118771362304688, 0.14125254821777344, 0.1413441925048828, 0.1413570556640625, 0.14167507934570311, 0.1414082489013672, 0.14083686828613282, 0.14121888732910157, 0.14102330017089842, 0.14108761596679686, 0.14108876037597656, 0.14177676391601562, 0.14139610290527344, 0.14163558959960937, 0.14149221801757814, 0.1413080596923828, 0.14131350708007812, 0.14142604064941405, 0.14151373291015626, 0.1414710693359375, 0.14130447387695313, 0.14119322204589843, 0.1412434539794922, 0.14107084655761717, 0.1411844787597656, 0.14153555297851564, 0.1413782043457031, 0.14137344360351561, 0.14123008728027345, 0.1413570556640625, 0.14129560852050782, 0.14134585571289063, 0.14143994140625, 0.14125260925292968, 0.14129766845703126, 0.14135267639160157, 0.14133071899414062, 0.14153523254394532, 0.14170620727539063, 0.14111888122558594, 0.14121014404296875, 0.1411152648925781, 0.1412181091308594, 0.14105996704101562, 0.1413324737548828, 0.14158029174804687, 0.14123213195800782, 0.14176255798339843, 0.14107034301757812, 0.14107571411132813, 0.1408577880859375, 0.14136761474609374, 0.14135501098632813, 0.14120652770996095, 0.1410795593261719, 0.14096588134765625, 0.1412196807861328, 0.14103388977050782, 0.1415078125, 0.14150096130371093, 0.14145330810546874, 0.1414593963623047, 0.14109458923339843, 0.14106866455078124, 0.141127685546875, 0.14121075439453126, 0.14146444702148436, 0.1415925750732422, 0.14147523498535156, 0.14118563842773438, 0.14137554931640625, 0.14125459289550782, 0.14156390380859374, 0.14137957763671874, 0.14138163757324218, 0.14120057678222656, 0.14118380737304687, 0.14109286499023438, 0.14136026000976562, 0.14123858642578124, 0.14148051452636717, 0.14127923583984375, 0.14153318786621094, 0.1418035125732422, 0.14136729431152345, 0.14140594482421875, 0.14169523620605468, 0.14147715759277343, 0.14160684204101562, 0.14127781677246093, 0.14146546936035156, 0.14147772216796875, 0.14130221557617187, 0.1414368896484375, 0.141285400390625, 0.1415166778564453, 0.1412359619140625, 0.14129600524902344, 0.1411788787841797, 0.14131365966796874, 0.14122227478027344, 0.14144685363769532, 0.14057533264160157, 0.14106214904785155, 0.14102313232421876, 0.1410397186279297, 0.14097389221191406, 0.14116064453125, 0.14121778869628906, 0.14116249084472657, 0.14119241333007812, 0.14129014587402344, 0.1412691192626953, 0.140943359375, 0.14146131896972655, 0.14128076171875, 0.14126150512695312, 0.1416697540283203, 0.14129817199707032, 0.14105625915527345, 0.1409814453125, 0.14132704162597656, 0.14131814575195312, 0.1410846710205078, 0.141053955078125, 0.1413017578125, 0.14129273986816407, 0.14124281311035156, 0.1413492431640625, 0.1412147216796875, 0.1410463104248047, 0.14133091735839845, 0.1413119354248047, 0.14121171569824217, 0.1411112976074219, 0.1413668212890625, 0.14136679077148437, 0.1416386260986328, 0.14119635009765624, 0.14147471618652344, 0.14152006530761718, 0.1413456268310547, 0.14147357177734374, 0.1412395782470703, 0.14143174743652343, 0.14132342529296876, 0.14156787109375, 0.14134127807617186, 0.1413492431640625, 0.14145481872558593, 0.1411212158203125, 0.14166717529296874, 0.14157005310058593, 0.1414956817626953, 0.14128192138671875, 0.14131382751464844, 0.14174435424804688, 0.1415755157470703, 0.14143760681152343, 0.1418260498046875, 0.14139744567871093, 0.14164784240722655, 0.14133670043945312, 0.14132392883300782, 0.14165811157226563, 0.1410662384033203, 0.14105923461914063, 0.14149449157714844, 0.14112422180175782, 0.1410908203125, 0.1413035888671875, 0.1414105224609375, 
0.1412912902832031, 0.14114633178710936, 0.14122979736328126, 0.14147132873535156, 0.1413987274169922, 0.14137548828125, 0.14150860595703124, 0.14153446960449217, 0.14137831115722657, 0.1412833251953125, 0.14143487548828124, 0.14144717407226562, 0.14104701232910155, 0.14170806884765624, 0.14126255798339843, 0.14171165466308594, 0.14156185913085936, 0.14114521789550782, 0.14112448120117188, 0.14133978271484374, 0.14135952758789064, 0.14126332092285157, 0.14150186157226563, 0.14135562133789062, 0.14144102478027343, 0.1411604461669922, 0.14132838439941406, 0.14138983154296875, 0.14135090637207032, 0.14146258544921875, 0.14141445922851562, 0.14108546447753906, 0.1413159942626953, 0.14144432067871093, 0.1418162841796875, 0.14188800048828126, 0.14148512268066407, 0.1415213165283203, 0.14119290161132814, 0.14145826721191407, 0.14137139892578124, 0.14140573120117186, 0.14166883850097656, 0.14143487548828124, 0.14152432250976563, 0.1412425994873047, 0.14132269287109375, 0.14122189331054688, 0.14141644287109376, 0.1420226593017578, 0.14211891174316407, 0.1415925750732422, 0.1414307861328125, 0.14179122924804688, 0.14129379272460937, 0.1421332550048828, 0.14138819885253906, 0.14116876220703126, 0.141025146484375, 0.14109849548339845, 0.1408599090576172, 0.14175628662109374, 0.1413796844482422, 0.14152093505859376, 0.14128034973144532, 0.14126908874511718, 0.14096263122558594, 0.14094744873046874, 0.14207589721679686, 0.14147366333007813, 0.14132850646972656, 0.14128073120117188, 0.14121533203125, 0.1412303009033203, 0.1411808624267578, 0.14171420288085937, 0.14133042907714843, 0.14161305236816407, 0.14135699462890625, 0.14132640075683595, 0.14123826599121095, 0.1412095947265625, 0.141486083984375, 0.14173954772949218, 0.14187362670898437, 0.1414819793701172, 0.141366943359375, 0.14103538513183594, 0.14115213012695313, 0.14150306701660156, 0.14143600463867187, 0.14144195556640626, 0.14122393798828126, 0.1413507537841797, 0.14107049560546875, 0.14114405822753906, 0.14135910034179688, 0.14179737854003907, 0.14163555908203124, 0.14138348388671876, 0.1414575653076172, 0.14106629943847657, 0.14138291931152344, 0.14153919982910157, 0.1421853485107422, 0.14193827819824217, 0.14152691650390625, 0.1418429718017578, 0.1414737548828125, 0.1414469451904297, 0.1414263000488281, 0.14154960632324218, 0.14178569030761717, 0.141623291015625, 0.14146322631835936, 0.1412360382080078, 0.14148031616210938, 0.14157632446289062, 0.1414430694580078, 0.14132147216796875, 0.14133938598632811, 0.14099046325683592, 0.14133453369140625, 0.14104371643066407, 0.14164787292480469, 0.1412768249511719, 0.14141270446777343, 0.14125033569335937, 0.141218017578125, 0.14125465393066405, 0.14102117919921875, 0.14165811157226563, 0.14155775451660157, 0.14152499389648437, 0.14134271240234375, 0.1411399688720703, 0.1412689971923828, 0.14137139892578124, 0.14142892456054687, 0.141627197265625, 0.14138914489746093, 0.14128195190429688, 0.14127308654785156, 0.14122393798828126, 0.1413570556640625, 0.14147517395019532, 0.14134701538085936, 0.141489990234375, 0.14188188171386718, 0.14152716064453125, 0.14145535278320312, 0.1416165771484375, 0.141664794921875, 0.141548828125, 0.14149913024902344, 0.14151589965820313, 0.14165020751953125, 0.14187171936035156, 0.14137344360351561, 0.14165402221679688, 0.14144102478027343, 0.14138163757324218, 0.14151174926757812, 0.14158944702148438, 0.14166426086425782, 0.14127513122558594, 0.14161882019042968, 0.1414470672607422, 0.1414599304199219, 0.14133859252929687, 0.14159858703613282, 0.14153334045410157, 
0.1415147247314453, 0.1416859588623047, 0.14156402587890626, 0.14190870666503907, 0.14160281372070313, 0.1415535430908203, 0.1415230712890625, 0.14143283081054686, 0.14166157531738283, 0.14140249633789062, 0.14101913452148437, 0.1411907501220703, 0.14109432983398437, 0.14126588439941407, 0.1412947540283203, 0.14145417785644532, 0.1417216033935547, 0.14114154052734376, 0.14110357666015624, 0.14177894592285156, 0.14144102478027343, 0.14146969604492188, 0.14183815002441405, 0.14160914611816405, 0.14141439819335938, 0.14153932189941407, 0.14139596557617187, 0.1414713592529297, 0.14152128601074218, 0.1418354949951172, 0.14162818908691407, 0.14148812866210939, 0.14158642578125, 0.14165577697753906, 0.1413962860107422, 0.14158026123046874, 0.14171884155273437, 0.1416956787109375, 0.14155775451660157, 0.14166835021972657, 0.14216192626953125, 0.14160415649414063, 0.14148268127441407, 0.14198080444335937, 0.14197616577148436, 0.14152668762207032, 0.14170994567871092, 0.14188934326171876, 0.14164601135253907, 0.14136524963378908, 0.1416458282470703, 0.1414998779296875, 0.14226486206054687, 0.14176856994628906, 0.1414739227294922, 0.14199349975585937, 0.1415172119140625, 0.14173802185058593, 0.14159181213378907, 0.14214810180664061, 0.14181800842285155, 0.141580322265625, 0.14169097900390626, 0.14160850524902344, 0.14166819763183594, 0.14131056213378906, 0.14222950744628907, 0.1419325408935547, 0.14162124633789064, 0.14177484130859375, 0.1416510467529297, 0.14232669067382814]",tokens/s,7.076186314267256,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4300.292096,4676.517888,0.0,4273.995776,4273.234432,s,1,11.028177734375,11.028177734375,0.0,11.028177734375,11.028177734375,11.028177734375,11.028177734375,[11.028177734375],,kWh,9.549145692915318e-05,1.0525770530128258e-05,2.8383356039995977e-05,0.00013440058349927742,,MB,4264.087552,4789.764096,0.0,4372.56192,4345.85856,s,10,2.0308370208740234,0.20308370208740234,0.000383202749293088,0.20305646514892578,0.20334493255615235,0.20369014358520507,0.20396631240844726,"[0.20315280151367188, 0.2030226593017578, 0.20282786560058594, 0.20253167724609375, 0.2040353546142578, 0.20272547912597655, 0.20326821899414063, 0.20309027099609375, 0.20298745727539064, 0.20319523620605467]",tokens/s,1260.563981100875,kWh,5.988674370833213e-06,6.604452851791038e-07,3.982089353469515e-06,1.063120900948183e-05,tokens/kWh,24080045.81338558,MB,4271.087616,4804.44416,0.0,4387.241984,4345.86112,s,10,24.215185546875006,2.4215185546875,0.012631393268439053,2.42160986328125,2.43009814453125,2.440537353515625,2.4488887207031254,"[2.4509765625, 2.42445458984375, 2.4277783203125, 2.4264951171875, 2.4243955078125, 2.412826416015625, 2.41882421875, 2.413531982421875, 2.41608056640625, 
2.399822265625]",tokens/s,26.016732301326606,kWh,7.073548224666692e-05,7.80203959170662e-06,4.490672753533124e-05,0.00012344424937370477,tokens/kWh,510351.8415773187,,s,630,24.212405700683572,0.038432390001085066,0.0006081264215322154,0.03830323219299316,0.038904248809814454,0.039313553237915036,0.041118613662719745,"[0.039761726379394534, 0.038781120300292966, 0.03931702423095703, 0.039003616333007814, 0.03888435363769531, 0.038686496734619144, 0.03873199844360352, 0.03884236907958984, 0.03872972869873047, 0.038850208282470704, 0.03873417663574219, 0.038639198303222655, 0.038642078399658206, 0.03877273559570313, 0.038434814453125, 0.039000064849853515, 0.03859251022338867, 0.039277664184570314, 0.039224224090576174, 0.03877040100097656, 0.03898128128051758, 0.03891878509521484, 0.0388485107421875, 0.038946399688720705, 0.03874857711791992, 0.038721023559570314, 0.038918495178222656, 0.03878908920288086, 0.03973721694946289, 0.03879353713989258, 0.03985203170776367, 0.03893619155883789, 0.03888102340698242, 0.0387815055847168, 0.03876607894897461, 0.038801151275634764, 0.038942943572998046, 0.03885526275634766, 0.03869081497192383, 0.03867155075073242, 0.038679359436035156, 0.038866943359375, 0.038903263092041014, 0.038756767272949216, 0.038885505676269534, 0.039400577545166016, 0.03928972625732422, 0.03891312026977539, 0.03895593643188477, 0.038816959381103515, 0.039050048828125, 0.038811649322509766, 0.03877638244628906, 0.038752574920654294, 0.0387564811706543, 0.03879459381103516, 0.038833953857421874, 0.04045001602172851, 0.03922000122070313, 0.03835228729248047, 0.038378143310546876, 0.03842873764038086, 0.038364990234375, 0.03956268692016601, 0.03865248107910156, 0.038515872955322265, 0.03864662551879883, 0.038708576202392576, 0.038617759704589846, 0.0392355842590332, 0.03872294235229492, 0.03977423858642578, 0.04014755249023438, 0.03854336166381836, 0.03860262298583984, 0.038569889068603515, 0.03872716903686523, 0.03861372756958008, 0.038776832580566405, 0.03836102294921875, 0.0382935676574707, 0.038634815216064454, 0.03843923187255859, 0.038330078125, 0.038177440643310544, 0.038109184265136715, 0.03810070419311523, 0.038147552490234375, 0.038091487884521484, 0.038141376495361326, 0.038223648071289064, 0.03807526397705078, 0.03838339233398438, 0.03822409439086914, 0.03809795379638672, 0.038032352447509764, 0.03816175842285156, 0.03823833465576172, 0.038180416107177734, 0.0382611198425293, 0.03831049728393555, 0.038316032409667966, 0.038333694458007814, 0.03827584075927734, 0.03818835067749023, 0.038185630798339844, 0.03836716842651367, 0.03815024185180664, 0.03815401458740234, 0.03811126327514648, 0.03825020980834961, 0.03814038467407226, 0.038354942321777344, 0.03844496154785156, 0.038629470825195314, 0.038330368041992184, 0.03838307189941406, 0.03831558227539063, 0.03844979095458984, 0.03872137451171875, 0.03837776184082031, 0.03861705780029297, 0.03885843276977539, 0.03888169479370117, 0.03858553695678711, 0.03930825424194336, 0.03930931091308594, 0.038604801177978515, 0.038289054870605466, 0.03842697525024414, 0.0383526725769043, 0.03836336135864258, 0.03811328125, 0.03792486572265625, 0.03804159927368164, 0.0380579833984375, 0.03803705596923828, 0.038056385040283205, 0.03798128128051758, 0.038128543853759765, 0.03796377563476563, 0.03802457427978516, 0.03793686294555664, 0.03806902313232422, 0.03827302551269531, 0.03827519989013672, 0.03818822479248047, 0.03822060775756836, 0.03826998519897461, 0.03856278228759766, 0.03863552093505859, 0.03836703872680664, 0.03826457595825195, 
0.03825708770751953, 0.038604446411132816, 0.03853945541381836, 0.0383587532043457, 0.038523326873779296, 0.038456478118896485, 0.03850300979614258, 0.0388590087890625, 0.03846323013305664, 0.03832857513427734, 0.038492385864257815, 0.038302719116210936, 0.038562175750732425, 0.039303585052490236, 0.038242305755615234, 0.038164321899414065, 0.03825884628295898, 0.03837702560424805, 0.03820899200439453, 0.04126537704467773, 0.03921350479125976, 0.04157676696777344, 0.038852222442626955, 0.043234718322753905, 0.03839894485473633, 0.038384990692138674, 0.03836569595336914, 0.03827443313598633, 0.03835740661621094, 0.038346527099609375, 0.03814656066894531, 0.03828131103515625, 0.03818905639648437, 0.038580223083496096, 0.03840985488891602, 0.038080257415771486, 0.03885260772705078, 0.03824838256835938, 0.0379617919921875, 0.03792835235595703, 0.037943904876708984, 0.03787926483154297, 0.038182464599609375, 0.03792995071411133, 0.03826073455810547, 0.04267769622802734, 0.038117950439453124, 0.0381275520324707, 0.03796563339233398, 0.03790873718261719, 0.03785318374633789, 0.03818700790405274, 0.03825571060180664, 0.038152416229248046, 0.03900691223144531, 0.038152191162109376, 0.03928473663330078, 0.03851468658447266, 0.03824028778076172, 0.03836652755737305, 0.03867465591430664, 0.03831033706665039, 0.03861446380615234, 0.038537792205810543, 0.03871887969970703, 0.03866479873657227, 0.03845529556274414, 0.03895430374145508, 0.038760929107666015, 0.03863337707519531, 0.03894611358642578, 0.03877769470214844, 0.04321615982055664, 0.03856063842773438, 0.03858227157592774, 0.03863142395019531, 0.038358177185058594, 0.038120094299316405, 0.03825475311279297, 0.03824969482421875, 0.03827180862426758, 0.03813100814819336, 0.03831878280639649, 0.03826483154296875, 0.03824560165405273, 0.03829840087890625, 0.03836076736450195, 0.03823996734619141, 0.03828316879272461, 0.038395713806152344, 0.03831891250610352, 0.03823001480102539, 0.03822598266601562, 0.03825459289550781, 0.03869900894165039, 0.03846684646606445, 0.03827916717529297, 0.03830652618408203, 0.03859817504882813, 0.03933452987670898, 0.03883375930786133, 0.03839049530029297, 0.0385840950012207, 0.03833878326416015, 0.03826892852783203, 0.038338558197021484, 0.038507713317871096, 0.03834284973144531, 0.03844134521484375, 0.038321983337402346, 0.03835478210449219, 0.038273632049560545, 0.03826816177368164, 0.03818118286132813, 0.0380113296508789, 0.038346752166748044, 0.03800844955444336, 0.03778188705444336, 0.03794944000244141, 0.037631999969482424, 0.04075929641723633, 0.03950735855102539, 0.03802787017822266, 0.03805094528198242, 0.03799267196655273, 0.037929630279541014, 0.038225921630859375, 0.037955551147460936, 0.03819270324707031, 0.03824278259277344, 0.0381317138671875, 0.03831808090209961, 0.03834185409545898, 0.038144798278808595, 0.03927449417114258, 0.03841843032836914, 0.03832831954956055, 0.03824844741821289, 0.03827907180786133, 0.038176769256591796, 0.03847792053222656, 0.03824025726318359, 0.037967872619628903, 0.03975689697265625, 0.0390399055480957, 0.038234111785888675, 0.03811328125, 0.043096065521240234, 0.038537216186523435, 0.0385986557006836, 0.03838742446899414, 0.03834089660644531, 0.038721057891845705, 0.0386278076171875, 0.03848540878295898, 0.03834940719604492, 0.038191104888916014, 0.03811123275756836, 0.03810067367553711, 0.03850271987915039, 0.03852262496948242, 0.038655231475830075, 0.03910815811157226, 0.03855401611328125, 0.038330398559570315, 0.038217727661132815, 0.038760448455810545, 
0.03849216079711914, 0.038174720764160154, 0.03851011276245117, 0.038141857147216796, 0.038169151306152345, 0.038052928924560546, 0.03815628814697265, 0.03807881546020508, 0.03797257614135742, 0.03796134567260742, 0.037900672912597654, 0.037967647552490234, 0.0379365119934082, 0.03822463989257813, 0.0379901123046875, 0.03836147308349609, 0.038316032409667966, 0.037967552185058595, 0.03789651107788086, 0.03811328125, 0.0386962890625, 0.03822659301757812, 0.03850022506713867, 0.03832640075683594, 0.0380497932434082, 0.038209537506103515, 0.038234111785888675, 0.03823382568359375, 0.03860508728027344, 0.038274208068847654, 0.03831894302368164, 0.038249664306640625, 0.03864863967895508, 0.03840752029418945, 0.03818713760375977, 0.0382817611694336, 0.03816348648071289, 0.03828425598144531, 0.03913318252563477, 0.03824639892578125, 0.03830537414550781, 0.03859292984008789, 0.03830374526977539, 0.038391807556152346, 0.03841024017333984, 0.03837484741210938, 0.03823263931274414, 0.038166465759277346, 0.03809286499023438, 0.03815971374511719, 0.03799311828613281, 0.0381317138671875, 0.03821542358398437, 0.03837772750854492, 0.039439521789550784, 0.03823702239990234, 0.03829276657104492, 0.03819916915893555, 0.03944038391113281, 0.03869696044921875, 0.03869257736206055, 0.038418720245361325, 0.038884864807128904, 0.03838617706298828, 0.03809689712524414, 0.03816243362426758, 0.038010879516601564, 0.0381030387878418, 0.03790848159790039, 0.03791667175292969, 0.038026302337646485, 0.037792510986328125, 0.03800889587402344, 0.038066303253173825, 0.03807401657104492, 0.03802556610107422, 0.03817062377929688, 0.03809075164794922, 0.038391807556152346, 0.03866566467285156, 0.03834105682373047, 0.038586238861083984, 0.03816883087158203, 0.038637569427490234, 0.03841843032836914, 0.03842201614379883, 0.038685184478759765, 0.03859167861938476, 0.038427295684814455, 0.03832783889770508, 0.03823843383789063, 0.03815222549438477, 0.04024169540405274, 0.039806495666503905, 0.039317825317382815, 0.03830374526977539, 0.03859247970581055, 0.03841641616821289, 0.03812351989746094, 0.03824435043334961, 0.038209537506103515, 0.038316032409667966, 0.03867420959472656, 0.03835452651977539, 0.038285118103027344, 0.038232894897460935, 0.038147903442382815, 0.038015167236328126, 0.037978111267089845, 0.03815135955810547, 0.03848822402954102, 0.038343326568603515, 0.0383076171875, 0.038203617095947266, 0.0382740478515625, 0.03839599990844727, 0.03877571105957031, 0.03832783889770508, 0.03835337448120117, 0.038432769775390625, 0.038201343536376955, 0.03952265548706055, 0.03838521575927734, 0.038035102844238285, 0.03808540725708008, 0.0382562255859375, 0.038422271728515624, 0.03825436782836914, 0.04207417678833008, 0.03819756698608399, 0.03803145599365235, 0.03835065460205078, 0.03798636627197265, 0.03801961517333984, 0.038067264556884764, 0.038238975524902345, 0.03860240173339844, 0.037833248138427734, 0.03791622543334961, 0.037898689270019534, 0.03811891174316406, 0.03849881744384766, 0.038034942626953124, 0.037983776092529294, 0.03805487823486328, 0.038191104888916014, 0.03832809448242187, 0.03811350250244141, 0.03824425506591797, 0.0381563835144043, 0.038125568389892575, 0.03801702499389648, 0.038250495910644534, 0.03830732727050781, 0.03816089630126953, 0.038319454193115235, 0.038285888671875, 0.03826492691040039, 0.038212993621826175, 0.03819174575805664, 0.03803897476196289, 0.03840185546875, 0.0394145278930664, 0.03842822265625, 0.03816803359985352, 0.038185760498046874, 0.03832844924926758, 0.03815996932983398, 
0.038295135498046876, 0.03837427139282226, 0.03824591827392578, 0.03810284805297852, 0.038391551971435546, 0.03811990356445313, 0.03816198348999023, 0.03813859176635742, 0.03794345474243164, 0.038234111785888675, 0.038563488006591796, 0.03827337646484375, 0.03828736114501953, 0.038401023864746094, 0.03820169448852539, 0.03835356903076172, 0.03931727981567383, 0.03852105712890625, 0.03840585708618164, 0.03837539291381836, 0.03850678253173828, 0.03821091079711914, 0.038507198333740236, 0.038233184814453126, 0.038183742523193356, 0.038190750122070315, 0.0382033920288086, 0.03795561599731445, 0.03781017684936523, 0.03805430221557617, 0.037967456817626956, 0.0379040641784668, 0.03798015975952149, 0.03794985580444336, 0.038211902618408206, 0.03791667175292969, 0.03787062454223633, 0.03799286270141602, 0.03806246566772461, 0.038038784027099606, 0.038107166290283205, 0.03830662536621094, 0.03785311889648438, 0.0379571533203125, 0.03789478302001953, 0.038190433502197266, 0.03821350479125977, 0.039131744384765625, 0.03918662261962891, 0.03838102340698242, 0.03823654556274414, 0.04036793518066406, 0.038433120727539065, 0.03828531265258789, 0.03829350280761719, 0.038186943054199216, 0.03833993530273438, 0.03882249450683594, 0.03839807891845703, 0.038337760925292966, 0.0385167350769043, 0.0385687370300293, 0.03848720169067383, 0.038315902709960936, 0.03876553726196289, 0.0384502067565918, 0.03857097625732422, 0.03995852661132813, 0.03879935836791992, 0.03822172927856445, 0.038277183532714844, 0.03797366333007812, 0.03802764892578125, 0.03816447830200195, 0.03811328125, 0.038201343536376955, 0.03832329559326172, 0.0384186897277832, 0.03835491180419922, 0.0392224006652832, 0.038239105224609375, 0.03830489730834961, 0.038049823760986326, 0.0384356803894043, 0.038053855895996094, 0.037894176483154296, 0.038012928009033206, 0.03835884857177734, 0.03822761535644531, 0.03782675170898438, 0.037868896484375, 0.03785420989990235, 0.03796297454833984, 0.037946144104003904, 0.037905406951904294, 0.03773747253417969, 0.03768703842163086, 0.03770582580566406, 0.03792284774780273, 0.03796156692504883, 0.040206623077392575, 0.0378081283569336, 0.037660606384277345, 0.037639392852783206, 0.038228286743164065, 0.037956127166748045, 0.037746078491210935, 0.03777596664428711, 0.03765657424926758, 0.03794921493530273, 0.03804956817626953, 0.03792940902709961, 0.03789004898071289, 0.03804086303710937, 0.03771846389770508, 0.03780364990234375, 0.03813430404663086, 0.037992000579833984, 0.03809503936767578, 0.03792444610595703, 0.03801129531860352, 0.038064510345458984, 0.03799039840698242, 0.03779507064819336, 0.03791948699951172, 0.03807356643676758, 0.03811638259887695, 0.03796966552734375, 0.03802316665649414, 0.038424575805664066, 0.04006438446044922, 0.03838185501098633, 0.03846793746948242, 0.0382457275390625, 0.03783308792114258, 0.038039840698242185, 0.038321727752685546, 0.03826937484741211, 0.03818700790405274, 0.0380682258605957, 0.03809836959838867, 0.03782099151611328]",tokens/s,26.01971930373747,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) 
Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 4 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 28.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 189869 has 14.74 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 80.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 106026 has 14.64 GiB memory in use. Of the allocated memory 14.24 GiB is allocated by PyTorch, and 312.03 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1349, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1142, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 868, in forward hidden_states = self.mlp(hidden_states) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 781, in forward final_hidden_states.index_add_(0, top_x, current_hidden_states.to(hidden_states.dtype)) RuntimeError: CUDA error: invalid configuration argument CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1583.255552,1596.915712,0.0,1201.668096,1189.151232,s,1,8.466310546875,8.466310546875,0.0,8.466310546875,8.466310546875,8.466310546875,8.466310546875,[8.466310546875],,kWh,3.5574402879161464e-05,3.916985400328343e-06,1.096223099199961e-05,5.045361927148942e-05,,MB,1582.055424,1791.950848,0.0,1382.023168,1351.367168,s,10,0.47527958679199217,0.04752795867919922,0.0001405060291031286,0.0475042724609375,0.047698503112792966,0.04776477127075195,0.04781778579711914,"[0.04783103942871094, 0.047349502563476566, 0.0474101448059082, 0.04753100967407226, 0.047494945526123045, 0.04768377685546875, 0.04737731170654297, 0.04761840057373047, 0.047513599395751956, 0.04746985626220703]",tokens/s,5386.303285776069,kWh,1.4258651037194828e-06,1.5724679265124117e-07,9.432636272390402e-07,2.5263755236097645e-06,tokens/kWh,101330937.38741547,MB,1582.055424,1833.893888,0.0,1423.966208,1407.328256,s,10,13.680080322265626,1.3680080322265626,0.005092286023816497,1.3692230224609374,1.372664990234375,1.3746703247070313,1.3762745922851563,"[1.3663924560546874, 1.3722193603515624, 1.36963427734375, 1.3708880615234376, 1.3766756591796876, 1.3609461669921874, 1.3623831787109375, 1.368811767578125, 1.360443603515625, 1.371685791015625]",tokens/s,46.05236118201845,kWh,3.959619600836351e-05,4.367061092489081e-06,1.8069751583160582e-05,6.203300868401318e-05,tokens/kWh,1015588.3349284658,,s,630,13.677172939300538,0.02170979831635006,0.0003257925203109579,0.021650959968566894,0.02201076183319092,0.022220206356048582,0.023109855136871343,"[0.02183737564086914, 0.021971391677856444, 0.021774335861206053, 0.02169036865234375, 0.021478784561157226, 0.021575775146484375, 0.021658143997192382, 0.021591840744018556, 0.02197711944580078, 0.021448287963867187, 0.021748319625854492, 0.02164531135559082, 0.021510143280029297, 0.02131155204772949, 0.021342144012451172, 0.021348352432250976, 0.021510143280029297, 0.0214466552734375, 0.021825536727905274, 0.02328985595703125, 0.023052288055419923, 0.021985279083251954, 0.021737472534179687, 0.02159395217895508, 0.02135820770263672, 0.021760543823242186, 0.022798336029052735, 0.022199455261230468, 0.022010656356811525, 0.02200720024108887, 0.021852672576904295, 0.021866655349731444, 0.021757759094238282, 0.021776575088500977, 0.021420032501220702, 0.02141548728942871, 0.02150239944458008, 0.021568864822387696, 0.02147190475463867, 0.021398944854736326, 0.02151059150695801, 0.02261724853515625, 0.02175187110900879, 0.021832639694213868, 0.021522144317626953, 0.021604639053344726, 0.021640447616577147, 0.021680192947387697, 0.02156819152832031, 0.021274368286132814, 0.02143401527404785, 0.021611103057861326, 0.021487071990966798, 0.0213319034576416, 0.021312095642089843, 0.02142790412902832, 
0.02147929573059082, 0.021328128814697266, 0.02131577682495117, 0.021321727752685548, 0.02128451156616211, 0.02140176010131836, 0.021917888641357422, 0.021655744552612304, 0.021626495361328126, 0.021563840866088868, 0.021434560775756836, 0.021370880126953123, 0.021405696868896484, 0.02141983985900879, 0.02174790382385254, 0.0214355525970459, 0.021416799545288086, 0.021501951217651367, 0.021476959228515623, 0.021436832427978517, 0.02153267288208008, 0.021452159881591798, 0.021387168884277344, 0.021381824493408204, 0.02141391944885254, 0.02147123146057129, 0.021594112396240234, 0.021640224456787108, 0.02175484848022461, 0.022253440856933593, 0.021506175994873047, 0.021579776763916016, 0.021671648025512694, 0.021647552490234374, 0.021651264190673827, 0.021673919677734375, 0.0216661434173584, 0.02167398452758789, 0.021784576416015625, 0.022504543304443358, 0.021859296798706053, 0.021761247634887695, 0.021699296951293946, 0.02184419250488281, 0.021999391555786132, 0.022024032592773437, 0.021909151077270508, 0.021924352645874022, 0.021831520080566408, 0.021915775299072265, 0.022086719512939453, 0.021868928909301758, 0.02190300750732422, 0.022864831924438476, 0.022024032592773437, 0.02205302429199219, 0.022237184524536133, 0.023139839172363282, 0.022139392852783202, 0.021809152603149414, 0.022282047271728514, 0.021975231170654298, 0.021981184005737304, 0.021725183486938478, 0.02167171287536621, 0.021833951950073243, 0.021721088409423828, 0.02184601593017578, 0.021690208435058592, 0.021622112274169922, 0.02175987243652344, 0.02165235137939453, 0.0216944637298584, 0.02162483215332031, 0.02165065574645996, 0.021822240829467772, 0.021741567611694337, 0.021651456832885742, 0.023347200393676756, 0.021571584701538086, 0.021628511428833007, 0.021868959426879882, 0.021728511810302734, 0.021762815475463868, 0.02168422317504883, 0.021634176254272462, 0.021855104446411134, 0.022321151733398437, 0.021559295654296876, 0.02191257667541504, 0.021799936294555664, 0.02167398452758789, 0.02168217658996582, 0.021618112564086914, 0.02167046356201172, 0.021621856689453125, 0.021588895797729494, 0.021708799362182618, 0.021550304412841798, 0.021771039962768555, 0.021788095474243162, 0.021668415069580078, 0.02167807960510254, 0.021939487457275392, 0.02173411178588867, 0.021538816452026367, 0.021575935363769533, 0.021631776809692384, 0.021573663711547852, 0.021658208847045897, 0.021582176208496093, 0.021749759674072267, 0.02162483215332031, 0.021790304183959962, 0.021707168579101564, 0.021611743927001954, 0.02174236869812012, 0.021544960021972655, 0.021747711181640626, 0.022042303085327147, 0.021905279159545897, 0.02189561653137207, 0.021768192291259765, 0.021710399627685548, 0.021640928268432617, 0.02163580894470215, 0.021563392639160156, 0.02168966484069824, 0.02205504035949707, 0.021524927139282227, 0.02162905693054199, 0.021596160888671875, 0.02168329620361328, 0.0215947208404541, 0.02175708770751953, 0.022016864776611328, 0.02207257652282715, 0.02200150489807129, 0.021978015899658202, 0.02183782386779785, 0.02178438377380371, 0.0217007999420166, 0.021542367935180665, 0.021638879776000975, 0.02160054397583008, 0.02154550361633301, 0.021573631286621094, 0.02167807960510254, 0.0220214729309082, 0.021695135116577148, 0.023735424041748047, 0.022834047317504883, 0.02168217658996582, 0.02164531135559082, 0.021594207763671876, 0.02172435188293457, 0.02173411178588867, 0.021724735260009766, 0.021698144912719725, 0.02151100730895996, 0.02159811210632324, 0.02150409507751465, 0.02153593635559082, 0.021566080093383788, 
0.021452735900878907, 0.021502208709716798, 0.021549055099487305, 0.02160963249206543, 0.02166012763977051, 0.02234998321533203, 0.021653728485107424, 0.021590015411376954, 0.02162073516845703, 0.021624704360961915, 0.021708927154541015, 0.021655040740966795, 0.021715456008911133, 0.02171494483947754, 0.021648479461669923, 0.021617151260375975, 0.021598623275756835, 0.0216759033203125, 0.021869983673095703, 0.021912288665771485, 0.022030336380004883, 0.021542144775390626, 0.021825920104980467, 0.0216231689453125, 0.021612543106079102, 0.02159324836730957, 0.021603168487548827, 0.02166713523864746, 0.02168288040161133, 0.021784191131591798, 0.02188502311706543, 0.021725471496582032, 0.021899328231811524, 0.021958976745605468, 0.02193315124511719, 0.021725439071655275, 0.02178895950317383, 0.021696575164794923, 0.021952512741088868, 0.02390630340576172, 0.02294528007507324, 0.021631296157836915, 0.021466880798339843, 0.0219880313873291, 0.023131904602050782, 0.022017248153686525, 0.021768991470336913, 0.021746719360351562, 0.021492704391479493, 0.021491519927978514, 0.021562944412231444, 0.02178835105895996, 0.021662656784057616, 0.021710336685180662, 0.021647872924804686, 0.021536767959594725, 0.021513599395751953, 0.02174835205078125, 0.021776384353637695, 0.021596160888671875, 0.021501951217651367, 0.021493759155273438, 0.021559072494506837, 0.02167830467224121, 0.023006271362304688, 0.0216114559173584, 0.021579776763916016, 0.02164531135559082, 0.021702079772949218, 0.02160915184020996, 0.021518207550048827, 0.021615711212158203, 0.021676959991455077, 0.022882303237915038, 0.02233100891113281, 0.023055871963500976, 0.02203926467895508, 0.021883039474487304, 0.021764095306396485, 0.021585472106933595, 0.021685888290405273, 0.02164579200744629, 0.02167788887023926, 0.021867040634155274, 0.02187468719482422, 0.02191564750671387, 0.021695903778076172, 0.02154147148132324, 0.021574783325195312, 0.021640064239501954, 0.02166374397277832, 0.02166713523864746, 0.021545663833618164, 0.02189107131958008, 0.021618688583374023, 0.02166476821899414, 0.021589599609375, 0.021747968673706056, 0.021606559753417968, 0.021809152603149414, 0.02157948875427246, 0.021686368942260743, 0.021678272247314452, 0.02155628776550293, 0.021498815536499023, 0.021338111877441408, 0.02132905578613281, 0.021635936737060546, 0.02145894432067871, 0.021622783660888673, 0.021510143280029297, 0.021548383712768553, 0.021942943572998048, 0.021370880126953123, 0.021404991149902342, 0.021426111221313476, 0.021405887603759766, 0.02141241645812988, 0.021458303451538086, 0.021371519088745117, 0.02144576072692871, 0.021429119110107423, 0.02146713638305664, 0.02132294464111328, 0.021500736236572265, 0.021407743453979493, 0.021335136413574218, 0.021429119110107423, 0.0213668155670166, 0.021663455963134765, 0.021549215316772462, 0.021553279876708985, 0.021579776763916016, 0.021440256118774415, 0.021451007843017577, 0.021552255630493164, 0.021453695297241213, 0.021604352951049805, 0.02163711929321289, 0.021700607299804688, 0.021626880645751953, 0.021825536727905274, 0.022101503372192383, 0.021578239440917968, 0.021725183486938478, 0.02179465675354004, 0.02183945655822754, 0.02163974380493164, 0.02174950408935547, 0.021835647583007812, 0.02169385528564453, 0.021611488342285157, 0.021695903778076172, 0.021874847412109374, 0.022085567474365235, 0.021901023864746093, 0.02167478370666504, 0.021771488189697267, 0.02161664009094238, 0.021573631286621094, 0.02161430358886719, 0.02176790428161621, 0.021520959854125978, 0.02138105583190918, 
0.021440576553344727, 0.021454368591308594, 0.02150217628479004, 0.02161484718322754, 0.021370655059814454, 0.021493247985839844, 0.021654239654541017, 0.021753856658935547, 0.02176630401611328, 0.02172297668457031, 0.02175584030151367, 0.02172115135192871, 0.021647327423095702, 0.021540735244750978, 0.021481632232666015, 0.021372928619384765, 0.02138057518005371, 0.02145948791503906, 0.021432319641113282, 0.021549055099487305, 0.021442047119140627, 0.021565792083740234, 0.021588031768798827, 0.021586015701293947, 0.021493759155273438, 0.02145020866394043, 0.021469024658203124, 0.02144326400756836, 0.02151219177246094, 0.021563392639160156, 0.02175116729736328, 0.021555360794067384, 0.021531103134155273, 0.021475040435791015, 0.021522432327270507, 0.021545248031616213, 0.021413888931274414, 0.021366432189941408, 0.021383520126342773, 0.02181065559387207, 0.02151683235168457, 0.021609760284423827, 0.021719263076782225, 0.021817440032958983, 0.022796703338623048, 0.021710687637329102, 0.02185641670227051, 0.021753856658935547, 0.02185420799255371, 0.021934080123901366, 0.021602304458618164, 0.02185625648498535, 0.021464256286621092, 0.02264147186279297, 0.02157513618469238, 0.021856128692626955, 0.02152284812927246, 0.021712928771972655, 0.021458623886108398, 0.0218338565826416, 0.021415456771850586, 0.02130179214477539, 0.021204416275024413, 0.021631872177124024, 0.0216944637298584, 0.02166579246520996, 0.022117759704589842, 0.02160089683532715, 0.021530624389648437, 0.021489664077758788, 0.021395456314086913, 0.02144256019592285, 0.021325824737548828, 0.021542911529541017, 0.021366336822509765, 0.021359039306640626, 0.02146303939819336, 0.021362688064575194, 0.021569536209106444, 0.0216494083404541, 0.021800960540771484, 0.02346931266784668, 0.02169523239135742, 0.022111360549926757, 0.0217445125579834, 0.02170457649230957, 0.021819520950317382, 0.022346912384033205, 0.022270816802978516, 0.021993471145629884, 0.022122047424316407, 0.02201171112060547, 0.023003776550292968, 0.02169856071472168, 0.02210383987426758, 0.021919776916503906, 0.021866016387939453, 0.021930784225463868, 0.021731199264526366, 0.021714080810546876, 0.02170966339111328, 0.02170863914489746, 0.02165692710876465, 0.021672767639160158, 0.021542207717895508, 0.02233318328857422, 0.021593023300170898, 0.021681312561035157, 0.02161248016357422, 0.021869024276733397, 0.021745439529418945, 0.021553823471069336, 0.021511808395385742, 0.021597600936889647, 0.02156755256652832, 0.02146601676940918, 0.02146713638305664, 0.021341728210449218, 0.021370687484741212, 0.021381792068481446, 0.021871423721313475, 0.021936128616333008, 0.021618495941162108, 0.02164508819580078, 0.021371295928955078, 0.021399072647094727, 0.021905887603759767, 0.021495136260986328, 0.021379743576049805, 0.021354496002197267, 0.02143756866455078, 0.021713792800903322, 0.02154489517211914, 0.021526592254638672, 0.021362688064575194, 0.021311487197875977, 0.021359712600708007, 0.02137321662902832, 0.021631616592407227, 0.022008064270019532, 0.021827295303344728, 0.02156470489501953, 0.021451520919799804, 0.02136796760559082, 0.021461759567260742, 0.021653568267822266, 0.021664928436279297, 0.021415807723999022, 0.021279743194580078, 0.021477632522583008, 0.02129280090332031, 0.021335552215576172, 0.02133247947692871, 0.02155913543701172, 0.021481632232666015, 0.021598207473754884, 0.021447999954223633, 0.021626752853393556, 0.021709535598754885, 0.0215184326171875, 0.02141168022155762, 0.021379104614257814, 0.02139334487915039, 0.021368511199951173, 
0.02152057647705078, 0.02147056007385254, 0.021396448135375976, 0.021398624420166015, 0.021403968811035155, 0.021557151794433595, 0.02169107246398926, 0.021626688003540038, 0.021745376586914063, 0.021502431869506837, 0.02168009567260742, 0.022105663299560548, 0.022116832733154297, 0.022240959167480468, 0.02241708755493164, 0.022077375411987305, 0.021758176803588866, 0.021804800033569337, 0.0217890567779541, 0.02182102394104004, 0.021904191970825194, 0.022130495071411134, 0.022003007888793946, 0.021805376052856446, 0.021662080764770508, 0.021897216796875, 0.02156723213195801, 0.02151759910583496, 0.021486560821533204, 0.02182963180541992, 0.021821279525756836, 0.02198953628540039, 0.021612224578857423, 0.02170022392272949, 0.0216296329498291, 0.021696512222290038, 0.02159779167175293, 0.021603776931762696, 0.021598207473754884, 0.021826528549194337, 0.0220214729309082, 0.02184239959716797, 0.021799104690551758, 0.02206105613708496, 0.021798912048339843, 0.021661663055419923, 0.02184182357788086, 0.021885055541992188, 0.02166374397277832, 0.021987327575683592, 0.02187059211730957, 0.02214646339416504, 0.021583871841430666, 0.021760128021240235, 0.02177222442626953, 0.0216396484375, 0.021792255401611327, 0.021723712921142578, 0.022554239273071288, 0.022104352951049806, 0.021970016479492187, 0.021517087936401367, 0.021742944717407228, 0.02165657615661621, 0.02158729553222656, 0.021438880920410155, 0.0214836483001709, 0.021510112762451173, 0.021610143661499024, 0.021563776016235353, 0.02161664009094238, 0.02169968032836914, 0.02182032012939453, 0.021587968826293946, 0.021479040145874023, 0.021940223693847655, 0.021735807418823243, 0.021635072708129883, 0.022015615463256834, 0.022018367767333985, 0.021801023483276366, 0.021777599334716798]",tokens/s,46.06215062103461,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1582.67392,1596.915712,0.0,1201.668096,1189.151232,s,1,8.203900390625,8.203900390625,0.0,8.203900390625,8.203900390625,8.203900390625,8.203900390625,[8.203900390625],,kWh,3.496767919166738e-05,3.849963228949563e-06,1.0954453207999426e-05,4.9772095628616364e-05,,MB,1564.16,1791.950848,0.0,1382.023168,1351.367168,s,10,0.47518137359619145,0.04751813735961914,0.0002168038865286773,0.04744075202941894,0.047669537353515624,0.04788238372802734,0.04805266082763672,"[0.04809523010253906, 0.04736966323852539, 0.04743475341796875, 0.04732377624511719, 0.04743775939941406, 0.04756444931030274, 0.047313503265380856, 0.04762223815917969, 0.04744374465942383, 0.047576255798339843]",tokens/s,5387.416557652121,kWh,1.426084696565e-06,1.5722692729590755e-07,9.489682388487577e-07,2.5322798627096657e-06,tokens/kWh,101094671.15773974,MB,1564.16,1833.893888,0.0,1423.966208,1407.328256,s,10,13.738055297851563,1.3738055297851564,0.003616846912428085,1.3729713134765624,1.378206689453125,1.3787253540039062,1.3791402856445312,"[1.3766226806640625, 
1.3780914306640626, 1.3720076904296874, 1.369878662109375, 1.3776171875, 1.369964111328125, 1.371584228515625, 1.3739349365234375, 1.3792440185546875, 1.3691103515625]",tokens/s,45.85801893653194,kWh,3.954381329343452e-05,4.3613261565511e-06,1.8056352520951623e-05,6.196149197093724e-05,tokens/kWh,1016760.53942584,,s,630,13.735277492523181,0.02180202776590983,0.00033505996915220957,0.02172318458557129,0.022011130142211913,0.02230405149459839,0.023348841247558597,"[0.021792863845825194, 0.02171536064147949, 0.021740928649902343, 0.022104480743408202, 0.021720895767211913, 0.021602815628051757, 0.021725183486938478, 0.021708799362182618, 0.02162073516845703, 0.02167193603515625, 0.022177791595458983, 0.021825536727905274, 0.021644319534301758, 0.021639520645141602, 0.021813888549804688, 0.021712896347045898, 0.021765119552612306, 0.021570560455322265, 0.021763423919677734, 0.021709440231323242, 0.021766176223754884, 0.021712896347045898, 0.021567487716674806, 0.02166988754272461, 0.021839679718017577, 0.021624736785888672, 0.021889312744140625, 0.02546073532104492, 0.021745664596557617, 0.02179427146911621, 0.021656095504760744, 0.02175292778015137, 0.0224736328125, 0.02186182403564453, 0.02158799934387207, 0.021578271865844725, 0.021590015411376954, 0.022145023345947267, 0.023379743576049803, 0.022912351608276368, 0.02170524787902832, 0.021743967056274415, 0.02168217658996582, 0.021753631591796874, 0.021598432540893556, 0.02158736038208008, 0.02166204833984375, 0.021698816299438477, 0.021593376159667967, 0.021689056396484375, 0.021585376739501953, 0.021885120391845703, 0.02172323226928711, 0.021776639938354492, 0.021716991424560548, 0.021757280349731446, 0.021977247238159178, 0.02166124725341797, 0.021662656784057616, 0.021712608337402343, 0.021700895309448243, 0.02183782386779785, 0.021635072708129883, 0.02174835205078125, 0.021863935470581054, 0.021833728790283204, 0.021700895309448243, 0.02176041603088379, 0.021870399475097658, 0.02232121658325195, 0.021790847778320313, 0.021800960540771484, 0.021995519638061522, 0.022013952255249023, 0.021804927825927734, 0.021670015335083007, 0.02170639991760254, 0.021778783798217773, 0.021894943237304686, 0.021772512435913084, 0.021722623825073242, 0.021675552368164062, 0.021636064529418946, 0.021950304031372072, 0.02182467269897461, 0.02172313690185547, 0.021699583053588867, 0.02174937629699707, 0.021737855911254884, 0.02172835159301758, 0.021562271118164063, 0.02302297592163086, 0.022726463317871093, 0.021971744537353517, 0.021757984161376955, 0.021964448928833008, 0.021762399673461913, 0.021704704284667968, 0.021678016662597655, 0.02178873634338379, 0.02189107131958008, 0.02178278350830078, 0.021847007751464843, 0.02165020751953125, 0.02180860710144043, 0.02170275115966797, 0.02177043151855469, 0.021647296905517577, 0.02167977523803711, 0.021861024856567383, 0.021740959167480468, 0.021682559967041017, 0.021635295867919922, 0.021594112396240234, 0.02301692771911621, 0.021633567810058593, 0.021609888076782227, 0.021658208847045897, 0.02163043212890625, 0.0215614070892334, 0.02174745559692383, 0.023663328170776366, 0.02226585578918457, 0.02230067253112793, 0.02196463966369629, 0.021778079986572267, 0.02161177635192871, 0.021776351928710937, 0.02163167953491211, 0.021575935363769533, 0.021558687210083007, 0.02153660774230957, 0.021620960235595704, 0.021592096328735353, 0.021465599060058595, 0.021591615676879884, 0.021607999801635743, 0.023083295822143555, 0.022989408493041992, 0.02352332878112793, 0.02171494483947754, 0.021651168823242188, 
0.021597824096679687, 0.02163983917236328, 0.02170675277709961, 0.021774335861206053, 0.021847583770751952, 0.021686752319335936, 0.021675039291381835, 0.021607391357421873, 0.021624544143676757, 0.021647552490234374, 0.021594112396240234, 0.02155939292907715, 0.021712896347045898, 0.02162073516845703, 0.021739295959472656, 0.02167625617980957, 0.02175939178466797, 0.021699167251586913, 0.022181407928466797, 0.02164169692993164, 0.021683616638183592, 0.021682783126831053, 0.021985279083251954, 0.022149120330810547, 0.021783903121948243, 0.021810943603515626, 0.021600576400756837, 0.02165190315246582, 0.021644927978515624, 0.02157948875427246, 0.02183247947692871, 0.02165558433532715, 0.021796863555908205, 0.021833728790283204, 0.021719039916992186, 0.02164531135559082, 0.021686111450195313, 0.021727392196655274, 0.021741567611694337, 0.021634687423706056, 0.021668224334716796, 0.021896575927734373, 0.02173388862609863, 0.021714431762695312, 0.021962976455688475, 0.021739839553833008, 0.021694368362426757, 0.021821632385253906, 0.021968832015991212, 0.021942527770996093, 0.021925535202026367, 0.021745311737060548, 0.021611200332641602, 0.0217488956451416, 0.02175062370300293, 0.02183919906616211, 0.021625503540039063, 0.02229043197631836, 0.021660896301269533, 0.021826175689697264, 0.02182547187805176, 0.02184582328796387, 0.021658016204833985, 0.021659263610839842, 0.02179110336303711, 0.02169036865234375, 0.021958656311035156, 0.02200371170043945, 0.02187446403503418, 0.021815519332885742, 0.02172313690185547, 0.021821056365966797, 0.021703039169311523, 0.021751808166503905, 0.02170639991760254, 0.02177039909362793, 0.021827775955200194, 0.021848031997680664, 0.021776256561279298, 0.02165478324890137, 0.02163715171813965, 0.02164620780944824, 0.021628192901611328, 0.021668127059936523, 0.021608896255493164, 0.021617759704589845, 0.02165648078918457, 0.02166374397277832, 0.02167193603515625, 0.021753856658935547, 0.021729280471801758, 0.021736991882324218, 0.021582304000854494, 0.02168012809753418, 0.021654687881469726, 0.021617504119873048, 0.021821216583251955, 0.021717216491699217, 0.021565439224243164, 0.021592063903808592, 0.021595680236816406, 0.02164963150024414, 0.02159436798095703, 0.021602304458618164, 0.021669408798217774, 0.021567968368530272, 0.021766143798828123, 0.021823007583618163, 0.021803487777709962, 0.0217509765625, 0.021703359603881835, 0.02184796714782715, 0.021753599166870117, 0.021706880569458006, 0.021842144012451173, 0.02162892723083496, 0.02188694381713867, 0.021833759307861328, 0.021786624908447266, 0.021665567398071288, 0.02167625617980957, 0.02269152069091797, 0.021626304626464844, 0.021735872268676758, 0.021807552337646485, 0.021638944625854493, 0.021737695693969727, 0.02171494483947754, 0.021702655792236326, 0.02164246368408203, 0.021635776519775392, 0.021640960693359374, 0.021758304595947266, 0.021859392166137696, 0.021659936904907227, 0.021680351257324218, 0.021682624816894532, 0.021702655792236326, 0.0216246395111084, 0.021647552490234374, 0.021708351135253906, 0.021815423965454103, 0.021800607681274415, 0.021651199340820312, 0.021666048049926757, 0.021796607971191408, 0.021805631637573243, 0.021721439361572267, 0.021719039916992186, 0.023518239974975586, 0.022268896102905274, 0.022314687728881837, 0.02189548873901367, 0.02169209671020508, 0.021776704788208007, 0.021862464904785155, 0.02280195236206055, 0.022339712142944335, 0.02185158348083496, 0.02192470359802246, 0.023119871139526366, 0.021951583862304686, 0.021842464447021485, 0.02233526420593262, 
0.021801567077636717, 0.021747007369995117, 0.021731264114379884, 0.021822208404541014, 0.02186444854736328, 0.021693952560424806, 0.021674495697021484, 0.02170675277709961, 0.021650848388671876, 0.0215285758972168, 0.021638624191284178, 0.021666336059570312, 0.02191155242919922, 0.022071296691894532, 0.021729280471801758, 0.02182143974304199, 0.021962751388549806, 0.02219558334350586, 0.02194905662536621, 0.021573631286621094, 0.022183263778686523, 0.022041248321533202, 0.021810623168945314, 0.021717567443847657, 0.02151628875732422, 0.02166783905029297, 0.021630975723266603, 0.021551103591918946, 0.021606399536132814, 0.021725120544433593, 0.021624895095825197, 0.022220800399780274, 0.021761407852172853, 0.021645055770874024, 0.021658496856689455, 0.022054912567138672, 0.021857343673706054, 0.02160736083984375, 0.02161664009094238, 0.02165555191040039, 0.02163020706176758, 0.02169113540649414, 0.02168832015991211, 0.021745664596557617, 0.021581760406494142, 0.021643327713012694, 0.021544960021972655, 0.02166374397277832, 0.021656671524047853, 0.021617183685302733, 0.021739007949829102, 0.02171993637084961, 0.02175763130187988, 0.022328800201416015, 0.0216595516204834, 0.021586048126220704, 0.0216297607421875, 0.02161840057373047, 0.02173529624938965, 0.021710687637329102, 0.02169913673400879, 0.021722688674926757, 0.021744064331054688, 0.021610591888427736, 0.02164931106567383, 0.021600160598754883, 0.021573728561401367, 0.021700607299804688, 0.021753631591796874, 0.021758176803588866, 0.021740736007690428, 0.021823392868041993, 0.021796960830688477, 0.021600255966186522, 0.021719039916992186, 0.02209916877746582, 0.021606943130493165, 0.02166364860534668, 0.02247923278808594, 0.021751455307006836, 0.022157215118408204, 0.021613983154296874, 0.021647552490234374, 0.02169523239135742, 0.021692480087280273, 0.021607456207275392, 0.021865440368652345, 0.021879840850830078, 0.021788703918457032, 0.02175276756286621, 0.021710847854614256, 0.021811199188232423, 0.021740543365478517, 0.021724159240722657, 0.021958656311035156, 0.022087135314941407, 0.021795360565185547, 0.02202134323120117, 0.021977184295654296, 0.022110464096069336, 0.021756000518798828, 0.02187913513183594, 0.02189107131958008, 0.021702655792236326, 0.021755903244018555, 0.021766143798828123, 0.021985279083251954, 0.021724863052368162, 0.02176646423339844, 0.021811199188232423, 0.021853504180908204, 0.0216494083404541, 0.021686975479125976, 0.02164531135559082, 0.021593376159667967, 0.021752159118652345, 0.021612800598144532, 0.021571264266967774, 0.021659616470336915, 0.02160688018798828, 0.021768192291259765, 0.0220446720123291, 0.021542015075683593, 0.021588863372802733, 0.021660703659057617, 0.02201081657409668, 0.021806976318359376, 0.0216615047454834, 0.02158153533935547, 0.02165376091003418, 0.02155353546142578, 0.021618688583374023, 0.02164646339416504, 0.02154319953918457, 0.0216494083404541, 0.02179782485961914, 0.021716991424560548, 0.02162073516845703, 0.021647104263305662, 0.021686399459838867, 0.021678207397460937, 0.021831680297851562, 0.021622783660888673, 0.021610015869140624, 0.02162719917297363, 0.021780256271362305, 0.021837312698364256, 0.02161097526550293, 0.021682592391967775, 0.021817312240600586, 0.021935583114624024, 0.021770784378051758, 0.021773984909057617, 0.02175424003601074, 0.021764095306396485, 0.02171494483947754, 0.02165555191040039, 0.021774335861206053, 0.021754976272583007, 0.02180803108215332, 0.02189926338195801, 0.021746944427490235, 0.021836544036865236, 0.021635072708129883, 
0.021882272720336913, 0.022131296157836915, 0.021833728790283204, 0.021743616104125976, 0.021890111923217773, 0.022125503540039063, 0.022128639221191407, 0.02175935935974121, 0.021770656585693358, 0.021820831298828124, 0.021713727951049804, 0.02169759941101074, 0.021704927444458007, 0.021946239471435546, 0.021916511535644532, 0.021958656311035156, 0.02187264060974121, 0.021985279083251954, 0.02190505599975586, 0.021789024353027344, 0.02174284744262695, 0.02170102310180664, 0.021731679916381835, 0.02234163284301758, 0.021845695495605468, 0.022308511734008787, 0.021745920181274414, 0.02169487953186035, 0.021780288696289063, 0.02175200080871582, 0.021767391204833984, 0.021616928100585936, 0.021826047897338868, 0.021772287368774415, 0.022202655792236327, 0.021602432250976564, 0.021749824523925782, 0.02171638488769531, 0.02166192054748535, 0.021701055526733397, 0.021741472244262695, 0.021755935668945313, 0.021791135787963867, 0.02211020851135254, 0.022345727920532226, 0.021646495819091796, 0.021586784362792967, 0.02159152030944824, 0.021940671920776367, 0.021757984161376955, 0.02211027145385742, 0.021766143798828123, 0.021819391250610352, 0.021757280349731446, 0.021650079727172852, 0.021721088409423828, 0.021719039916992186, 0.021708799362182618, 0.021708799362182618, 0.02181715202331543, 0.02179212760925293, 0.021663616180419922, 0.021805023193359373, 0.021819711685180664, 0.02245903968811035, 0.023595008850097656, 0.021786624908447266, 0.021882463455200195, 0.02186854362487793, 0.021884639739990233, 0.02163692855834961, 0.021741632461547852, 0.022858560562133787, 0.02177539253234863, 0.0216396484375, 0.021641727447509765, 0.021751808166503905, 0.02168009567260742, 0.021717023849487305, 0.02165551948547363, 0.02182761573791504, 0.02180271911621094, 0.021690656661987304, 0.0216494083404541, 0.02167807960510254, 0.021794815063476563, 0.021669792175292968, 0.021661792755126953, 0.021707807540893555, 0.02168726348876953, 0.022155263900756835, 0.021597824096679687, 0.02163283157348633, 0.021537343978881837, 0.022401023864746093, 0.022345567703247072, 0.02483216094970703, 0.022258880615234376, 0.02166864013671875, 0.021501983642578125, 0.02168627166748047, 0.021604352951049805, 0.021530624389648437, 0.021620031356811523, 0.0216746883392334, 0.021554496765136717, 0.02156819152832031, 0.021587968826293946, 0.021622783660888673, 0.021716991424560548, 0.021579776763916016, 0.02230681610107422, 0.021538400650024415, 0.023273183822631837, 0.021777088165283204, 0.021712799072265625, 0.021631071090698242, 0.021604223251342772, 0.02164748764038086, 0.021630975723266603, 0.02205081558227539, 0.0215817928314209, 0.021788703918457032, 0.021826656341552734, 0.021828128814697267, 0.021688703536987305, 0.021618623733520508, 0.0215982723236084, 0.021581760406494142, 0.021663808822631837, 0.021598207473754884, 0.021562528610229493, 0.021647712707519532, 0.02169465637207031, 0.0218668155670166, 0.021577728271484374, 0.021566848754882812, 0.021719680786132813, 0.02164121627807617, 0.02150809669494629, 0.0218024959564209, 0.021950143814086914, 0.02159062385559082, 0.021915712356567384, 0.0218175048828125, 0.021724512100219726, 0.021647615432739256, 0.021558944702148437, 0.02155801582336426, 0.021628320693969725, 0.021719072341918947, 0.021784160614013674, 0.021880800247192383, 0.02186342430114746, 0.021780479431152345, 0.021811199188232423, 0.0216693115234375, 0.02178060722351074, 0.021729248046875, 0.021746143341064453]",tokens/s,45.86729320488362,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 20211 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,6765.768704,7762.542592,0.0,7367.294976,7351.94368,s,1,12.484943359375,12.484943359375,0.0,12.484943359375,12.484943359375,12.484943359375,12.484943359375,[12.484943359375],,kWh,0.00015955133010832774,1.7592247732297916e-05,5.0381151416003384e-05,0.00022752472925662904,,MB,1654.890496,8372.813824,0.0,7962.886144,7872.44544,s,10,3.217915252685547,0.3217915252685547,0.0013950329344355533,0.3218920440673828,0.32337008056640626,0.32344077758789064,0.3234973352050781,"[0.3185030212402344, 0.32086541748046876, 0.32094903564453126, 0.3215930480957031, 0.3233543701171875, 0.3225209045410156, 0.32187457275390624, 0.3219095153808594, 0.3228338928222656, 0.323511474609375]",tokens/s,795.5461219382715,kWh,9.364775526823147e-06,1.032679123397765e-06,6.21373760987501e-06,1.6611192260095923e-05,tokens/kWh,15411295.949838202,MB,1666.101248,8624.472064,0.0,8214.544384,8118.577152,s,10,27.102155029296874,2.7102155029296875,0.0027258359209844218,2.709692626953125,2.712966064453125,2.7143250732421875,2.7154122802734375,"[2.71568408203125, 2.710020263671875, 2.7065537109375, 2.708927978515625, 2.708125732421875, 2.706820556640625, 2.712405517578125, 2.7126640625, 2.709364990234375, 2.711588134765625]",tokens/s,23.245383967399746,kWh,7.911330669484301e-05,8.72638364765581e-06,5.2567253859325075e-05,0.0001404069442018239,tokens/kWh,448695.7561688863,,s,630,27.098995811462423,0.043014279065813336,0.00037987361758398897,0.04298665618896484,0.04340562782287598,0.04354385929107666,0.044067572555542,"[0.04339251327514648, 0.04290611267089844, 0.042633216857910154, 0.04266726303100586, 0.04283404922485352, 0.04267686462402344, 0.04281257629394531, 0.043326400756835935, 0.04269865417480469, 0.042773696899414064, 0.04270108795166016, 0.04273596954345703, 0.04285414505004883, 0.04289318466186524, 0.042850879669189455, 0.04271641540527344, 0.04280915069580078, 0.04295999908447266, 0.043157215118408206, 0.042928031921386715, 0.04373324966430664, 0.042939937591552735, 0.042885536193847655, 0.04328995132446289, 0.04309030532836914, 0.04312297439575195, 0.042874462127685545, 0.04292959976196289, 
0.04283491134643555, 0.04286054229736328, 0.04292607879638672, 0.04326399993896484, 0.04312995147705078, 0.0434246711730957, 0.04319232177734375, 0.04315750503540039, 0.042979328155517575, 0.043210014343261716, 0.043340511322021484, 0.04307353591918945, 0.043063297271728515, 0.043237377166748046, 0.04330825424194336, 0.04332998275756836, 0.043265472412109374, 0.04323612976074219, 0.04312409591674805, 0.0433191032409668, 0.04336304092407227, 0.04312841415405273, 0.0432624626159668, 0.04347097778320313, 0.043364673614501956, 0.043372222900390625, 0.04331958389282226, 0.04315017700195312, 0.043266654968261715, 0.04326019287109375, 0.04331315231323242, 0.043361793518066405, 0.04371916961669922, 0.0433172492980957, 0.043243743896484374, 0.043122783660888675, 0.04278262329101563, 0.04254515075683594, 0.04233420944213867, 0.042259807586669924, 0.04243059158325195, 0.04224873733520508, 0.04244895935058594, 0.042476478576660155, 0.04262400054931641, 0.04294451141357422, 0.04263705444335938, 0.04263935852050781, 0.04265497589111328, 0.04269801712036133, 0.04263497543334961, 0.04261798477172852, 0.04272995376586914, 0.04263468933105469, 0.04276649475097656, 0.042861278533935544, 0.04277193450927735, 0.043126815795898436, 0.043006561279296876, 0.04286435317993164, 0.042698879241943356, 0.04280876922607422, 0.04266876983642578, 0.042718494415283206, 0.04295139312744141, 0.043122432708740235, 0.04289152145385742, 0.042754207611083984, 0.04283564758300781, 0.0429323844909668, 0.042915233612060545, 0.04295363235473633, 0.04291657638549805, 0.04294345474243164, 0.043009151458740236, 0.04309491348266602, 0.04309811019897461, 0.04298115158081055, 0.043237598419189456, 0.04324147033691406, 0.04363673782348633, 0.04364716720581055, 0.0433928337097168, 0.044109825134277345, 0.04691558456420898, 0.04290259170532226, 0.043037151336669924, 0.04327881622314453, 0.04325312042236328, 0.04311011123657227, 0.04304790496826172, 0.04336019134521484, 0.043503456115722657, 0.043474815368652345, 0.04331862258911133, 0.04343190383911133, 0.0434442253112793, 0.04323984146118164, 0.04339212799072266, 0.042651905059814456, 0.04240614318847656, 0.042181087493896485, 0.04228496170043945, 0.0423765754699707, 0.04293222427368164, 0.0434054069519043, 0.04273001480102539, 0.04251238250732422, 0.04238655853271484, 0.04245939254760742, 0.042549888610839845, 0.042694591522216795, 0.04264556884765625, 0.042690464019775394, 0.04290569686889648, 0.04269875335693359, 0.042673728942871095, 0.04281388854980469, 0.04283353424072266, 0.04291945648193359, 0.04421283340454102, 0.04299174499511719, 0.04295897674560547, 0.04284620666503906, 0.04293632125854492, 0.04281865692138672, 0.04263753509521485, 0.04266463851928711, 0.04277043151855469, 0.04276838302612305, 0.04294374465942383, 0.0428223991394043, 0.04297119903564453, 0.04305500793457031, 0.04289750289916992, 0.04289056015014649, 0.04294105529785156, 0.042917728424072266, 0.04286223983764648, 0.04314729690551758, 0.04312063980102539, 0.043178462982177736, 0.0433983039855957, 0.04360665512084961, 0.04335638427734375, 0.04313087844848633, 0.04310835266113281, 0.043022560119628905, 0.04296384048461914, 0.04300601577758789, 0.04301091384887695, 0.043050430297851563, 0.04334988784790039, 0.04344083023071289, 0.04338483047485352, 0.04323904037475586, 0.043399551391601565, 0.043153408050537106, 0.04312396621704102, 0.0434672966003418, 0.04347312164306641, 0.04348108673095703, 0.04288409423828125, 0.042620990753173826, 0.042643455505371096, 0.042474559783935543, 0.042493824005126954, 
0.04222390365600586, 0.042982559204101566, 0.04247814559936523, 0.042264575958251956, 0.04261580657958984, 0.042482688903808595, 0.04269465637207031, 0.043224414825439456, 0.04280963134765625, 0.04275971221923828, 0.04259254455566406, 0.042891841888427734, 0.04274176025390625, 0.04267734527587891, 0.04297001647949219, 0.04314835357666016, 0.04338275146484375, 0.043109344482421874, 0.04299760055541992, 0.04289737701416016, 0.04270918273925781, 0.042690208435058594, 0.04307388687133789, 0.04259635162353516, 0.042633216857910154, 0.04281897735595703, 0.04297296142578125, 0.04292076873779297, 0.042799102783203126, 0.04293609619140625, 0.0428812141418457, 0.04285776138305664, 0.042869503021240235, 0.04304012680053711, 0.043036670684814454, 0.04393971252441406, 0.04331292724609375, 0.04334662246704102, 0.0436778564453125, 0.04368396759033203, 0.04348281478881836, 0.04328656005859375, 0.043423168182373045, 0.043213119506835936, 0.04329033660888672, 0.04339206314086914, 0.043089664459228516, 0.04297283172607422, 0.04301017761230469, 0.043087135314941405, 0.04310521697998047, 0.0430544319152832, 0.04329539108276367, 0.0432509765625, 0.043344097137451174, 0.04339548873901367, 0.043599967956542966, 0.043315361022949216, 0.04273196792602539, 0.04244416046142578, 0.0423985595703125, 0.04230579376220703, 0.04235033416748047, 0.04253900909423828, 0.04252492904663086, 0.04249484634399414, 0.04268064117431641, 0.04270153427124023, 0.04263673782348633, 0.0426530876159668, 0.04245743942260742, 0.04241052627563477, 0.04261471939086914, 0.04282294464111328, 0.04270336151123047, 0.042927936553955076, 0.043076416015625, 0.04304851150512695, 0.04296300888061524, 0.04290166473388672, 0.04305920028686523, 0.04289712142944336, 0.04295635223388672, 0.04283583831787109, 0.04289827346801758, 0.04281753540039063, 0.042799102783203126, 0.042774177551269534, 0.04262947082519531, 0.04281753540039063, 0.042995712280273435, 0.04292812728881836, 0.042981342315673826, 0.042939678192138675, 0.04293452835083008, 0.043326976776123044, 0.043121662139892575, 0.04309811019897461, 0.044436702728271486, 0.04318611145019531, 0.04359219360351563, 0.04317219161987305, 0.04318003082275391, 0.0431769905090332, 0.04339811325073242, 0.04340035247802734, 0.04325667190551758, 0.04330624008178711, 0.04327091217041015, 0.04319145584106445, 0.04305599975585937, 0.04314716720581055, 0.043165760040283205, 0.04337238311767578, 0.04333580780029297, 0.04356304168701172, 0.04319622421264648, 0.043141311645507815, 0.04329395294189453, 0.04348169708251953, 0.04351996612548828, 0.042734527587890626, 0.0424672966003418, 0.04235001754760742, 0.04251055908203125, 0.04264550399780274, 0.04276176071166992, 0.04258095932006836, 0.04257756805419922, 0.04234377670288086, 0.04252713775634766, 0.042531265258789065, 0.042461406707763674, 0.04265167999267578, 0.042692352294921875, 0.0427613754272461, 0.042576416015625, 0.04268012619018555, 0.042706497192382814, 0.04284425735473633, 0.042871646881103516, 0.042856449127197264, 0.04312844848632812, 0.04304115295410156, 0.04301728057861328, 0.04295961761474609, 0.042829856872558594, 0.04280073547363281, 0.04336838531494141, 0.042945152282714845, 0.042942272186279294, 0.042750175476074216, 0.042774494171142576, 0.04271104049682617, 0.04279276657104492, 0.04293036651611328, 0.042985183715820316, 0.04294646453857422, 0.042871166229248046, 0.043028480529785154, 0.04294451141357422, 0.04311859130859375, 0.043259552001953125, 0.043964126586914065, 0.043610305786132814, 0.04332793426513672, 0.04339199829101562, 
0.04333465576171875, 0.0432470703125, 0.043098464965820316, 0.04309811019897461, 0.043378273010253904, 0.043190113067626955, 0.04317068862915039, 0.04312255859375, 0.04317593765258789, 0.04312390518188477, 0.04307254409790039, 0.043183902740478515, 0.04321673583984375, 0.043149471282958984, 0.04325900650024414, 0.04354547119140625, 0.04343603134155274, 0.043044864654541014, 0.04262911987304688, 0.042618881225585936, 0.04242432022094727, 0.04251155090332031, 0.04255007934570312, 0.04272700881958008, 0.042664352416992186, 0.042675392150878906, 0.042658622741699216, 0.04277679824829102, 0.042898944854736325, 0.04281689453125, 0.042703777313232424, 0.0433438720703125, 0.04309161758422852, 0.04275439834594726, 0.04297926330566406, 0.04299782562255859, 0.042858558654785155, 0.043007232666015624, 0.04306208038330078, 0.0430365104675293, 0.043097248077392576, 0.042957118988037106, 0.042900032043457034, 0.042821632385253904, 0.04264755249023437, 0.042681888580322264, 0.04283391952514649, 0.04318435287475586, 0.04304054260253906, 0.042872512817382816, 0.04275609588623047, 0.04273017501831055, 0.042807392120361325, 0.04308777618408203, 0.04311843109130859, 0.04308812713623047, 0.04323680114746094, 0.04336627197265625, 0.04362924957275391, 0.04319427108764649, 0.0433787841796875, 0.04427775955200195, 0.04339244842529297, 0.04331577682495117, 0.043205760955810545, 0.04380070495605469, 0.043447040557861326, 0.043227134704589845, 0.04311859130859375, 0.04305100631713867, 0.04318790435791016, 0.04325203323364258, 0.043003902435302735, 0.04312441635131836, 0.04326041412353516, 0.04318598556518555, 0.04331267166137695, 0.043326976776123044, 0.04392035293579102, 0.04344841766357422, 0.04281520080566406, 0.04252467346191406, 0.04247836685180664, 0.04235212707519531, 0.04245532989501953, 0.042557823181152345, 0.042552513122558595, 0.04264432144165039, 0.042547168731689455, 0.04255744171142578, 0.042593441009521484, 0.042514686584472654, 0.04277104187011719, 0.04268147277832031, 0.04259929656982422, 0.04274748611450195, 0.042839969635009766, 0.042888702392578124, 0.04286975860595703, 0.04295206451416016, 0.042945152282714845, 0.043060543060302735, 0.042961631774902344, 0.042874462127685545, 0.0429714241027832, 0.04287088012695312, 0.043012096405029294, 0.04292227172851563, 0.042979198455810545, 0.04278572845458985, 0.04279004669189453, 0.04276172637939453, 0.042753280639648436, 0.04296214294433594, 0.04308505630493164, 0.04327062225341797, 0.04319347381591797, 0.04313932800292969, 0.04369887924194336, 0.04321279907226563, 0.043235294342041014, 0.04354188919067383, 0.04347561645507812, 0.043407615661621095, 0.043398944854736325, 0.04346262359619141, 0.0434442253112793, 0.04338687896728516, 0.04336608123779297, 0.04365548706054687, 0.04322844696044922, 0.043254016876220706, 0.043208351135253904, 0.04333663940429688, 0.04348483276367188, 0.04350950241088867, 0.043557342529296876, 0.04349747085571289, 0.043401214599609376, 0.04358473587036133, 0.043574047088623044, 0.04361833572387695, 0.043200416564941405, 0.04267948913574219, 0.04248617553710937, 0.04257388687133789, 0.04260892868041992, 0.042546688079833986, 0.042656383514404296, 0.042503807067871095, 0.04257628631591797, 0.04242147064208984, 0.042463134765625, 0.04256835174560547, 0.04272355270385742, 0.04278217697143555, 0.04293072128295899, 0.042805248260498044, 0.0428807373046875, 0.043251232147216795, 0.042922592163085936, 0.04285660934448242, 0.04308992004394531, 0.043175838470458985, 0.043081825256347656, 0.0432735366821289, 0.04299436950683594, 
0.042833759307861326, 0.04280131149291992, 0.04268841552734375, 0.042749248504638675, 0.04283689498901367, 0.04282969665527344, 0.04277967834472656, 0.04297808074951172, 0.04299766540527344, 0.042842174530029295, 0.04284438323974609, 0.04301801681518555, 0.04305667114257813, 0.04286124801635742, 0.04293632125854492, 0.04308515167236328, 0.043240097045898436, 0.04328646469116211, 0.04334393692016601, 0.04345187377929687, 0.04347123336791992, 0.043284160614013675, 0.043159839630126956, 0.04311648178100586, 0.044163326263427734, 0.04320857620239258, 0.04326518249511719, 0.04327657699584961, 0.043313343048095705, 0.04326383972167969, 0.043231903076171876, 0.043218238830566406, 0.04324739074707031, 0.043192607879638675, 0.043342464447021486, 0.0432988166809082, 0.043225086212158204, 0.04327324676513672, 0.04348108673095703, 0.042967041015625, 0.042674175262451174, 0.04249151992797852, 0.04243084716796875, 0.042240001678466796, 0.04242371368408203, 0.042477535247802733, 0.042566272735595705, 0.0425615348815918, 0.042909183502197266, 0.04448470306396484, 0.04250848007202149, 0.04247087860107422, 0.042474239349365235, 0.04253488159179687, 0.04274998474121094, 0.04283596801757812, 0.042894432067871094, 0.04296099090576172, 0.04316447830200195, 0.043119873046875, 0.043125503540039065, 0.04311859130859375, 0.0432125129699707, 0.04272911834716797, 0.042740352630615236, 0.042782718658447266, 0.04290505599975586, 0.04291619110107422, 0.04298553466796875, 0.04299993515014648, 0.04304256057739258, 0.042987777709960935, 0.042907615661621094, 0.04304899215698242, 0.042874881744384766, 0.04354048156738281, 0.043030143737792965, 0.043358592987060546, 0.043412830352783205, 0.04321267318725586, 0.04331302261352539, 0.043273120880126956, 0.043401214599609376, 0.04361612701416016, 0.04336579132080078, 0.04337148666381836, 0.04324121475219726, 0.04315702438354492, 0.042947040557861325, 0.04310806274414063, 0.043299423217773435, 0.04313631820678711, 0.04316128158569336, 0.04308819198608398, 0.04321446228027344, 0.043071231842041015, 0.0432281608581543, 0.04335520172119141, 0.043235679626464844, 0.043499393463134764, 0.04387311935424805]",tokens/s,23.24809392876178,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,3027.853312,3638.427648,0.0,3235.905536,3224.4864,s,1,10.586095703125,10.586095703125,0.0,10.586095703125,10.586095703125,10.586095703125,10.586095703125,[10.586095703125],,kWh,9.099158401666803e-05,1.00279261499437e-05,2.878474524999758e-05,0.00012980425541660932,,MB,2711.937024,4066.246656,0.0,3649.04448,3598.945792,s,10,1.353534454345703,0.1353534454345703,0.0002783040521456175,0.13534259033203125,0.13568589477539061,0.1357268051147461,0.13575953338623048,"[0.13493235778808593, 0.13491842651367186, 0.13517999267578126, 0.13541612243652343, 0.1356768035888672, 0.13529721069335937, 0.13566064453125, 0.13576771545410157, 0.13534498596191405, 
0.13534019470214845]",tokens/s,1891.3445400527323,kWh,4.022923556278399e-06,4.4352107544889726e-07,2.6703255761371055e-06,7.136770207864401e-06,tokens/kWh,35870567.85405525,MB,2724.827136,4068.343808,0.0,3651.141632,3598.948352,s,10,21.643119628906252,2.164311962890625,0.016358933132875387,2.164907958984375,2.1791092041015623,2.1855914672851564,2.1907772778320314,"[2.131059326171875, 2.159093505859375, 2.14446240234375, 2.17620849609375, 2.166535400390625, 2.16167529296875, 2.163280517578125, 2.19207373046875, 2.177668701171875, 2.171062255859375]",tokens/s,29.108557860511972,kWh,6.249101308497152e-05,6.89247432819363e-06,3.808551258406243e-05,0.00010746899999722756,tokens/kWh,586215.5598509826,,s,630,21.639196624755847,0.03434793115040613,0.0007713354760390236,0.03427043342590332,0.034916091156005864,0.03514373130798339,0.036662798805236825,"[0.03390915298461914, 0.03377580642700195, 0.033726207733154295, 0.03392115020751953, 0.033726593017578126, 0.03366009521484375, 0.033630367279052734, 0.03358294296264648, 0.03394140625, 0.033575553894042966, 0.03373289489746094, 0.033930335998535156, 0.033653663635253905, 0.03350527954101563, 0.0339046401977539, 0.03451264190673828, 0.03405235290527344, 0.034363327026367185, 0.03371990585327148, 0.0339681282043457, 0.03386127853393555, 0.033662910461425784, 0.03369631958007813, 0.034428478240966794, 0.03375180816650391, 0.03359743881225586, 0.033537662506103516, 0.03334764862060547, 0.03352608108520508, 0.033585151672363284, 0.033852577209472656, 0.033753887176513675, 0.03354220962524414, 0.033683456420898435, 0.03385343933105469, 0.03385299301147461, 0.03387142562866211, 0.0336761589050293, 0.03418035125732422, 0.033919742584228516, 0.03382271957397461, 0.03398860931396484, 0.03381248092651367, 0.03387104034423828, 0.03397715377807617, 0.033686912536621094, 0.034382080078125, 0.03417468643188477, 0.03396470260620117, 0.03421184158325195, 0.03379721450805664, 0.033580287933349606, 0.03372201538085937, 0.034625022888183594, 0.03370444869995117, 0.033732608795166014, 0.033654624938964844, 0.0337020492553711, 0.03382444763183594, 0.033624256134033206, 0.03360166549682617, 0.0335596809387207, 0.03370278549194336, 0.033868095397949216, 0.033980224609375, 0.03377590560913086, 0.033726367950439456, 0.03368153762817383, 0.03370595169067383, 0.03390012741088867, 0.033720767974853516, 0.034172672271728516, 0.034812286376953126, 0.034352062225341796, 0.03408579254150391, 0.034035552978515626, 0.03401334381103516, 0.0340582389831543, 0.03484467315673828, 0.03423231887817383, 0.03391078567504883, 0.03404604721069336, 0.03421971130371094, 0.03483670425415039, 0.03422822570800781, 0.034088958740234376, 0.03425414276123047, 0.03460076904296875, 0.03441139221191406, 0.034146305084228515, 0.03472089767456055, 0.03410345458984375, 0.03419209671020508, 0.03429097747802735, 0.03449520111083984, 0.034326526641845705, 0.03446089553833008, 0.03471644973754883, 0.03418902587890625, 0.03454185485839844, 0.034401729583740236, 0.03455852890014648, 0.03421388626098633, 0.034095104217529294, 0.034173057556152346, 0.03404751968383789, 0.034019454956054684, 0.03383091354370117, 0.03463507080078125, 0.03374991989135742, 0.033511425018310545, 0.03375654220581055, 0.033581344604492185, 0.033851360321044924, 0.04459360122680664, 0.034043872833251956, 0.03390214538574219, 0.034289600372314454, 0.03374127960205078, 0.03377151870727539, 0.03379715347290039, 0.033855552673339846, 0.03367593765258789, 0.03369174575805664, 0.03351945495605469, 0.03369219207763672, 0.03488991928100586, 
0.03434524917602539, 0.03403571319580078, 0.03382271957397461, 0.034159934997558594, 0.03386038589477539, 0.03377961730957031, 0.03480297470092773, 0.03393199920654297, 0.03387968063354492, 0.03576460647583008, 0.034050048828125, 0.033871646881103515, 0.0339376335144043, 0.03390054321289063, 0.0335994873046875, 0.03515110397338867, 0.03376950454711914, 0.03430915069580078, 0.03373590469360352, 0.033825248718261716, 0.033889728546142577, 0.03358367919921875, 0.033701889038085936, 0.03364863967895508, 0.03369779205322266, 0.033724800109863284, 0.03403737640380859, 0.033900382995605466, 0.03434921646118164, 0.03456393432617187, 0.03400719833374023, 0.03419910430908203, 0.03407302474975586, 0.034270881652832035, 0.03417641448974609, 0.03514815902709961, 0.03418307113647461, 0.033976768493652346, 0.03385161590576172, 0.03416585540771484, 0.03388304138183594, 0.034060001373291016, 0.03433091354370117, 0.03395993423461914, 0.034181121826171876, 0.0345475845336914, 0.03391910552978516, 0.033976417541503906, 0.033856575012207034, 0.033805023193359374, 0.03372249603271484, 0.0336824951171875, 0.03351443099975586, 0.03344713592529297, 0.03348486328125, 0.03364502334594727, 0.03428787231445313, 0.03352313613891601, 0.034406654357910155, 0.03397820663452149, 0.03382921600341797, 0.033516735076904294, 0.03383091354370117, 0.033791999816894534, 0.033708030700683594, 0.033841022491455076, 0.03373881530761719, 0.033857406616210936, 0.03397036743164063, 0.033659934997558594, 0.03429679870605469, 0.033955230712890624, 0.035103424072265625, 0.03486505508422852, 0.03390003204345703, 0.03390105438232422, 0.03360563278198242, 0.033587200164794925, 0.03350527954101563, 0.03364012908935547, 0.03330694580078125, 0.033857536315917966, 0.03394118499755859, 0.033796127319335935, 0.03487696075439453, 0.034586463928222656, 0.03480873489379883, 0.03448627090454102, 0.03430732727050781, 0.03454217529296875, 0.03534998321533203, 0.03471750259399414, 0.03461414337158203, 0.035110912322998046, 0.03690854263305664, 0.03487753677368164, 0.034673118591308595, 0.034911518096923826, 0.03484748840332031, 0.0351396484375, 0.03516400146484375, 0.03507814407348633, 0.03494105529785156, 0.035071361541748045, 0.03503769683837891, 0.03490611267089844, 0.034915393829345706, 0.03457734298706055, 0.03458377456665039, 0.03463452911376953, 0.035013664245605466, 0.03436825561523438, 0.0348419189453125, 0.03502710342407227, 0.03490687942504883, 0.03462092971801758, 0.034912769317626956, 0.03461119842529297, 0.03485011291503906, 0.03550278472900391, 0.03500236892700195, 0.034598911285400394, 0.0347770881652832, 0.03470131301879883, 0.034680831909179685, 0.034165409088134764, 0.03441897583007812, 0.034338527679443356, 0.034724929809570315, 0.03446623992919922, 0.03448064041137695, 0.03464755249023437, 0.0357022705078125, 0.034484222412109376, 0.034409313201904296, 0.034611358642578124, 0.03432447814941406, 0.034203647613525394, 0.03470528030395508, 0.03436662292480469, 0.03438691329956055, 0.03421811294555664, 0.0342239990234375, 0.03453692626953125, 0.03651366424560547, 0.034718368530273436, 0.03442892837524414, 0.03462963104248047, 0.034223617553710936, 0.034359809875488284, 0.034506752014160154, 0.03433062362670898, 0.03448627090454102, 0.034551742553710935, 0.0343675537109375, 0.03425606536865235, 0.034519008636474606, 0.03467724609375, 0.034419040679931644, 0.034815711975097655, 0.03451728057861328, 0.034495487213134765, 0.03435007858276367, 0.03455507278442383, 0.034526016235351564, 0.03452108764648437, 0.0344719352722168, 
0.03436684799194336, 0.034431617736816404, 0.034285568237304685, 0.03420479965209961, 0.03408505630493164, 0.03409171295166016, 0.03497942352294922, 0.03445817565917969, 0.03435708618164063, 0.03412377548217774, 0.03383660888671875, 0.033896255493164065, 0.03372412872314453, 0.03374076843261719, 0.03372742462158203, 0.0337259521484375, 0.03374540710449219, 0.033754463195800784, 0.033798305511474606, 0.03396448135375976, 0.03419452667236328, 0.03405923080444336, 0.034102718353271486, 0.03406905746459961, 0.03398451232910156, 0.03424870300292969, 0.034732032775878906, 0.03486105728149414, 0.03461036682128906, 0.034474815368652344, 0.034435073852539064, 0.034410110473632814, 0.0341978874206543, 0.03423027038574219, 0.03423846435546875, 0.03443097686767578, 0.034344928741455075, 0.03402140808105469, 0.03416064071655273, 0.033718273162841796, 0.03372851181030274, 0.033718273162841796, 0.03382067108154297, 0.03384627151489258, 0.03391743850708008, 0.03401779174804687, 0.033744255065917966, 0.03393983840942383, 0.03395772933959961, 0.034052383422851565, 0.034246177673339845, 0.034038368225097655, 0.03404751968383789, 0.034076831817626954, 0.034244735717773436, 0.034793663024902347, 0.03496102523803711, 0.03432076644897461, 0.03431628799438476, 0.034393280029296876, 0.03416681671142578, 0.03508649444580078, 0.03427945709228516, 0.034144287109375, 0.03421651077270508, 0.0344719352722168, 0.03443916702270508, 0.03442892837524414, 0.03426438522338867, 0.035078849792480465, 0.03470336151123047, 0.03445145416259766, 0.034326526641845705, 0.03432556915283203, 0.03448723220825195, 0.034320384979248046, 0.03441459274291992, 0.034402175903320314, 0.03419340896606445, 0.03481340789794922, 0.03455766296386719, 0.03456694412231445, 0.03480969619750977, 0.03478969573974609, 0.03410761642456055, 0.033972225189208984, 0.03462963104248047, 0.033949695587158206, 0.0344719352722168, 0.03363379287719727, 0.03381436920166016, 0.03393833541870117, 0.0338337287902832, 0.03386262512207031, 0.0337940788269043, 0.03389440155029297, 0.03383017730712891, 0.033628959655761716, 0.03366902542114258, 0.033996318817138674, 0.03358156967163086, 0.03369068908691406, 0.033904895782470704, 0.033951454162597657, 0.03408556747436523, 0.034172286987304686, 0.034165664672851564, 0.03426438522338867, 0.0343328971862793, 0.03457276916503906, 0.03394303894042969, 0.03416320037841797, 0.03547452926635742, 0.03509884643554687, 0.03515456008911133, 0.035657184600830075, 0.034979713439941405, 0.03582041549682617, 0.03672371292114258, 0.0348737907409668, 0.034791263580322265, 0.034699840545654295, 0.034356609344482425, 0.03465024185180664, 0.034433536529541016, 0.03400908660888672, 0.03420774459838867, 0.034100223541259765, 0.03459107208251953, 0.03430255889892578, 0.034387199401855466, 0.034372222900390624, 0.03429529571533203, 0.03453392028808594, 0.03455740737915039, 0.03454431915283203, 0.034269184112548826, 0.03431817626953125, 0.03424179077148438, 0.03440505599975586, 0.034037982940673825, 0.03414425659179687, 0.03405414581298828, 0.03399190521240234, 0.03487619018554688, 0.03409100723266602, 0.03400294494628906, 0.03382886505126953, 0.034203647613525394, 0.03415011215209961, 0.03403804779052735, 0.03400233459472656, 0.034009246826171874, 0.0337946891784668, 0.034004768371582034, 0.03411964797973633, 0.03415359878540039, 0.033997760772705075, 0.03387801742553711, 0.033964031219482424, 0.03476889419555664, 0.034471168518066406, 0.03492236709594727, 0.03511180877685547, 0.034928417205810545, 0.035078369140625, 0.035059711456298825, 
0.03495734405517578, 0.03479702377319336, 0.03478374481201172, 0.035471359252929685, 0.034783233642578126, 0.03426892852783203, 0.03453567886352539, 0.03440425491333008, 0.03447532653808594, 0.03426998519897461, 0.0347848014831543, 0.03454387283325195, 0.03574192047119141, 0.042979328155517575, 0.0347911376953125, 0.03455516815185547, 0.034614273071289066, 0.03466998291015625, 0.03435580825805664, 0.034473983764648435, 0.0343900146484375, 0.0352624626159668, 0.03520512008666992, 0.03541779327392578, 0.03526688003540039, 0.03520880126953125, 0.03534070587158203, 0.034887680053710936, 0.03499622344970703, 0.034738174438476564, 0.03477939224243164, 0.03539971160888672, 0.034543327331542965, 0.03440435028076172, 0.03433062362670898, 0.03845939254760742, 0.03466070556640625, 0.034272064208984376, 0.0339035530090332, 0.03426617431640625, 0.03451785659790039, 0.03445145416259766, 0.03423040008544922, 0.03402751922607422, 0.03385139083862305, 0.03366912078857422, 0.033980415344238284, 0.03381452941894531, 0.03399270248413086, 0.034121856689453126, 0.03434659194946289, 0.03420393753051758, 0.03404723358154297, 0.03446246337890625, 0.0344535026550293, 0.03463734436035156, 0.03458915328979492, 0.03450470352172851, 0.03486268615722656, 0.03513494491577149, 0.03815459060668945, 0.0352116813659668, 0.0351470718383789, 0.03500649642944336, 0.03489673614501953, 0.0359277458190918, 0.034668865203857424, 0.03468902587890625, 0.034336769104003906, 0.03510476684570313, 0.034565441131591795, 0.03471763229370117, 0.03698723220825195, 0.03476316833496094, 0.03440166473388672, 0.034568511962890625, 0.03458838272094727, 0.034552417755126956, 0.034581600189208986, 0.03466867065429687, 0.03492534255981445, 0.03453366470336914, 0.03471331024169922, 0.03442073440551758, 0.034408447265625, 0.035972129821777346, 0.03467939376831055, 0.03462131118774414, 0.03392563247680664, 0.03395500946044922, 0.03411231994628906, 0.03412566375732422, 0.03409641647338867, 0.03422911834716797, 0.034108768463134764, 0.03425551986694336, 0.03429075241088867, 0.03424761581420899, 0.0341280632019043, 0.0339884147644043, 0.03385887908935547, 0.034490047454833986, 0.033745918273925785, 0.0348419189453125, 0.03481391906738281, 0.03357974243164062, 0.03366099166870117, 0.03376947021484375, 0.033979583740234375, 0.034419040679931644, 0.03399084854125976, 0.034060577392578124, 0.034269184112548826, 0.03364371109008789, 0.033651168823242185, 0.033503585815429685, 0.03368755340576172, 0.03356675338745117, 0.03402537536621094, 0.03376092910766602, 0.033812896728515625, 0.03415980911254883, 0.034277408599853516, 0.0346583366394043, 0.0346324462890625, 0.03462963104248047, 0.03439616012573242, 0.03470336151123047, 0.034385921478271485, 0.03437516784667969, 0.03437206268310547, 0.0343449935913086, 0.03425215911865234, 0.03483097457885742, 0.03498940658569336, 0.03463043212890625, 0.03449433517456055, 0.03466559982299805, 0.03459980773925781, 0.034696769714355466, 0.03466080093383789, 0.03466035079956055, 0.0347586555480957, 0.03465216064453125, 0.034476032257080076, 0.03451654434204102, 0.03448262405395508, 0.0346049919128418, 0.034622657775878904, 0.03481011199951172, 0.03479001617431641, 0.03479324722290039, 0.034975425720214844, 0.03471414566040039, 0.034631679534912106, 0.034697216033935545, 0.03470336151123047, 0.03468281555175781, 0.03499238586425781, 0.03483628845214844, 0.034566143035888675, 0.03466649627685547, 0.0349224967956543, 0.034797344207763675, 0.034745887756347654, 0.03461382293701172, 0.034809982299804684, 0.03488972854614258, 
0.03462361526489258]",tokens/s,29.113834996963885,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4259.454976,6278.742016,0.0,5876.219904,5603.426816,s,1,11.38801171875,11.38801171875,0.0,11.38801171875,11.38801171875,11.38801171875,11.38801171875,[11.38801171875],,kWh,0.00011678511690417583,1.2874891010286475e-05,3.725725202799457e-05,0.00016691725994245687,,MB,2218.94656,6605.897728,0.0,6188.695552,6011.44064,s,10,1.9970962829589844,0.19970962829589844,0.00037835611710366075,0.19966580963134767,0.20005115203857424,0.20027096786499024,0.20044682052612303,"[0.19911148071289062, 0.19963162231445314, 0.20000230407714845, 0.20049078369140624, 0.19958026123046874, 0.1991742706298828, 0.19969209289550782, 0.1996395263671875, 0.19989999389648438, 0.1998739471435547]",tokens/s,1281.8610809324591,kWh,5.887342262666758e-06,6.492660888288897e-07,3.893347559120047e-06,1.0429955910615696e-05,tokens/kWh,24544686.68841074,MB,2230.25152,6605.897728,0.0,6188.695552,6011.4432,s,10,20.850870605468746,2.0850870605468748,0.0037966980162135898,2.083559814453125,2.089785791015625,2.089999584960937,2.0901706201171875,"[2.08313037109375, 2.079779296875, 2.082039794921875, 2.089588134765625, 2.08293798828125, 2.088509033203125, 2.080945068359375, 2.0839892578125, 2.08973828125, 2.09021337890625]",tokens/s,30.214565709057933,kWh,6.105160577191517e-05,6.732779213202655e-06,4.054793799387988e-05,0.00010833232297899772,tokens/kWh,581543.8852189457,,s,630,20.847355461120596,0.033091040414477156,0.00029963322098415453,0.033041648864746094,0.033405566406250004,0.03363331222534179,0.03439135467529297,"[0.03387308883666992, 0.03327280044555664, 0.033261566162109374, 0.03280467224121094, 0.0348551025390625, 0.03425484848022461, 0.03276515197753906, 0.032721694946289064, 0.032613887786865234, 0.03269683074951172, 0.03263283157348633, 0.03257753753662109, 0.03270800018310547, 0.03268259048461914, 0.03297830581665039, 0.03290582275390625, 0.03274550247192383, 0.0328458251953125, 0.03288399887084961, 0.03303004837036133, 0.03282412719726562, 0.032900638580322265, 0.03284630584716797, 0.032796287536621095, 0.03275539016723633, 0.03288864135742187, 0.03289177703857422, 0.032848064422607424, 0.032957630157470705, 0.032998016357421875, 0.03302604675292969, 0.033142784118652346, 0.033146209716796875, 0.033168033599853514, 0.03314483261108398, 0.03304447937011719, 0.03289702224731445, 0.032927486419677736, 0.032825599670410155, 0.03288678359985352, 0.03277545547485351, 0.03275644683837891, 0.03293523025512695, 0.03287839889526367, 0.03289177703857422, 0.033142784118652346, 0.033342849731445315, 0.03288143920898438, 0.03311600112915039, 0.03313459014892578, 0.033109409332275394, 0.03319801712036133, 0.03310377502441406, 0.033110431671142575, 0.03310217666625977, 0.03322230529785156, 0.03433916854858399, 0.03304857635498047, 0.0332042236328125, 0.03340003204345703, 0.033274654388427735, 
0.03332505416870117, 0.03345993423461914, 0.034001121520996096, 0.033363967895507815, 0.03378956985473633, 0.03298495864868164, 0.03289868927001953, 0.03278144073486328, 0.03285760116577149, 0.03292390441894531, 0.03279872131347656, 0.0327823371887207, 0.03282441711425781, 0.03278688049316406, 0.03276335906982422, 0.03270953750610352, 0.032827262878417966, 0.03267216110229492, 0.03322451019287109, 0.03277619171142578, 0.03277619171142578, 0.03268403244018555, 0.032694271087646484, 0.03282534408569336, 0.03297689437866211, 0.032712158203125, 0.03287503814697266, 0.03296614456176758, 0.032858623504638675, 0.032911361694335936, 0.03295449447631836, 0.032993152618408204, 0.03305392074584961, 0.032914207458496096, 0.03313049697875976, 0.03305795288085937, 0.03302678298950195, 0.03295846557617187, 0.03296063995361328, 0.03298870468139648, 0.0328089599609375, 0.03307356643676758, 0.0329601936340332, 0.033016193389892576, 0.032845569610595704, 0.03331843185424805, 0.032946910858154294, 0.03306275177001953, 0.033433761596679684, 0.032919551849365236, 0.033119873046875, 0.03313292694091797, 0.032925697326660154, 0.03304447937011719, 0.03356412887573242, 0.032950401306152344, 0.03336438369750976, 0.03306886291503906, 0.03298428726196289, 0.03302908706665039, 0.03290249633789062, 0.033331871032714844, 0.03338572692871094, 0.033186561584472654, 0.033023998260498046, 0.033989215850830076, 0.03391836929321289, 0.033170303344726565, 0.033025344848632815, 0.032852672576904295, 0.032833534240722655, 0.03280038452148438, 0.03280729675292969, 0.032888320922851565, 0.033083614349365235, 0.03293008041381836, 0.03298070526123047, 0.0327754898071289, 0.03287343978881836, 0.03306905746459961, 0.03296460723876953, 0.033000511169433595, 0.03287340927124023, 0.03297280120849609, 0.03347455978393555, 0.03303219223022461, 0.03312572860717773, 0.03292550277709961, 0.03295113754272461, 0.03296051025390625, 0.03295004653930664, 0.032936065673828126, 0.032882686614990234, 0.032960289001464846, 0.03287907028198242, 0.03322185516357422, 0.032979358673095704, 0.03301808166503906, 0.033165313720703124, 0.03307283020019531, 0.033039806365966796, 0.03307350540161133, 0.03295695877075195, 0.03280486297607422, 0.03278438568115234, 0.03308099365234375, 0.03327830505371094, 0.03301171112060547, 0.032949695587158205, 0.0329733772277832, 0.032984127044677736, 0.03291388702392578, 0.03297289657592774, 0.03294393539428711, 0.03294425582885742, 0.032898624420166014, 0.03307404708862305, 0.0331673583984375, 0.03317145538330078, 0.033107231140136716, 0.03321315383911133, 0.033355777740478515, 0.0331192626953125, 0.032932830810546876, 0.03309743881225586, 0.033128734588623046, 0.03313644790649414, 0.03326790237426758, 0.03373619079589844, 0.03344598388671875, 0.03358761596679687, 0.0330618896484375, 0.03293798446655274, 0.03273830413818359, 0.032694271087646484, 0.0328458251953125, 0.03273225784301758, 0.03276073455810547, 0.032767105102539065, 0.03280160140991211, 0.03274710464477539, 0.03283811187744141, 0.03274665451049805, 0.03288150405883789, 0.0330401611328125, 0.03296412658691406, 0.03285881423950195, 0.032901119232177735, 0.03312639999389649, 0.03309260940551758, 0.03307827377319336, 0.033030208587646485, 0.03301744079589844, 0.032990879058837894, 0.03301651382446289, 0.033140735626220705, 0.03312432098388672, 0.03313052749633789, 0.033339359283447265, 0.03442454528808594, 0.033603809356689454, 0.03352134323120117, 0.03326377487182617, 0.03306316757202148, 0.03296460723876953, 0.03323484802246094, 0.033003616333007815, 
0.033075199127197266, 0.03295439910888672, 0.03305673599243164, 0.03309363174438477, 0.03314483261108398, 0.03327616119384766, 0.0332756462097168, 0.03318716812133789, 0.033205215454101565, 0.03320563125610351, 0.03320380783081055, 0.03326435089111328, 0.03326371383666992, 0.03327958297729492, 0.03343801498413086, 0.03328812789916992, 0.033357887268066405, 0.0334189453125, 0.03345849609375, 0.03351267242431641, 0.033375007629394535, 0.033373790740966795, 0.03371388626098633, 0.033555103302001954, 0.0337652473449707, 0.03325955200195312, 0.03284950256347656, 0.032813983917236327, 0.032661502838134765, 0.03261609649658203, 0.03272451019287109, 0.03261318588256836, 0.03266336059570313, 0.03266169738769531, 0.0326247673034668, 0.03278137588500977, 0.03283222579956055, 0.03278243255615235, 0.032871551513671875, 0.03287849426269531, 0.03280380630493164, 0.03294003295898437, 0.03295846557617187, 0.03302604675292969, 0.03295577621459961, 0.032881278991699216, 0.03303424072265625, 0.03305267333984375, 0.03305017471313477, 0.033169857025146486, 0.03312591934204102, 0.03294460678100586, 0.033030174255371095, 0.03299939346313477, 0.03315865707397461, 0.03441267013549805, 0.03299776077270508, 0.03309148788452149, 0.03301385498046875, 0.032857440948486326, 0.032869022369384764, 0.03304156875610351, 0.03308614349365235, 0.0329317741394043, 0.032936161041259765, 0.03298627090454102, 0.03299359893798828, 0.03298972702026367, 0.033181697845458984, 0.033314815521240236, 0.033172897338867184, 0.03313881683349609, 0.03306079864501953, 0.0348573112487793, 0.03323513412475586, 0.033339393615722655, 0.03312432098388672, 0.033263519287109376, 0.03326569747924805, 0.03328623962402344, 0.03325059127807617, 0.03302883148193359, 0.033094688415527346, 0.03307209777832031, 0.032993152618408204, 0.03307942581176758, 0.033124225616455075, 0.03397452926635742, 0.03357519912719727, 0.03296851348876953, 0.03291155242919922, 0.032814113616943356, 0.033008609771728516, 0.03298822402954102, 0.03286316680908203, 0.03282329559326172, 0.03295155334472656, 0.03294675064086914, 0.03288288116455078, 0.032841632843017575, 0.033058910369873046, 0.034500606536865236, 0.032863712310791014, 0.03298144149780274, 0.03334748840332031, 0.03304671859741211, 0.03313663864135742, 0.03336191940307617, 0.03301923370361328, 0.03311644744873047, 0.03307107162475586, 0.032962974548339845, 0.033097728729248044, 0.03307724761962891, 0.03305472183227539, 0.03459686279296875, 0.03370569610595703, 0.03335561752319336, 0.03331526565551758, 0.03308748626708984, 0.03300556945800781, 0.03296460723876953, 0.03299532699584961, 0.03299123382568359, 0.033005374908447266, 0.03293203353881836, 0.03302227020263672, 0.032965599060058595, 0.03299401473999024, 0.033643680572509764, 0.03308185577392578, 0.033130657196044924, 0.033167102813720706, 0.03309587097167969, 0.033097984313964844, 0.033073150634765625, 0.033070465087890626, 0.033215423583984376, 0.03312403106689453, 0.03305401611328125, 0.03302649688720703, 0.03306012725830078, 0.03312329483032227, 0.033320960998535154, 0.03322880172729492, 0.033080768585205075, 0.033203903198242186, 0.03317174530029297, 0.03306124877929688, 0.032973182678222655, 0.03340044784545899, 0.03327571105957031, 0.03283820724487305, 0.032728702545166015, 0.032809345245361325, 0.032786113739013675, 0.03286982345581055, 0.03288348770141602, 0.0328070068359375, 0.032935935974121096, 0.03280486297607422, 0.032868446350097655, 0.03293788909912109, 0.03282534408569336, 0.032895263671875, 0.03287590408325195, 0.032853633880615234, 
0.032850654602050784, 0.033017024993896485, 0.03297977447509766, 0.03315302276611328, 0.03289702224731445, 0.032866302490234374, 0.032833534240722655, 0.032812416076660154, 0.032914207458496096, 0.032943710327148434, 0.03292390441894531, 0.033132545471191405, 0.03302809524536133, 0.03293503952026367, 0.03306076812744141, 0.03307167816162109, 0.033235359191894534, 0.03308310317993164, 0.033089183807373045, 0.03305331039428711, 0.03311820983886719, 0.03315043258666992, 0.03323139190673828, 0.03311820983886719, 0.033083393096923826, 0.03311740875244141, 0.03312083053588867, 0.03312384033203125, 0.032991966247558596, 0.03312144088745117, 0.033303390502929686, 0.03316323089599609, 0.03308547210693359, 0.033087295532226564, 0.03312812805175781, 0.03311219024658203, 0.03313907241821289, 0.03319807815551758, 0.03308687973022461, 0.03323088073730469, 0.033116737365722654, 0.03306086349487305, 0.033058815002441407, 0.03306496047973633, 0.03323411178588867, 0.0331124153137207, 0.03379148864746094, 0.03334774398803711, 0.03300572967529297, 0.03297289657592774, 0.03297849655151367, 0.0328072624206543, 0.03278176116943359, 0.03286732864379883, 0.032868350982666016, 0.03287420654296875, 0.03280057525634766, 0.03284835052490234, 0.032804256439208986, 0.03283209609985351, 0.03287039947509766, 0.03292160034179688, 0.03292470550537109, 0.03284681701660156, 0.0328785285949707, 0.03302201461791992, 0.03288585662841797, 0.03286508941650391, 0.03291555023193359, 0.03289436721801758, 0.03278294372558594, 0.03297689437866211, 0.032942081451416014, 0.032892929077148435, 0.03282329559326172, 0.03303577423095703, 0.03312198257446289, 0.033065792083740234, 0.03305472183227539, 0.03311820983886719, 0.032974529266357425, 0.03288636779785156, 0.032948959350585935, 0.03308547210693359, 0.03316646575927734, 0.03322502517700195, 0.033128990173339846, 0.033054046630859375, 0.033043102264404295, 0.03313843154907226, 0.03318342590332031, 0.03308806228637695, 0.03309072113037109, 0.0331393928527832, 0.033151134490966794, 0.03318783950805664, 0.033148609161376956, 0.033093471527099606, 0.03301987075805664, 0.03314729690551758, 0.033132640838623044, 0.03321855926513672, 0.033105918884277344, 0.033662368774414066, 0.03331891250610351, 0.03340758514404297, 0.03372166442871094, 0.03379270553588867, 0.03384476852416992, 0.03405411148071289, 0.03362063980102539, 0.033119712829589844, 0.033017375946044925, 0.03290214538574219, 0.03296444702148438, 0.03280297470092773, 0.03292979049682617, 0.032860160827636715, 0.03294003295898437, 0.03281107330322266, 0.032986400604248046, 0.03291398239135742, 0.03293596649169922, 0.032818496704101564, 0.032922367095947265, 0.03298867034912109, 0.03300534439086914, 0.0329284782409668, 0.032994430541992185, 0.03307609558105469, 0.033004673004150394, 0.032917984008789064, 0.03293225479125977, 0.03302604675292969, 0.03340857696533203, 0.032946624755859376, 0.03293750381469727, 0.03314883041381836, 0.03330460739135742, 0.03312489700317383, 0.0330748176574707, 0.03303667068481445, 0.033175552368164066, 0.033105247497558596, 0.03324105453491211, 0.033099937438964847, 0.03317609786987305, 0.033051902770996094, 0.0329832649230957, 0.03301839828491211, 0.033180896759033206, 0.033317665100097656, 0.03319564819335938, 0.033046783447265624, 0.033226497650146486, 0.033065345764160155, 0.03309059143066406, 0.03321136093139648, 0.03320419311523438, 0.03325491333007812, 0.03346611022949219, 0.033323585510253904, 0.03343113708496094, 0.033458721160888674, 0.03341731262207031, 0.0339046401977539, 0.033957889556884766, 
0.03341516876220703, 0.03355350494384766, 0.03347040176391602, 0.03341961669921875, 0.03340534210205078, 0.03345654296875, 0.03332896041870117, 0.03321225738525391, 0.033030174255371095, 0.03296588897705078, 0.03304127883911133, 0.03296249771118164, 0.032885120391845706, 0.03301465606689453, 0.03290985488891601, 0.03299151992797852, 0.03279030227661133, 0.03277619171142578, 0.03315529632568359, 0.03284377670288086, 0.0330601921081543, 0.03325609588623047, 0.03307334518432617, 0.03305862426757813, 0.033060737609863285, 0.033083518981933596, 0.03304857635498047, 0.03296051025390625, 0.03278460693359375, 0.03288022232055664, 0.03299142456054688, 0.03302195358276367, 0.03296255874633789, 0.03299942398071289, 0.033315967559814454, 0.03285286331176758, 0.03300966262817383, 0.033181472778320314, 0.03309385681152344, 0.033041728973388675, 0.03304422378540039, 0.03308163070678711, 0.032919681549072266, 0.03281155014038086, 0.033013534545898435, 0.03311967849731445, 0.03417168045043945, 0.03287449645996094, 0.03291926574707031, 0.033046817779541014, 0.03459446334838867, 0.033099681854248046, 0.03307881546020508, 0.032920478820800785, 0.033285377502441406, 0.03336262512207031, 0.03349407958984375, 0.03333017730712891, 0.033261566162109374, 0.03398585510253906, 0.033684158325195314, 0.03397561645507813, 0.03336431884765625, 0.03341347122192383, 0.033495040893554685, 0.03341849517822266, 0.03338678359985352, 0.03357718276977539]",tokens/s,30.21966029096218,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,8219.447296,11243.814912,0.0,10848.567296,10616.027648,s,1,14.3384755859375,14.3384755859375,0.0,14.3384755859375,14.3384755859375,14.3384755859375,14.3384755859375,[14.3384755859375],,kWh,0.00021537803253750856,2.375057297665813e-05,6.718449819199945e-05,0.00030631310370616617,,MB,3748.20864,11675.828224,0.0,11265.900544,11070.470656,s,10,3.8320139770507815,0.38320139770507816,0.0009032618281221941,0.38316490173339846,0.3842410583496094,0.3842586364746094,0.38427269897460936,"[0.38118585205078126, 0.3822434387207031, 0.38324179077148435, 0.3830880126953125, 0.3830860595703125, 0.3837288513183594, 0.383967041015625, 0.38295956420898436, 0.38427621459960937, 0.3842371520996094]",tokens/s,668.0560184099962,kWh,1.123143302435866e-05,1.2378677839720334e-06,7.456020922077341e-06,1.9925321730408035e-05,tokens/kWh,12847973.220393144,MB,3752.505344,11677.925376,0.0,11267.997696,11070.473216,s,10,29.213083251953126,2.921308325195313,0.005078015239350936,2.9213482666015627,2.928300830078125,2.9298215576171875,2.9310381396484377,"[2.913442138671875, 2.914806884765625, 2.91856884765625, 2.92129833984375, 2.921822509765625, 2.921398193359375, 2.920527587890625, 2.92191357421875, 2.927962890625, 
2.93134228515625]",tokens/s,21.565679821142446,kWh,8.544600416355846e-05,9.425667577272549e-06,5.6726058201323676e-05,0.00015159772994215466,tokens/kWh,415573.5051180449,,s,630,29.210234874725337,0.046365452182103715,0.0005080789365927493,0.04631377410888672,0.04681200180053711,0.047042915725708005,0.0487489128112793,"[0.048790336608886715, 0.04646899032592773, 0.04569283294677735, 0.045532512664794925, 0.045716415405273436, 0.04563561630249023, 0.04543689727783203, 0.04720435333251953, 0.0456022720336914, 0.045744670867919925, 0.04590182495117188, 0.04625612640380859, 0.04597350311279297, 0.04576412963867187, 0.045816287994384766, 0.04609027099609375, 0.04579459381103516, 0.045824703216552735, 0.0462479362487793, 0.04623769760131836, 0.04613449478149414, 0.046492446899414064, 0.04641772842407227, 0.04601084899902344, 0.04596294403076172, 0.04625324630737305, 0.04609251022338867, 0.045832416534423825, 0.04616640090942383, 0.0460904655456543, 0.04602163314819336, 0.04585916900634766, 0.04627299118041992, 0.046105983734130856, 0.04595321655273438, 0.046145408630371094, 0.04643289566040039, 0.04617004776000976, 0.046257568359375, 0.046723648071289064, 0.04649168014526367, 0.046352161407470706, 0.046532833099365234, 0.04640335845947265, 0.04617443084716797, 0.04630220794677734, 0.04660889434814453, 0.046338558197021484, 0.04629459381103516, 0.04653100967407227, 0.04646092987060547, 0.04606083297729492, 0.046250720977783204, 0.0463474235534668, 0.04615584182739258, 0.04610332870483398, 0.04664700698852539, 0.04654265594482422, 0.04643068695068359, 0.04651987075805664, 0.04682387161254883, 0.04659823989868164, 0.04704655838012695, 0.04849436950683594, 0.045856990814208985, 0.045760353088378905, 0.04563779067993164, 0.04557136154174805, 0.0453966064453125, 0.04576051330566406, 0.04577494430541992, 0.04589337539672852, 0.04617824172973633, 0.04609260940551758, 0.0460063362121582, 0.04573791885375977, 0.0458526725769043, 0.04605878448486328, 0.04585340881347656, 0.04607385635375977, 0.046284801483154295, 0.04641177749633789, 0.04589158248901367, 0.046339870452880856, 0.046549217224121094, 0.046089599609375, 0.046029312133789066, 0.046604415893554685, 0.04609843063354492, 0.04571136093139649, 0.046080001831054686, 0.04604927825927734, 0.04588937759399414, 0.04609244918823242, 0.04640547180175781, 0.04619484710693359, 0.04591820907592774, 0.04609638214111328, 0.0461844482421875, 0.046129150390625, 0.046112415313720706, 0.046440799713134764, 0.04639059066772461, 0.04634636688232422, 0.04662694549560547, 0.0466682243347168, 0.046292545318603516, 0.04635030364990234, 0.046590431213378906, 0.04654694366455078, 0.04628889465332031, 0.04647651290893555, 0.04639372634887695, 0.04637500762939453, 0.04646547317504883, 0.04658153533935547, 0.04627648162841797, 0.04639148712158203, 0.046540191650390625, 0.04642060852050781, 0.04643430328369141, 0.04674883270263672, 0.046596481323242185, 0.04646089553833008, 0.04818380737304687, 0.04646297454833984, 0.04876428985595703, 0.046567649841308595, 0.04588409423828125, 0.04565606307983398, 0.04578700637817383, 0.045811038970947265, 0.0456866569519043, 0.045822784423828124, 0.04606576156616211, 0.0460184326171875, 0.04595312118530273, 0.045953056335449216, 0.04581343841552735, 0.045603134155273437, 0.04593385696411133, 0.0460645751953125, 0.04620470428466797, 0.046022014617919924, 0.04642390441894531, 0.04616825485229492, 0.046156063079833984, 0.04656671905517578, 0.04694262313842774, 0.04645334243774414, 0.04596902465820313, 0.046036865234375, 0.04615756988525391, 
0.0457665901184082, 0.0458535041809082, 0.04624595260620117, 0.04643600082397461, 0.04603017425537109, 0.04606662368774414, 0.0461578254699707, 0.04599427032470703, 0.0460715217590332, 0.0461844482421875, 0.046283008575439454, 0.046300926208496095, 0.046112159729003906, 0.04639599990844727, 0.046373119354248045, 0.04655487823486328, 0.0468823356628418, 0.046819808959960935, 0.046683265686035154, 0.04654751968383789, 0.046458976745605465, 0.04815027236938477, 0.046118465423583985, 0.046375839233398435, 0.0461475830078125, 0.04639664077758789, 0.04657625579833984, 0.04638022232055664, 0.04643324661254883, 0.0466165771484375, 0.046665950775146486, 0.04657888031005859, 0.04669091033935547, 0.046671871185302735, 0.04688057708740234, 0.0468988151550293, 0.047982593536376954, 0.04583212661743164, 0.045819713592529294, 0.04576623916625976, 0.045891807556152346, 0.045908416748046875, 0.045778942108154294, 0.045641727447509765, 0.045707263946533204, 0.045889537811279295, 0.04615695953369141, 0.04605414581298828, 0.04594073486328125, 0.04598147201538086, 0.04613561630249023, 0.04597964859008789, 0.046327457427978516, 0.04626617431640625, 0.046129409790039065, 0.04613891220092774, 0.04638956832885742, 0.04623353576660156, 0.046079872131347656, 0.04601715087890625, 0.046186496734619144, 0.04604313659667969, 0.04586697769165039, 0.04620233535766602, 0.046114398956298826, 0.04604140853881836, 0.04619676971435547, 0.04623593521118164, 0.046164321899414065, 0.04602460861206055, 0.0489780158996582, 0.04586700820922852, 0.04600831985473633, 0.04661862564086914, 0.04712582397460938, 0.04619251251220703, 0.04662748718261719, 0.04663056182861328, 0.0463171501159668, 0.04629391860961914, 0.04646527862548828, 0.04650086212158203, 0.04625641632080078, 0.04618428802490234, 0.0464920654296875, 0.04635670471191406, 0.046386558532714846, 0.046545536041259765, 0.04799798583984375, 0.04644745635986328, 0.046678142547607424, 0.04610611343383789, 0.048622081756591794, 0.046265567779541016, 0.04658179092407227, 0.046567169189453125, 0.04706816101074219, 0.0469483528137207, 0.04675945663452148, 0.04882886505126953, 0.04620243072509766, 0.045801921844482424, 0.04576870346069336, 0.04572159957885742, 0.045706783294677734, 0.04565164947509766, 0.04824758529663086, 0.045402111053466795, 0.0462110710144043, 0.04657356643676758, 0.04604108810424805, 0.04570492935180664, 0.04580585479736328, 0.04612300872802735, 0.04591001510620117, 0.04602243041992188, 0.04643862533569336, 0.0463804817199707, 0.046113086700439454, 0.04674492645263672, 0.04651046371459961, 0.045932830810546874, 0.045879550933837894, 0.046179550170898434, 0.04605199813842773, 0.045873279571533206, 0.04630656051635742, 0.04609231948852539, 0.04791984176635742, 0.046088191986083986, 0.04621692657470703, 0.046051265716552735, 0.046008544921875, 0.04627628707885742, 0.04636102294921875, 0.04607183837890625, 0.046161441802978515, 0.04635692977905274, 0.04637654495239258, 0.04651459121704102, 0.046811134338378906, 0.04673715209960937, 0.04655100631713867, 0.04662643051147461, 0.04648313522338867, 0.04627062225341797, 0.04651295852661133, 0.04782694244384766, 0.045956287384033206, 0.046285377502441404, 0.04653286361694336, 0.04636467361450195, 0.046389183044433596, 0.04651424026489258, 0.0465629768371582, 0.046460990905761716, 0.04677824020385742, 0.04654735946655274, 0.04641487884521484, 0.0467465934753418, 0.04689113616943359, 0.04667174530029297, 0.049076255798339845, 0.046418975830078125, 0.047290943145751954, 0.0452694091796875, 0.04568262481689453, 
0.04560294342041016, 0.04567830276489258, 0.04598601531982422, 0.045980960845947265, 0.04584499359130859, 0.04589206314086914, 0.04603398513793945, 0.045857471466064455, 0.04581171035766601, 0.04628227233886719, 0.04616649627685547, 0.045980926513671874, 0.04651084899902344, 0.04636444854736328, 0.046199039459228514, 0.04647727966308594, 0.04694940948486328, 0.04656768035888672, 0.04591036987304688, 0.04625801467895508, 0.046170654296875, 0.046047233581542966, 0.045943904876708984, 0.04630764770507813, 0.04627926254272461, 0.04607590484619141, 0.04590137481689453, 0.046219711303710935, 0.04603289413452148, 0.04607555389404297, 0.046546367645263674, 0.04630137634277344, 0.046203617095947266, 0.04643840026855469, 0.04635647964477539, 0.046383102416992186, 0.0466346549987793, 0.04706089782714844, 0.046680511474609374, 0.046583168029785155, 0.046516094207763675, 0.04643507385253906, 0.04624163055419922, 0.04672262573242188, 0.04647366333007812, 0.04632592010498047, 0.04637247848510742, 0.046615230560302735, 0.04656300735473633, 0.04637286376953125, 0.04642780685424805, 0.04661897659301758, 0.0466063346862793, 0.0470362548828125, 0.04669164657592773, 0.04658380889892578, 0.04698633575439453, 0.04720614242553711, 0.04878131103515625, 0.04636262512207031, 0.04572332763671875, 0.045598751068115236, 0.04585004806518555, 0.04594748687744141, 0.045784351348876956, 0.046113471984863284, 0.045926593780517576, 0.045942367553710936, 0.0459901123046875, 0.04608227157592774, 0.04585055923461914, 0.04611830520629883, 0.04619251251220703, 0.04632473754882813, 0.04633795166015625, 0.04605523300170898, 0.04623782348632813, 0.04636876678466797, 0.046120960235595705, 0.04638508987426758, 0.046556480407714845, 0.04625075149536133, 0.045958656311035156, 0.04608256149291992, 0.04616396713256836, 0.04612505722045898, 0.04609843063354492, 0.046325759887695314, 0.04616396713256836, 0.046050846099853514, 0.04623203277587891, 0.04628684616088867, 0.04603289413452148, 0.046534400939941406, 0.04664140701293945, 0.04655104064941406, 0.04653875350952148, 0.04665753555297852, 0.04640134429931641, 0.04645497512817383, 0.046749855041503904, 0.046782047271728515, 0.04628828811645508, 0.04619452667236328, 0.04653276824951172, 0.04633481597900391, 0.046258113861083985, 0.04644358444213867, 0.046496768951416016, 0.04641299057006836, 0.04677510452270508, 0.04676403045654297, 0.04657356643676758, 0.046429664611816406, 0.04663059234619141, 0.04654489517211914, 0.046711647033691406, 0.04682137680053711, 0.04682080078125, 0.04681084823608399, 0.04669731140136719, 0.0480662727355957, 0.0462372817993164, 0.04597564697265625, 0.0458260498046875, 0.04557065582275391, 0.04574784088134766, 0.046035232543945315, 0.045902976989746096, 0.046201919555664064, 0.0462591667175293, 0.04632057571411133, 0.04592025756835937, 0.04595238494873047, 0.04604079818725586, 0.04600924682617188, 0.045946880340576174, 0.04645273590087891, 0.04635548782348633, 0.04605436706542969, 0.0463724479675293, 0.046548511505126955, 0.04615667343139648, 0.0463296012878418, 0.046319774627685543, 0.04617331314086914, 0.04604988861083984, 0.04626265716552734, 0.04628400039672852, 0.04620556640625, 0.046100639343261716, 0.04659404754638672, 0.04641523361206055, 0.046125473022460936, 0.0463218879699707, 0.04637081527709961, 0.04614144134521484, 0.04640768051147461, 0.04671897506713867, 0.046430206298828124, 0.04642598342895508, 0.04682099151611328, 0.04670105743408203, 0.04625408172607422, 0.046516063690185544, 0.046543006896972654, 0.04632156753540039, 0.046443809509277345, 
0.04660921478271484, 0.04670230484008789, 0.04668592071533203, 0.04665401458740234, 0.04637696075439453, 0.04642406463623047, 0.04664934539794922, 0.04658585739135742, 0.04609344100952149, 0.04669289779663086, 0.04698556900024414, 0.04679065704345703, 0.04670259094238281, 0.0467672004699707, 0.046811038970947266, 0.046855167388916014, 0.04887347030639649, 0.046295455932617184, 0.04589753723144531, 0.045660926818847654, 0.045930240631103514, 0.04567270278930664, 0.04603903961181641, 0.046086143493652344, 0.04598566436767578, 0.04605964660644531, 0.046252033233642575, 0.046309375762939455, 0.04600831985473633, 0.04635359954833984, 0.04610307312011719, 0.04591030502319336, 0.046166015625, 0.04637007904052735, 0.046233375549316405, 0.04629190444946289, 0.04699955368041992, 0.04694182586669922, 0.04652671813964844, 0.04645465469360351, 0.04648905563354492, 0.046287647247314455, 0.04605952072143555, 0.04634982299804687, 0.046166526794433595, 0.04617375946044922, 0.04634259033203125, 0.046307582855224606, 0.04625081634521484, 0.04653091049194336, 0.046400096893310545, 0.04623769760131836, 0.04621516799926758, 0.046671775817871096, 0.04653884887695312, 0.046386302947998045, 0.04676492691040039, 0.04684185409545898, 0.04671241760253906, 0.04657398223876953, 0.04682732772827149, 0.046796062469482425, 0.04646601486206055, 0.04657670211791992, 0.04637760162353516, 0.046323966979980466, 0.046755840301513675, 0.046695838928222655, 0.04634048080444336, 0.04807113647460937, 0.04646169662475586, 0.04631039810180664, 0.04656899261474609, 0.046688735961914064, 0.046516223907470705, 0.046823135375976564, 0.04717391967773438, 0.0470384635925293, 0.04715919876098633, 0.048711265563964844, 0.04620569610595703, 0.04592832183837891, 0.046012542724609376, 0.04587519836425781, 0.04585443115234375, 0.04607372665405273, 0.04580188751220703, 0.04584844970703125, 0.04600640106201172, 0.046723072052001956, 0.04641308975219727, 0.04597423934936523, 0.045956863403320315, 0.0464667854309082, 0.04749756622314453, 0.04595859146118164, 0.04626918411254883, 0.04628246307373047, 0.04636716842651367, 0.04692342376708984, 0.0466761589050293, 0.04644432067871094, 0.04650620651245117, 0.04657084655761719, 0.0461473617553711, 0.04612160110473633, 0.046217247009277346, 0.04620918273925781, 0.04617737579345703, 0.046556224822998045, 0.04630108642578125, 0.04623791885375977, 0.04615484619140625, 0.046932640075683596, 0.046529888153076175, 0.04633603286743164, 0.04660451126098633, 0.046502334594726566, 0.04640768051147461, 0.046683391571044924, 0.04759164810180664, 0.04658777618408203, 0.04670083236694336, 0.04724057769775391, 0.04684505462646484, 0.046696319580078124, 0.04652236938476562, 0.046581760406494144, 0.04631961441040039, 0.04665753555297852, 0.04655702209472656, 0.046473377227783205, 0.04669161605834961, 0.0466778564453125, 0.04694515228271484, 0.04668620681762695, 0.04698521423339844, 0.04693561553955078, 0.04694633483886719, 0.04706550216674805, 0.046941665649414065, 0.04687129592895508]",tokens/s,21.567782754979433,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) 
Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) 
Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8806.985728,10152.18176,0.0,9749.659648,9714.717184,s,1,14.500876953125,14.500876953125,0.0,14.500876953125,14.500876953125,14.500876953125,14.500876953125,[14.500876953125],,kWh,0.0002100704608041648,2.3161825936238284e-05,6.408894015999533e-05,0.0002973212269003984,,MB,1873.637376,10580.000768,0.0,10162.798592,10037.824,s,10,4.6236782531738285,0.46236782531738274,0.0004647463172391825,0.46233811950683595,0.4630218353271484,0.46305596160888673,0.46308326263427735,"[0.46163937377929687, 0.46235504150390627, 0.463090087890625, 0.46210296630859377, 0.4626707458496094, 0.46301425170898436, 0.46232119750976564, 0.4622552185058594, 0.46255322265625, 0.4616761474609375]",tokens/s,553.6717435394086,kWh,1.3528553554355982e-05,1.4919736555904223e-06,9.02868146536356e-06,2.4049208675309965e-05,tokens/kWh,10644840.89502793,MB,1885.925376,10791.81312,0.0,10374.610944,10299.609088,s,10,34.45157177734375,3.4451571777343752,0.0029347249595274515,3.4456467285156247,3.448758935546875,3.4489090087890624,3.4490290673828126,"[3.440833740234375, 3.440480224609375, 3.44821142578125, 3.44553466796875, 3.44905908203125, 3.44396337890625, 3.44284619140625, 3.4457587890625, 3.4487255859375, 
3.44615869140625]",tokens/s,18.28653868310021,kWh,0.0001005578825048101,1.1089824029380255e-05,6.664212907123668e-05,0.000178289835605427,tokens/kWh,353357.2162769011,,s,630,34.447690570831305,0.05467887392195444,0.0004015603324851577,0.05468820762634277,0.0551680305480957,0.05526932697296143,0.05553693641662598,"[0.05422867202758789, 0.05403680038452149, 0.053889022827148435, 0.05436969757080078, 0.05394729614257812, 0.0540013427734375, 0.05393337631225586, 0.05396960067749024, 0.054132095336914064, 0.0543238410949707, 0.05414281463623047, 0.05396319961547852, 0.05421635055541992, 0.05435776138305664, 0.054083904266357424, 0.05413283157348633, 0.05435382461547852, 0.05403987121582031, 0.05417644882202149, 0.054144832611083986, 0.054126270294189455, 0.05419641494750976, 0.05427347183227539, 0.054242176055908205, 0.053991073608398436, 0.054182239532470707, 0.054335487365722655, 0.054788097381591794, 0.05479423904418945, 0.05427414321899414, 0.054812576293945314, 0.05461401748657226, 0.054687679290771486, 0.054570079803466794, 0.05455152130126953, 0.054542335510253906, 0.05467340850830078, 0.05479171371459961, 0.054767902374267576, 0.054835391998291017, 0.05471171188354492, 0.0545715217590332, 0.0548037109375, 0.05501628875732422, 0.05492326354980469, 0.05503385543823242, 0.055075904846191404, 0.05503478240966797, 0.05489433670043945, 0.05501571273803711, 0.05505862426757813, 0.05501318359375, 0.05729008102416992, 0.054873825073242184, 0.05520851135253906, 0.05571014404296875, 0.05506041717529297, 0.05511526489257813, 0.05516534423828125, 0.054934974670410155, 0.05519187164306641, 0.0551440315246582, 0.05511052703857422, 0.05452979278564453, 0.054069503784179684, 0.05406412887573242, 0.05388828659057617, 0.05407209777832031, 0.054017982482910155, 0.0539634895324707, 0.05396300888061523, 0.05396892929077148, 0.05406105422973633, 0.054247390747070315, 0.05419772720336914, 0.05432582473754883, 0.05438873672485352, 0.05432831954956055, 0.05408256149291992, 0.05439667129516602, 0.05438694381713867, 0.05415273666381836, 0.05425392150878906, 0.05429056167602539, 0.054636737823486325, 0.054169185638427736, 0.05411862564086914, 0.054042945861816405, 0.0547402229309082, 0.05473110580444336, 0.05432640075683594, 0.05428499221801758, 0.054504928588867185, 0.05441004943847656, 0.05456076812744141, 0.05482291030883789, 0.05465673446655273, 0.05474540710449219, 0.05442464065551758, 0.05468867111206055, 0.054725887298583985, 0.0544796142578125, 0.05485548782348633, 0.05480291366577148, 0.05477481460571289, 0.054815425872802734, 0.05493958282470703, 0.05487558364868164, 0.05462694549560547, 0.05485327911376953, 0.05483536148071289, 0.055124160766601565, 0.05516070556640625, 0.055140384674072264, 0.05512611389160156, 0.055183361053466794, 0.05506582260131836, 0.05506252670288086, 0.05516777420043945, 0.05552489471435547, 0.05520566558837891, 0.055392704010009765, 0.05529792022705078, 0.05515302276611328, 0.05511167907714844, 0.055283008575439455, 0.05487891387939453, 0.054320510864257814, 0.05424361419677735, 0.05436000061035156, 0.05403692626953125, 0.0542845458984375, 0.05420019149780273, 0.05389340972900391, 0.054220638275146484, 0.05421593475341797, 0.05417180633544922, 0.05410060882568359, 0.054269119262695314, 0.05423321533203125, 0.05419411087036133, 0.05419260787963867, 0.05469619369506836, 0.054798336029052735, 0.054599681854248044, 0.05461552047729492, 0.05455887985229492, 0.05444441604614258, 0.054763553619384765, 0.05457936096191406, 0.0545780143737793, 0.054526142120361325, 0.0546822395324707, 
0.054507678985595706, 0.05463846588134766, 0.05465100860595703, 0.05472079849243164, 0.05477056121826172, 0.05475619125366211, 0.05477151870727539, 0.054732990264892575, 0.054799392700195314, 0.05491145706176758, 0.05478665542602539, 0.05494889450073242, 0.05495487976074219, 0.05500723266601563, 0.05493532943725586, 0.05484703826904297, 0.055112350463867185, 0.0549048957824707, 0.05468582534790039, 0.05478790283203125, 0.05514358520507812, 0.05515536117553711, 0.05496425628662109, 0.055167137145996095, 0.05528575897216797, 0.05487161636352539, 0.05527606582641602, 0.05547407913208008, 0.05535129547119141, 0.05512188720703125, 0.05523401641845703, 0.055173408508300784, 0.05508931350708008, 0.05532175827026367, 0.05506351852416992, 0.05526528167724609, 0.054561279296875, 0.05416803359985352, 0.05452137756347656, 0.05408406448364258, 0.05402828979492187, 0.054196224212646485, 0.05430476760864258, 0.05415932846069336, 0.05404249572753906, 0.05420048141479492, 0.054178081512451175, 0.05418307113647461, 0.0540513916015625, 0.05433135986328125, 0.054165695190429686, 0.05416739273071289, 0.05414912033081055, 0.054179840087890625, 0.05439712142944336, 0.05451865768432617, 0.05475219345092774, 0.05465305709838867, 0.054534015655517576, 0.05444723129272461, 0.055089473724365234, 0.05471398544311523, 0.05463260650634766, 0.054733600616455075, 0.05460969543457031, 0.05443587112426758, 0.05481059265136719, 0.054593856811523435, 0.05472451019287109, 0.05462835311889649, 0.054596702575683595, 0.05482998275756836, 0.05470316696166992, 0.05495868682861328, 0.05511612701416015, 0.05501705551147461, 0.05501724624633789, 0.054977153778076174, 0.05511782455444336, 0.05480652618408203, 0.05489984130859375, 0.054930145263671876, 0.054943904876708985, 0.055005184173583986, 0.05479423904418945, 0.05497651290893555, 0.054967647552490236, 0.055076641082763673, 0.05514704132080078, 0.05495820617675781, 0.05513811111450195, 0.055070560455322264, 0.05519571304321289, 0.05525705718994141, 0.05512860870361328, 0.05526483154296875, 0.05518819046020508, 0.055076160430908204, 0.05499871826171875, 0.05475788879394531, 0.05428838348388672, 0.054073345184326174, 0.0543744010925293, 0.05444124984741211, 0.05418057632446289, 0.05402582550048828, 0.05440457534790039, 0.05408428955078125, 0.05410022354125977, 0.05410508728027344, 0.05418937683105469, 0.054425281524658205, 0.05421612930297852, 0.05429510498046875, 0.054341087341308596, 0.05445788955688476, 0.05505513763427734, 0.05453155136108399, 0.054469310760498046, 0.05445228958129883, 0.05475532913208008, 0.054617729187011715, 0.05439936065673828, 0.05480448150634765, 0.05475459289550781, 0.05444796752929688, 0.05467014312744141, 0.054675201416015624, 0.054565185546875, 0.054687744140625, 0.05468716812133789, 0.05485193634033203, 0.05464863967895508, 0.05489673614501953, 0.05481292724609375, 0.054906143188476565, 0.05482086563110351, 0.054919681549072265, 0.05514473724365234, 0.054973567962646484, 0.055081855773925784, 0.05508505630493164, 0.054908512115478515, 0.0549851188659668, 0.05502975845336914, 0.05497635269165039, 0.054884449005126956, 0.05501139068603516, 0.05503315353393555, 0.05488505554199219, 0.05498783874511719, 0.05519279861450195, 0.055131744384765625, 0.055103488922119144, 0.055050048828125, 0.05523072052001953, 0.0552817268371582, 0.05514767837524414, 0.05559487915039062, 0.05527616119384766, 0.05522876739501953, 0.055244800567626956, 0.0548636474609375, 0.05409571075439453, 0.054019329071044925, 0.05399849700927734, 0.05418598556518555, 0.05427977752685547, 
0.05416592025756836, 0.05439897537231445, 0.05431296157836914, 0.05428838348388672, 0.05447679901123047, 0.054252830505371094, 0.05438723373413086, 0.0544134407043457, 0.05428025436401367, 0.05436985778808594, 0.054258113861083986, 0.05426499176025391, 0.0545145263671875, 0.05446665573120117, 0.054494239807128905, 0.054379390716552733, 0.05434777450561523, 0.05465087890625, 0.054583297729492185, 0.054583297729492185, 0.054291679382324216, 0.054429664611816406, 0.05454726409912109, 0.05448198318481445, 0.05450643157958984, 0.0544747200012207, 0.054695968627929685, 0.054728607177734374, 0.05450486373901367, 0.054598495483398436, 0.05472857666015625, 0.054757377624511716, 0.05475052642822266, 0.0548267822265625, 0.05504499053955078, 0.05487129592895508, 0.054745025634765625, 0.05465740966796875, 0.054751201629638674, 0.05481468963623047, 0.05480089569091797, 0.05505340957641602, 0.055094432830810544, 0.0552564811706543, 0.05487849426269531, 0.05515433502197266, 0.05529958343505859, 0.05523305511474609, 0.055201793670654295, 0.05501776123046875, 0.05498860931396484, 0.05498268890380859, 0.05513628768920899, 0.054937824249267575, 0.05505843353271484, 0.055373825073242185, 0.0554967041015625, 0.054488895416259765, 0.05436643218994141, 0.05398233413696289, 0.05410639953613281, 0.054319839477539066, 0.05415449523925781, 0.05394476699829102, 0.0542496337890625, 0.05439078521728516, 0.054101215362548825, 0.054126529693603515, 0.0545513916015625, 0.054166622161865234, 0.054540542602539065, 0.05425219345092774, 0.054197502136230466, 0.05405756759643555, 0.05424991989135742, 0.05428396987915039, 0.0543047981262207, 0.05437055969238281, 0.054626049041748045, 0.05439683151245117, 0.05430089569091797, 0.05444905471801758, 0.0543590087890625, 0.05442348861694336, 0.05462432098388672, 0.05442995071411133, 0.054488929748535156, 0.05444588851928711, 0.05457929611206055, 0.054467742919921874, 0.05461030578613281, 0.054821342468261716, 0.05444598388671875, 0.054719841003417966, 0.05487094497680664, 0.0552077751159668, 0.05505023956298828, 0.054724094390869144, 0.055001823425292966, 0.05473462295532226, 0.054863872528076174, 0.054871585845947264, 0.05499542236328125, 0.05492700958251953, 0.054745246887207034, 0.054898880004882813, 0.05495548629760742, 0.05500163269042969, 0.05486796951293945, 0.0549747200012207, 0.05493119812011719, 0.05503395080566406, 0.05523436737060547, 0.05554185485839844, 0.05527104187011719, 0.05526528167724609, 0.05541926574707031, 0.05526131057739258, 0.05505356979370117, 0.05541542434692383, 0.054540542602539065, 0.05407731246948242, 0.05421433639526367, 0.054038177490234374, 0.05396115112304688, 0.05423235321044922, 0.05424851226806641, 0.054321025848388674, 0.05421811294555664, 0.05415385437011719, 0.054658241271972656, 0.05447161483764648, 0.05420854568481445, 0.05442521667480469, 0.054403297424316405, 0.05436620712280273, 0.05441641616821289, 0.054313793182373046, 0.05418627166748047, 0.05416934585571289, 0.05446886444091797, 0.054682945251464846, 0.0543012466430664, 0.05470003128051758, 0.05468979263305664, 0.05455257415771484, 0.054568225860595704, 0.054488895416259765, 0.054801055908203125, 0.05481510543823242, 0.054725631713867184, 0.05454342269897461, 0.054787487030029294, 0.054599422454833985, 0.054647262573242185, 0.05469427108764648, 0.054812480926513675, 0.05462015914916992, 0.054880256652832034, 0.0547690544128418, 0.054741600036621096, 0.05480857467651367, 0.05469510269165039, 0.054839649200439454, 0.054626785278320315, 0.054691841125488284, 0.05504719924926758, 
0.055167007446289065, 0.054895294189453124, 0.05513593673706055, 0.05500163269042969, 0.055760929107666016, 0.05532223892211914, 0.05503219223022461, 0.05518163299560547, 0.055128864288330075, 0.05511209487915039, 0.05508761596679688, 0.0554886703491211, 0.055000896453857424, 0.055107616424560545, 0.055387359619140625, 0.05535004806518555, 0.0547627182006836, 0.054207134246826175, 0.054159038543701174, 0.05399728012084961, 0.054238239288330076, 0.05430662536621094, 0.05446995162963867, 0.054715072631835934, 0.054279296875, 0.05429094314575195, 0.05422732925415039, 0.05437849426269531, 0.05457619094848633, 0.0545863037109375, 0.05452339172363281, 0.05420051193237305, 0.05446073532104492, 0.05459987258911133, 0.05426764678955078, 0.05463065719604492, 0.05453392028808594, 0.05453337478637695, 0.054507518768310545, 0.05434758377075195, 0.054811614990234375, 0.054591487884521485, 0.05446246337890625, 0.054755233764648435, 0.054685791015625, 0.05441151809692383, 0.054596641540527346, 0.054739456176757816, 0.05488662338256836, 0.05462953567504883, 0.055607872009277345, 0.05470566558837891, 0.05504415893554687, 0.05482364654541016, 0.05493468856811524, 0.05493251037597656, 0.05472204971313477, 0.05488393783569336, 0.05503664016723633, 0.054873855590820315, 0.05483750534057617, 0.054834175109863284, 0.0550463981628418, 0.055098110198974606, 0.054798336029052735, 0.05486796951293945, 0.05496937561035156, 0.055247840881347654, 0.055139583587646486, 0.05506233596801758, 0.05506963348388672, 0.05492940902709961, 0.05496319961547851, 0.05514518356323242, 0.05518159866333008, 0.055145633697509765, 0.055433311462402345, 0.055188255310058595, 0.055441375732421874, 0.05469516754150391, 0.05441126251220703, 0.0555975341796875, 0.05420800018310547, 0.0541561279296875, 0.054037567138671874, 0.053887966156005856, 0.05432115173339844, 0.05425139236450195, 0.054179969787597655, 0.05408534240722656, 0.054173534393310546, 0.05427449417114258, 0.05431449508666992, 0.054456832885742185, 0.054599681854248044, 0.054474750518798826, 0.05450342559814453, 0.05444607925415039, 0.05459487915039062, 0.0546044807434082, 0.05462393569946289, 0.054253089904785154, 0.05457180786132813, 0.054740993499755856, 0.05520582580566406, 0.05418399810791016, 0.054476001739501956, 0.054627105712890624, 0.05456617736816406, 0.054489505767822265, 0.054427391052246095, 0.05457158279418945, 0.05467356872558594, 0.05491017532348633, 0.05498470306396484, 0.054825599670410154, 0.054712352752685545, 0.05476665496826172, 0.05467228698730469, 0.05457920074462891, 0.05467136001586914, 0.05513651275634766, 0.05512371063232422, 0.05459715270996094, 0.055002784729003905, 0.054712703704833984, 0.05477830505371094, 0.05497158432006836, 0.0549343376159668, 0.05483107376098633, 0.054978591918945316, 0.05523244857788086, 0.055170337677001954, 0.05513296127319336, 0.05526723098754883, 0.05528995132446289, 0.05498793411254883, 0.055022369384765624, 0.05521414566040039, 0.05546416091918945, 0.05501721572875977, 0.05511734390258789]",tokens/s,18.28859901956547,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1180.52864,987.62752,0.0,585.105408,557.135872,s,1,7.99470458984375,7.99470458984375,0.0,7.99470458984375,7.99470458984375,7.99470458984375,7.99470458984375,[7.99470458984375],,kWh,2.852487020832844e-05,3.1392542437428922e-06,7.834728490002663e-06,3.949885294207399e-05,,MB,1415.221248,1147.011072,0.0,729.808896,689.092096,s,10,0.35479804611206056,0.035479804611206055,0.00023749420585450604,0.03540359878540039,0.03567667045593261,0.03587897510528564,0.03604081882476807,"[0.03608127975463867, 0.035317855834960936, 0.035357185363769535, 0.03541324615478516, 0.035393951416015625, 0.035485118865966794, 0.0356317138671875, 0.03561145782470703, 0.035198398590087894, 0.035307838439941404]",tokens/s,7215.372316879788,kWh,1.0630116109241498e-06,1.1717939588354838e-07,7.026844005308448e-07,1.8828754073385428e-06,tokens/kWh,135962262.2942735,MB,1448.52992,1293.811712,0.0,876.609536,689.094656,s,10,15.365469116210939,1.536546911621094,0.01590380070833513,1.5357119750976562,1.560058642578125,1.5626862426757813,1.5647883227539061,"[1.51957666015625, 1.539539794921875, 1.5273941650390626, 1.5139232177734374, 1.5594747314453126, 1.546079833984375, 1.522742919921875, 1.5653138427734374, 1.5351138916015625, 1.53631005859375]",tokens/s,41.001026082264865,kWh,4.5717064741992396e-05,5.042235755770898e-06,1.7766524819269097e-05,6.852582531703238e-05,tokens/kWh,919361.4189764613,,s,630,15.359377002716053,0.024379963496374706,0.0005870997016605543,0.02421564769744873,0.025156179428100587,0.025314120578765868,0.025981662845611573,"[0.023754783630371094, 0.02390012741088867, 0.023748767852783202, 0.024022880554199218, 0.023781152725219728, 0.023604480743408204, 0.02354854393005371, 0.023556447982788085, 0.023836671829223634, 0.02386751937866211, 0.023820159912109375, 0.023808000564575195, 0.024053567886352538, 0.023920831680297853, 0.02437507247924805, 0.024106880187988282, 0.023861536026000975, 0.02393328094482422, 0.025743135452270506, 0.02531315231323242, 0.024536319732666016, 0.024564544677734376, 0.024368703842163084, 0.023953855514526366, 0.02431558418273926, 0.02383443260192871, 0.023924448013305663, 0.024039392471313478, 0.024804288864135743, 0.0241232967376709, 0.024733312606811525, 0.024463712692260744, 0.024184127807617188, 0.024040224075317383, 0.023983200073242186, 0.023896352767944336, 0.023802112579345704, 0.02401299285888672, 0.023830623626708985, 0.023803903579711915, 0.02377244758605957, 0.023957887649536134, 0.024281440734863283, 0.024321184158325196, 0.024096704483032225, 0.024373952865600585, 0.023786815643310547, 0.02390019226074219, 0.023851903915405273, 0.0238056640625, 0.023920352935791016, 0.024211360931396485, 0.02412611198425293, 0.024404096603393554, 0.02434239959716797, 0.024712671279907228, 0.024233919143676758, 0.024140384674072264, 0.0243240966796875, 0.0243240966796875, 0.024419456481933593, 0.024050559997558594, 0.02396272087097168, 0.024312639236450197, 0.02477670478820801, 0.024715520858764647, 0.024433855056762696, 0.024557119369506837, 0.024558591842651366, 0.026072608947753907, 0.025129375457763673, 0.025032032012939454, 0.024963808059692384, 0.02507161521911621, 0.02513100814819336, 0.025604223251342772, 0.025353919982910155, 0.02526380729675293, 0.02545510482788086, 0.025149440765380858, 0.025218463897705077, 0.02537504005432129, 0.025487648010253907, 
0.024898591995239257, 0.02529520034790039, 0.024912511825561524, 0.024647680282592774, 0.024707071304321288, 0.02444419288635254, 0.024324832916259767, 0.024559616088867187, 0.02429952049255371, 0.024202816009521483, 0.02411689567565918, 0.025572128295898437, 0.024591808319091798, 0.024187456130981444, 0.02388991928100586, 0.023973535537719727, 0.024201568603515626, 0.023887136459350585, 0.02381452751159668, 0.023729759216308592, 0.023816287994384764, 0.023836320877075195, 0.023802879333496094, 0.02383695983886719, 0.02378927993774414, 0.023840768814086914, 0.023725759506225585, 0.023785791397094726, 0.024010112762451172, 0.024234399795532227, 0.02373859214782715, 0.02396329689025879, 0.02368729591369629, 0.023885343551635744, 0.023662912368774415, 0.02375718307495117, 0.02385228729248047, 0.02415804862976074, 0.02410793685913086, 0.02411897659301758, 0.0237890567779541, 0.02381507110595703, 0.023939039230346678, 0.023869728088378905, 0.024274431228637695, 0.024156063079833985, 0.023761152267456054, 0.023977472305297853, 0.02384230422973633, 0.02380022430419922, 0.023830976486206055, 0.025990432739257812, 0.02510323143005371, 0.02425164794921875, 0.02396236801147461, 0.023795007705688476, 0.023919296264648438, 0.023932832717895508, 0.024090143203735353, 0.023977855682373046, 0.024031328201293944, 0.02385980796813965, 0.023953535079956054, 0.023674751281738283, 0.02537676811218262, 0.024120832443237306, 0.024010784149169923, 0.023859071731567382, 0.023847135543823242, 0.023742719650268553, 0.023715999603271483, 0.023822303771972655, 0.023909887313842772, 0.024375167846679688, 0.024342880249023438, 0.023900287628173828, 0.023965856552124024, 0.02392268753051758, 0.025228511810302733, 0.026054784774780272, 0.024394079208374022, 0.024157888412475587, 0.02404524803161621, 0.024797151565551758, 0.02417148780822754, 0.023895551681518554, 0.0241744327545166, 0.024338623046875, 0.024219423294067382, 0.02419731140136719, 0.024197792053222655, 0.024208415985107423, 0.02398489570617676, 0.02426214408874512, 0.02439593505859375, 0.029141664505004883, 0.0245798397064209, 0.024278976440429687, 0.023932992935180666, 0.023779327392578126, 0.023760896682739258, 0.023688671112060546, 0.023702047348022462, 0.023781024932861328, 0.024179264068603514, 0.024269887924194336, 0.023875680923461914, 0.024315967559814453, 0.025227199554443358, 0.02407219123840332, 0.02388787269592285, 0.023983903884887695, 0.02376316833496094, 0.023833887100219726, 0.023655296325683594, 0.024433855056762696, 0.02421187210083008, 0.024281343460083007, 0.02387753677368164, 0.02396940803527832, 0.02403171157836914, 0.02412748718261719, 0.024018495559692384, 0.023897695541381835, 0.023745248794555664, 0.02385932731628418, 0.023982080459594726, 0.02383251190185547, 0.023860576629638672, 0.02376697540283203, 0.023794464111328125, 0.02408358383178711, 0.023755647659301757, 0.02370969581604004, 0.023746496200561525, 0.02397558403015137, 0.023734592437744142, 0.023801088333129883, 0.023612255096435546, 0.023769088745117187, 0.023783199310302733, 0.023822559356689452, 0.02369945526123047, 0.023804927825927736, 0.02390323257446289, 0.023830528259277343, 0.024078336715698243, 0.02427494430541992, 0.024465152740478516, 0.024133600234985352, 0.02419331169128418, 0.024133247375488283, 0.023854911804199217, 0.024132160186767577, 0.02409062385559082, 0.023963647842407225, 0.02398543930053711, 0.023833087921142578, 0.023816064834594728, 0.023959903717041015, 0.024268800735473633, 0.02434252738952637, 0.023990272521972656, 0.02474393653869629, 
0.024788448333740234, 0.025345823287963868, 0.023915008544921876, 0.02402943992614746, 0.023736320495605468, 0.024101247787475587, 0.02415123176574707, 0.023908512115478515, 0.0239332160949707, 0.02439014434814453, 0.024059295654296875, 0.02396406364440918, 0.02409267234802246, 0.023959104537963866, 0.02407676887512207, 0.02444697570800781, 0.024457216262817383, 0.024471296310424804, 0.02433196830749512, 0.02461529541015625, 0.024701120376586914, 0.02474393653869629, 0.02467430305480957, 0.02476851272583008, 0.02483404731750488, 0.024754175186157225, 0.025024511337280272, 0.02529484748840332, 0.025063039779663086, 0.024928640365600586, 0.025069568634033205, 0.02500819206237793, 0.02537376022338867, 0.025100671768188476, 0.025313024520874024, 0.024981279373168946, 0.02520969581604004, 0.02517955207824707, 0.02539913558959961, 0.02533260726928711, 0.025253215789794923, 0.025323904037475586, 0.024975263595581054, 0.024750463485717772, 0.02454732894897461, 0.024473472595214842, 0.024625280380249023, 0.02474720001220703, 0.02513983917236328, 0.024418495178222657, 0.02450022315979004, 0.024475648880004884, 0.024505504608154295, 0.02434748840332031, 0.024389631271362306, 0.02454528045654297, 0.024576000213623047, 0.024922239303588868, 0.024964160919189453, 0.02500896072387695, 0.02505491256713867, 0.025141504287719725, 0.024959039688110352, 0.025020416259765626, 0.02531491279602051, 0.02492630386352539, 0.024925504684448242, 0.025177087783813477, 0.02477564811706543, 0.02483344078063965, 0.025048927307128908, 0.02518070411682129, 0.02455766487121582, 0.024368576049804688, 0.02424176025390625, 0.024263200759887697, 0.024195295333862304, 0.02423583984375, 0.024045984268188478, 0.024953983306884767, 0.024385568618774413, 0.024247135162353516, 0.023952768325805663, 0.024133567810058595, 0.023898399353027344, 0.023864927291870116, 0.023830720901489258, 0.02421004867553711, 0.0243056640625, 0.02458006477355957, 0.02430918312072754, 0.024126047134399413, 0.023942783355712892, 0.02401728057861328, 0.023918207168579102, 0.024228223800659178, 0.024457216262817383, 0.024575008392333984, 0.02447417640686035, 0.02434499168395996, 0.024176639556884767, 0.024477855682373047, 0.024137567520141602, 0.02430771255493164, 0.02450227165222168, 0.024414207458496092, 0.02431385612487793, 0.024051263809204103, 0.023853504180908203, 0.023877695083618165, 0.0239881591796875, 0.023875583648681642, 0.02429747200012207, 0.02405990409851074, 0.024715167999267578, 0.024952512741088867, 0.025047391891479493, 0.02641107177734375, 0.02596019172668457, 0.025278879165649415, 0.025071327209472655, 0.024942720413208008, 0.025622751235961912, 0.02522015953063965, 0.025100095748901367, 0.025090560913085938, 0.02513968086242676, 0.025103872299194335, 0.02492255973815918, 0.02497350311279297, 0.024950847625732422, 0.024426496505737305, 0.024598527908325195, 0.02446950340270996, 0.024453119277954103, 0.024653823852539062, 0.024147199630737304, 0.023906208038330077, 0.023894784927368164, 0.02430544090270996, 0.024787391662597656, 0.024456800460815428, 0.024253856658935546, 0.024267520904541016, 0.023799808502197265, 0.02422902488708496, 0.023878623962402343, 0.02392064094543457, 0.02385628890991211, 0.0237455997467041, 0.02408016014099121, 0.02391164779663086, 0.023886623382568358, 0.023833856582641602, 0.023806720733642577, 0.023939296722412108, 0.024112159729003907, 0.023823104858398437, 0.024163455963134767, 0.024048511505126952, 0.023975936889648438, 0.023705568313598633, 0.023744543075561522, 0.023930912017822267, 0.02431564712524414, 
0.023808000564575195, 0.02374064064025879, 0.02375856018066406, 0.024064287185668946, 0.02405171203613281, 0.02392064094543457, 0.02366054344177246, 0.023817312240600585, 0.025684576034545898, 0.024918336868286133, 0.02418012809753418, 0.02387824058532715, 0.023916543960571288, 0.023958944320678712, 0.023962207794189453, 0.02392268753051758, 0.023783584594726563, 0.023916128158569337, 0.023718143463134767, 0.023785472869873047, 0.024845951080322264, 0.02388006401062012, 0.02388582420349121, 0.024517919540405272, 0.02480406379699707, 0.02479641532897949, 0.025011039733886718, 0.024786943435668944, 0.025667488098144533, 0.025204383850097656, 0.025483776092529296, 0.025309247970581053, 0.025024415969848633, 0.024950815200805665, 0.025304096221923828, 0.025101280212402342, 0.024861696243286133, 0.025439231872558594, 0.025151327133178712, 0.025288768768310547, 0.02510630416870117, 0.025045087814331055, 0.025071744918823243, 0.02513100814819336, 0.025001983642578125, 0.02514739227294922, 0.024705024719238283, 0.0247193603515625, 0.024672256469726563, 0.024550912857055664, 0.02439414405822754, 0.024170463562011718, 0.024294944763183595, 0.02442505645751953, 0.02466217613220215, 0.024520544052124022, 0.024389055252075194, 0.024384063720703127, 0.024412160873413087, 0.024461023330688475, 0.024291616439819336, 0.02464358329772949, 0.024188928604125977, 0.024066047668457033, 0.02425833511352539, 0.024256479263305663, 0.02464793586730957, 0.024954879760742187, 0.0250614070892334, 0.024946815490722658, 0.025097280502319335, 0.025332511901855467, 0.025025823593139648, 0.024993663787841798, 0.025142112731933595, 0.025098304748535156, 0.025112512588500977, 0.02511075210571289, 0.025210655212402344, 0.02513225555419922, 0.025114463806152343, 0.025275327682495116, 0.025341888427734376, 0.024936511993408204, 0.024995840072631836, 0.02484947204589844, 0.02495788764953613, 0.024887296676635744, 0.024721567153930663, 0.024592031478881837, 0.02412972831726074, 0.024053760528564453, 0.023625728607177734, 0.02425961685180664, 0.02414499282836914, 0.02406380844116211, 0.024084543228149412, 0.023959648132324218, 0.02415785598754883, 0.024137279510498048, 0.024338911056518555, 0.025013664245605468, 0.02505401611328125, 0.025040416717529296, 0.024904159545898436, 0.025288703918457032, 0.02508185577392578, 0.025165727615356445, 0.025038240432739257, 0.024918527603149415, 0.024838336944580076, 0.025200639724731445, 0.025167455673217775, 0.02545929527282715, 0.025161535263061523, 0.025118719100952147, 0.024815200805664062, 0.025102752685546875, 0.024629247665405272, 0.024631296157836914, 0.024503711700439454, 0.024629728317260742, 0.024549472808837892, 0.024528863906860352, 0.026712127685546875, 0.02407219123840332, 0.023938432693481445, 0.02379635238647461, 0.023767040252685546, 0.023838464736938476, 0.02363577651977539, 0.023809471130371095, 0.023722879409790038, 0.023742303848266602, 0.023670751571655272, 0.023816511154174803, 0.023824384689331055, 0.023773183822631837, 0.02369945526123047, 0.02376835250854492, 0.023695167541503907, 0.02384988784790039, 0.02382441520690918, 0.0238427848815918, 0.02369331169128418, 0.02371174430847168, 0.023835935592651368, 0.02372652816772461, 0.023783424377441405, 0.02481718444824219, 0.02490438461303711, 0.024610464096069335, 0.023886240005493165, 0.024236032485961914, 0.024024927139282226, 0.023784223556518554, 0.02379743957519531, 0.024052223205566405, 0.02401411247253418, 0.023816511154174803, 0.023737760543823243, 0.023862176895141602, 0.023841760635375978, 0.02417043113708496, 
0.023983104705810547, 0.024118560791015625, 0.024085376739501955, 0.024223583221435547, 0.02433990478515625, 0.02413011169433594, 0.024984735488891602, 0.024973440170288085, 0.024629375457763673, 0.024160863876342774, 0.02391628837585449, 0.024225984573364258, 0.023947328567504884, 0.023790719985961915, 0.024066560745239256, 0.024691072463989258, 0.026410560607910156, 0.02406345558166504, 0.023828927993774413, 0.02374665641784668, 0.023818687438964845, 0.023946464538574217, 0.02393779182434082, 0.02378550338745117, 0.02388582420349121, 0.023897247314453127, 0.024122207641601563, 0.02381827163696289, 0.02383830451965332, 0.023925119400024415, 0.02389401626586914, 0.02433603286743164, 0.023869792938232423, 0.023816287994384764, 0.023977664947509764, 0.02420672035217285, 0.024679231643676757, 0.024752159118652344, 0.02524064064025879, 0.024650623321533203, 0.024747583389282228, 0.025327840805053712, 0.025088287353515624, 0.024979135513305665, 0.025161983489990235, 0.024998176574707032, 0.025007904052734373, 0.02526585578918457, 0.02508438491821289, 0.025120607376098635, 0.025341951370239257, 0.02515558433532715, 0.025243648529052733, 0.025208927154541014]",tokens/s,41.017288649702024,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 58.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 128057 has 14.69 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 193.68 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,7184.67072,7954.366464,0.0,7551.844352,7485.12768,s,1,13.348291015625,13.348291015625,0.0,13.348291015625,13.348291015625,13.348291015625,13.348291015625,[13.348291015625],,kWh,0.00016912642074999742,1.8641764245350682e-05,5.167670800799995e-05,0.00023944489300334803,,MB,2836.815872,8254.2592,0.0,7837.057024,7735.356416,s,10,3.5698594055175783,0.3569859405517578,0.0006797533766635876,0.3568690185546875,0.3578030975341797,0.3580457443237305,0.3582398617553711,"[0.35638201904296873, 0.3559376220703125, 0.3563666687011719, 0.35697579956054687, 0.35676223754882813, 0.35670449829101564, 0.35828839111328126, 0.3575758361816406, 0.35711715698242186, 0.35774917602539064]",tokens/s,717.115076308961,kWh,1.0443046111458898e-05,1.1516788124956802e-06,6.901791235713982e-06,1.8496516159668562e-05,tokens/kWh,13840444.210688984,MB,2848.227328,8275.23072,0.0,7858.028544,7759.281152,s,10,26.05070458984375,2.605070458984375,0.002942106391489705,2.6057374267578126,2.60719091796875,2.6091190185546878,2.6106614990234376,"[2.60067529296875, 2.601996337890625, 2.6046591796875, 2.601273193359375, 2.60668408203125, 2.606132080078125, 2.605736083984375, 2.606762451171875, 2.60573876953125, 2.611047119140625]",tokens/s,24.18360692806807,kWh,7.622703675854231e-05,8.406616894408237e-06,5.078447713548613e-05,0.00013541813078843666,tokens/kWh,465225.7392211735,,s,630,26.047346401214625,0.041344994287642226,0.00034456014773579045,0.04135203170776367,0.04173660049438477,0.041860217094421386,0.04218701431274414,"[0.041823486328125, 0.04088908767700195, 0.04122009658813477, 0.041095230102539064, 0.041069854736328126, 0.040913566589355466, 0.040971328735351566, 0.041081153869628906, 0.04114847946166992, 0.041546207427978515, 0.04120380783081055, 0.04098988723754883, 0.040919391632080075, 0.041054561614990236, 0.04170969772338867, 0.040897697448730466, 0.04093571090698242, 0.04090460968017578, 0.04087875366210938, 0.04076044845581055, 0.040833919525146485, 0.04087126541137695, 0.0408458251953125, 0.04084124755859375, 0.04072051239013672, 0.04093952178955078, 0.04095721435546875, 0.040873729705810546, 0.04097529602050781, 0.0411033935546875, 0.041522720336914065, 0.04186505508422852, 0.041966209411621096, 0.041801727294921875, 0.04155596923828125, 0.04124262237548828, 0.04124415969848633, 0.041263614654541016, 0.041365505218505856, 0.041323711395263675, 0.04127008056640625, 0.04136489486694336, 0.04125347137451172, 0.041292926788330075, 0.04117971038818359, 0.0411446418762207, 0.04115660858154297, 0.041062400817871096, 0.04117913436889648, 0.04117913436889648, 0.04117708969116211, 0.04133811187744141, 0.04143299102783203, 
0.04171830368041992, 0.041707679748535155, 0.041621246337890626, 0.04225475311279297, 0.04212736129760742, 0.041859073638916014, 0.041700801849365234, 0.042146366119384764, 0.04152902221679688, 0.04152352142333984, 0.04172627258300781, 0.0421165771484375, 0.04046435165405273, 0.040634944915771486, 0.04074697494506836, 0.04096207809448242, 0.04084735870361328, 0.040787967681884765, 0.04096326446533203, 0.04099155044555664, 0.041947105407714846, 0.04091907119750977, 0.041000640869140625, 0.0411794548034668, 0.04095340728759766, 0.04090236663818359, 0.04091107177734375, 0.04101990509033203, 0.04108000183105469, 0.04117756652832031, 0.04130031967163086, 0.04134502410888672, 0.04110259246826172, 0.04129167938232422, 0.041167713165283205, 0.04111478424072266, 0.04108505630493164, 0.04124671936035156, 0.04122185516357422, 0.04135785675048828, 0.04118985748291016, 0.04124172973632813, 0.04154662322998047, 0.0414062728881836, 0.04130134582519531, 0.041188190460205075, 0.04138140869140625, 0.041587169647216794, 0.041111553192138675, 0.04107059097290039, 0.04113958358764649, 0.04106304168701172, 0.04109056091308594, 0.04140697479248047, 0.041646080017089845, 0.04165631866455078, 0.04153343963623047, 0.04135321426391601, 0.04139759826660156, 0.04146448135375977, 0.04141791915893555, 0.041767742156982424, 0.041388031005859374, 0.041395614624023434, 0.041640094757080075, 0.04157894515991211, 0.041606399536132814, 0.04148096084594727, 0.04136735916137695, 0.04152896118164062, 0.041640254974365236, 0.04232128143310547, 0.04220361709594726, 0.04155779266357422, 0.040981857299804685, 0.04134716796875, 0.041321182250976564, 0.040882240295410155, 0.041323936462402344, 0.04137948989868164, 0.04152764892578125, 0.041230625152587894, 0.041122112274169925, 0.04137984085083008, 0.04138131332397461, 0.04130464172363281, 0.04139212799072266, 0.04131840133666992, 0.041150463104248046, 0.040888065338134764, 0.040927326202392575, 0.04086387252807617, 0.04083100891113281, 0.04085113525390625, 0.04106067276000976, 0.04086579132080078, 0.04085964965820312, 0.04091904067993164, 0.04114944076538086, 0.04091392135620117, 0.04095129776000977, 0.041396736145019535, 0.04165836715698242, 0.04156415939331055, 0.04140419387817383, 0.04139440155029297, 0.04125900650024414, 0.04165427017211914, 0.04169113540649414, 0.0415797119140625, 0.04152812957763672, 0.04170751953125, 0.04156825637817383, 0.04137779235839844, 0.04119657516479492, 0.04127638244628906, 0.04118479919433594, 0.04116118240356445, 0.04122623825073242, 0.04128268814086914, 0.0413642578125, 0.04145142364501953, 0.041560256958007816, 0.04153343963623047, 0.041575519561767575, 0.0414381103515625, 0.041570175170898435, 0.04176012802124023, 0.041624256134033207, 0.04158575820922852, 0.04199318313598633, 0.041816062927246093, 0.04154163360595703, 0.041447425842285154, 0.041578495025634765, 0.04170547103881836, 0.04166451263427735, 0.040687614440917966, 0.040714241027832034, 0.040826305389404294, 0.040878654479980465, 0.04097782516479492, 0.04080495834350586, 0.04085964965820312, 0.04102348709106445, 0.04090265655517578, 0.0408616943359375, 0.040890369415283206, 0.0408616943359375, 0.04100912094116211, 0.041137630462646485, 0.04103414535522461, 0.040827041625976564, 0.041011104583740236, 0.04112188720703125, 0.04131132888793945, 0.04140329742431641, 0.04134902572631836, 0.04129596710205078, 0.041017345428466793, 0.04117708969116211, 0.04126924896240235, 0.04126012802124023, 0.04122412872314453, 0.04112617492675781, 0.04129193496704102, 0.04130665588378906, 
0.04118272018432617, 0.04126358413696289, 0.041347103118896486, 0.04121923065185547, 0.041283584594726565, 0.04122505569458008, 0.0414780158996582, 0.04142623901367187, 0.041063232421875, 0.04122214508056641, 0.041605121612548826, 0.04153939056396484, 0.04181139373779297, 0.04184140777587891, 0.04182015991210938, 0.04140851211547852, 0.04122175979614258, 0.0414252815246582, 0.04157763290405273, 0.041338783264160156, 0.04149462509155273, 0.04154249572753906, 0.04140236663818359, 0.04139212799072266, 0.04144947052001953, 0.041562110900878906, 0.04136959838867187, 0.04144879913330078, 0.0419617919921875, 0.04203472137451172, 0.041951679229736326, 0.04189206314086914, 0.04187283325195312, 0.04129849624633789, 0.041199615478515625, 0.04113612747192383, 0.04108902359008789, 0.04123852920532227, 0.04118297576904297, 0.04102345657348633, 0.0409213752746582, 0.04103123092651367, 0.041226688385009765, 0.04111260986328125, 0.041140575408935544, 0.04149657440185547, 0.04127942276000977, 0.04242406463623047, 0.040639423370361326, 0.04079206466674805, 0.04080640029907227, 0.04093273544311524, 0.040992416381835935, 0.04087209701538086, 0.040843776702880856, 0.04109465789794922, 0.04136223983764648, 0.04142835235595703, 0.04129446411132812, 0.041250720977783206, 0.04164201736450195, 0.04169644927978516, 0.04157235336303711, 0.041286529541015624, 0.04131964874267578, 0.041401119232177735, 0.04133270263671875, 0.041514785766601565, 0.041576351165771484, 0.041580894470214846, 0.04145151901245117, 0.04116614532470703, 0.04104876708984375, 0.041156063079833986, 0.041294143676757815, 0.04138729476928711, 0.04157244873046875, 0.04184288024902344, 0.04151772689819336, 0.04139987182617187, 0.041277023315429685, 0.041304927825927734, 0.041479873657226565, 0.041553825378417966, 0.041553791046142576, 0.04175209426879883, 0.04157952117919922, 0.04155392074584961, 0.041555583953857424, 0.04174678421020508, 0.041823486328125, 0.04161516952514648, 0.04180886459350586, 0.04202700805664063, 0.04198329544067383, 0.04202550506591797, 0.041264446258544925, 0.04139596939086914, 0.04126201629638672, 0.04110259246826172, 0.041124191284179684, 0.04095795059204101, 0.041355262756347655, 0.041216064453125, 0.04129177474975586, 0.04125321578979492, 0.0412938232421875, 0.041207809448242184, 0.04118937683105469, 0.04117504119873047, 0.04134656143188477, 0.04125337600708008, 0.041132030487060545, 0.040828929901123044, 0.04086579132080078, 0.040871166229248045, 0.0409536018371582, 0.04110438537597656, 0.040986625671386716, 0.0409354248046875, 0.04102963256835938, 0.04087094497680664, 0.04112278366088867, 0.04127283096313476, 0.04146636962890625, 0.04165779113769531, 0.04165689468383789, 0.04154150390625, 0.04151919937133789, 0.041682975769042965, 0.04159897613525391, 0.04155327987670898, 0.04157299041748047, 0.0413757438659668, 0.04136675262451172, 0.04137859344482422, 0.041431041717529295, 0.04127948760986328, 0.041239871978759765, 0.04118115234375, 0.0413779182434082, 0.04141731262207031, 0.0413757438659668, 0.041768959045410156, 0.041562110900878906, 0.041506816864013675, 0.04155910491943359, 0.041397182464599606, 0.04158867263793945, 0.04170307159423828, 0.041619873046875, 0.04173619079589844, 0.041859073638916014, 0.04167007827758789, 0.041622081756591794, 0.041842369079589846, 0.04135712051391602, 0.04164220809936523, 0.041632766723632815, 0.04069622421264649, 0.04078243255615235, 0.040779518127441405, 0.04088857650756836, 0.04090265655517578, 0.04086579132080078, 0.040906593322753905, 0.04110147094726563, 0.041113536834716795, 
0.04117919921875, 0.04131568145751953, 0.041357982635498045, 0.04147609710693359, 0.04148348617553711, 0.04142947387695312, 0.04156447982788086, 0.04159078216552734, 0.041190849304199216, 0.041085502624511716, 0.04108083343505859, 0.0410880012512207, 0.04138905715942383, 0.041545726776123046, 0.04098252868652344, 0.04110287857055664, 0.04093916702270508, 0.041113536834716795, 0.04114931106567383, 0.04103372955322265, 0.04099612808227539, 0.04100543975830078, 0.041101055145263674, 0.04118080139160156, 0.04130915069580078, 0.04172991943359375, 0.04165027236938477, 0.041570335388183596, 0.041783294677734374, 0.04165836715698242, 0.04146380615234375, 0.04160019302368164, 0.04166729736328125, 0.04146799850463867, 0.041703041076660154, 0.041834400177001956, 0.04147964859008789, 0.041460704803466794, 0.041543712615966795, 0.04122214508056641, 0.04122320175170899, 0.04122723388671875, 0.04136297607421875, 0.04172643280029297, 0.041603038787841794, 0.04150479888916016, 0.041545726776123046, 0.04176009750366211, 0.04170819091796875, 0.041735584259033204, 0.04184329605102539, 0.042076160430908206, 0.041885696411132815, 0.042001216888427735, 0.04093308639526367, 0.040959648132324215, 0.04100364685058594, 0.0408166389465332, 0.04087305450439453, 0.04083804702758789, 0.04083302307128906, 0.040808254241943356, 0.04080652618408203, 0.04080031967163086, 0.040954944610595706, 0.04097251129150391, 0.041454303741455076, 0.04136140823364258, 0.04109212875366211, 0.040825214385986325, 0.0408623046875, 0.04086374282836914, 0.040921089172363284, 0.040887710571289065, 0.04099046325683594, 0.041048702239990235, 0.041312065124511715, 0.041337249755859375, 0.041406112670898436, 0.04162595367431641, 0.04168294525146484, 0.04150271987915039, 0.041474048614501956, 0.0413941764831543, 0.04127942276000977, 0.04115564727783203, 0.04135628890991211, 0.041307456970214845, 0.04135084915161133, 0.041384033203125, 0.0414299201965332, 0.04141043090820312, 0.04165030288696289, 0.04170697784423828, 0.041466400146484374, 0.04157235336303711, 0.04160921478271484, 0.041686080932617185, 0.04143590545654297, 0.04148038482666016, 0.041256351470947264, 0.0416794548034668, 0.04164156723022461, 0.04154761505126953, 0.04157868957519531, 0.04152358245849609, 0.04160921478271484, 0.041596927642822266, 0.04145356750488281, 0.0416255989074707, 0.04352000045776367, 0.04163993453979492, 0.04200387191772461, 0.041957984924316405, 0.04182624053955078, 0.04204662322998047, 0.04184627151489258, 0.04130390548706055, 0.0412979850769043, 0.0412283821105957, 0.04112057495117188, 0.04111769485473633, 0.04101059341430664, 0.04135347366333008, 0.041226593017578125, 0.041053791046142575, 0.041023902893066407, 0.04108031845092774, 0.041081344604492184, 0.041393310546875, 0.041372512817382814, 0.041016544342041016, 0.04093622589111328, 0.040908798217773434, 0.040855552673339846, 0.0407993278503418, 0.040786304473876954, 0.04092764663696289, 0.04092121505737305, 0.04082796859741211, 0.040821697235107424, 0.040927230834960936, 0.0408350715637207, 0.04106854248046875, 0.041301280975341796, 0.0416734390258789, 0.04159897613525391, 0.04140236663818359, 0.04168860626220703, 0.04165660858154297, 0.04143737411499023, 0.04161740875244141, 0.041516288757324216, 0.04167248153686524, 0.041632640838623045, 0.04143065643310547, 0.04141718292236328, 0.04150476837158203, 0.04151830291748047, 0.041379966735839845, 0.04136207962036133, 0.04166652679443359, 0.04129315185546875, 0.04136582565307617, 0.04147439956665039, 0.04168092727661133, 0.041540767669677736, 
0.04154249572753906, 0.04156825637817383, 0.04254105758666992, 0.041387489318847656, 0.04169977569580078, 0.04169452667236328, 0.04172012710571289, 0.04170800018310547, 0.0416357421875, 0.04159638214111328, 0.04162214279174805, 0.04170342254638672, 0.04156655883789063, 0.040716289520263675, 0.04076748657226562, 0.04299935913085937, 0.040642879486083985, 0.041001087188720704, 0.04105625534057617, 0.04104748916625976, 0.04121452713012695, 0.04146995162963867, 0.04148633575439453, 0.04160921478271484, 0.041202686309814454, 0.04190105438232422, 0.04184473419189453, 0.04160102462768555, 0.04134272003173828, 0.041385696411132815, 0.04111619186401367, 0.041281089782714844, 0.04117923355102539, 0.04112828826904297, 0.041129566192626955, 0.04098003387451172, 0.040976287841796875, 0.04124358367919922, 0.04116432189941406, 0.04115824127197266, 0.04115305709838867, 0.04118972778320312, 0.041377601623535154, 0.04148652648925781, 0.041447425842285154, 0.04147711944580078, 0.041229312896728515, 0.04174028778076172, 0.04163948822021484, 0.04176057434082031, 0.04182694244384766, 0.04155596923828125, 0.04135027313232422, 0.041436031341552736, 0.04148598480224609, 0.041460033416748046, 0.04116892623901367, 0.04120927810668945, 0.041284160614013674, 0.04132044982910156, 0.041416191101074216, 0.04168038558959961, 0.04162857437133789, 0.0418059196472168, 0.04165017700195312, 0.04172390365600586, 0.04150630569458008, 0.04176736068725586, 0.041607231140136716, 0.04157785415649414, 0.04186115264892578, 0.04197785568237305, 0.041852767944335935, 0.041804542541503904, 0.04200227355957031]",tokens/s,24.186724831617504,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4975.235072,7338.917888,0.0,6943.670272,6539.1744,s,1,11.480953125,11.480953125,0.0,11.480953125,11.480953125,11.480953125,11.480953125,[11.480953125],,kWh,0.00012999175551251484,1.4331755563570499e-05,4.0486421278004836e-05,0.00018480993235409017,,MB,5060.62848,7634.61632,0.0,7224.68864,6917.39904,s,10,2.0691580657958983,0.20691580657958983,0.0004209720328200674,0.20692723083496095,0.20733978881835938,0.20746485443115234,0.2075649069213867,"[0.20605894470214844, 0.20640179443359374, 0.20696316528320313, 0.2075899200439453, 0.20731199645996093, 0.20689129638671874, 0.2068658905029297, 0.2072939453125, 0.20697042846679686, 0.20681068420410156]",tokens/s,1237.2181914557118,kWh,6.0760903196431905e-06,6.700888447467249e-07,4.044135888367268e-06,1.0790315052757185e-05,tokens/kWh,23724979.182566673,MB,5063.942144,7636.713472,0.0,7226.785792,6917.4016,s,10,21.998330322265623,2.1998330322265622,0.014466196460380025,2.1997823486328123,2.2205612060546875,2.2218045288085935,2.2227991870117187,"[2.18112548828125, 2.211341064453125, 2.181974609375, 2.200316650390625, 2.182593994140625, 2.1949794921875, 2.199248046875, 2.203418212890625, 2.220284912109375, 
2.2230478515625]",tokens/s,28.63853714217325,kWh,6.324594263535734e-05,6.9759540446782756e-06,4.201862318403299e-05,0.00011224051986406862,tokens/kWh,561294.6204837393,,s,630,21.99556670379637,0.034913597942533944,0.0006599943159116003,0.034829471588134764,0.03534640693664551,0.03560117568969727,0.037297059745788605,"[0.03521535873413086, 0.03477872085571289, 0.034703296661376955, 0.03445158386230469, 0.034527584075927736, 0.034576095581054685, 0.034578720092773435, 0.03460086441040039, 0.034791519165039066, 0.034371166229248046, 0.03463151931762695, 0.03455648040771484, 0.0345354232788086, 0.034764766693115234, 0.03461123275756836, 0.03454771041870117, 0.03483967971801758, 0.03497987365722656, 0.034666591644287106, 0.03485567855834961, 0.034756256103515626, 0.034814304351806644, 0.03549593734741211, 0.03465593719482422, 0.03440671920776367, 0.0343818244934082, 0.034409889221191405, 0.034385631561279294, 0.034474048614501956, 0.034466014862060544, 0.03435299301147461, 0.03466726303100586, 0.03453033447265625, 0.03442787170410156, 0.03449651336669922, 0.03444863891601563, 0.03444607925415039, 0.03432377624511719, 0.034152225494384764, 0.03429059219360352, 0.03423436737060547, 0.03440412902832031, 0.0343732795715332, 0.03443123245239258, 0.03444358444213867, 0.03440390396118164, 0.034601024627685543, 0.03455132675170899, 0.03453628921508789, 0.034699230194091796, 0.03454313659667969, 0.0345687026977539, 0.03464601516723633, 0.03489791870117188, 0.03759241485595703, 0.034898849487304685, 0.034562911987304684, 0.03452143859863281, 0.034504768371582034, 0.034347518920898434, 0.03448825454711914, 0.034359294891357424, 0.03432044982910156, 0.035456417083740234, 0.03482896041870117, 0.03462921524047852, 0.034581214904785156, 0.0345247688293457, 0.03528335952758789, 0.035276321411132815, 0.03515235137939453, 0.034928638458251955, 0.03512115097045899, 0.03500236892700195, 0.03514777755737305, 0.035471359252929685, 0.03536076736450195, 0.03499212646484375, 0.034920448303222655, 0.03488927841186523, 0.034955711364746095, 0.03481727981567383, 0.035074817657470704, 0.03494297790527344, 0.035004417419433595, 0.03753350448608399, 0.035110782623291016, 0.034832191467285153, 0.0350274543762207, 0.03488515090942383, 0.03483081436157227, 0.03486310577392578, 0.035043327331542966, 0.0350346565246582, 0.0348713264465332, 0.034871742248535155, 0.03515379333496094, 0.035268001556396485, 0.0349969596862793, 0.034981727600097656, 0.03513100814819336, 0.03512307357788086, 0.03504604721069336, 0.035168254852294925, 0.036519935607910156, 0.03518259048461914, 0.035092479705810545, 0.03504460906982422, 0.03512393569946289, 0.035084320068359376, 0.035141632080078124, 0.03500543975830078, 0.03513651275634765, 0.03497107315063477, 0.034954975128173825, 0.03515068817138672, 0.03570278549194336, 0.0351819839477539, 0.03493334579467773, 0.034933856964111325, 0.03511145782470703, 0.03499808120727539, 0.03495379257202148, 0.03491839981079101, 0.034936832427978515, 0.03486515045166016, 0.03579616165161133, 0.03493561553955078, 0.03487948989868164, 0.0348012809753418, 0.03468896102905274, 0.034781631469726564, 0.03470905685424805, 0.0347360954284668, 0.035027423858642576, 0.034756607055664065, 0.03456979370117187, 0.03468495941162109, 0.0343842887878418, 0.034364990234375, 0.03438572692871094, 0.034189151763916015, 0.03418396759033203, 0.03417059326171875, 0.03414163208007812, 0.03418812942504883, 0.034369598388671874, 0.03435843276977539, 0.034231071472167966, 0.03454489517211914, 0.03430476760864258, 0.03423846435546875, 
0.03464191818237305, 0.03484649658203125, 0.03477731323242188, 0.03525212860107422, 0.03492572784423828, 0.03495558547973633, 0.03499264144897461, 0.03487091064453125, 0.03488614273071289, 0.034737438201904294, 0.034662593841552736, 0.034544158935546875, 0.03468902587890625, 0.0346951675415039, 0.034664447784423826, 0.03450182342529297, 0.03440313720703125, 0.034289600372314454, 0.034643199920654295, 0.034614078521728514, 0.03444326400756836, 0.03446169662475586, 0.0344268798828125, 0.03450454330444336, 0.03466233444213867, 0.034445537567138675, 0.03462883377075195, 0.03464707183837891, 0.034349822998046876, 0.03457356643676758, 0.03457129669189453, 0.034949569702148436, 0.03492496109008789, 0.034822017669677734, 0.03482998275756836, 0.03465660858154297, 0.03480303955078125, 0.03550419235229492, 0.03490553665161133, 0.0349639663696289, 0.03454972839355469, 0.03608992004394531, 0.03459081649780273, 0.03458038330078125, 0.03466153717041016, 0.03497046279907227, 0.03467468643188477, 0.03502489471435547, 0.03617792129516602, 0.034904064178466795, 0.03671817779541016, 0.034744384765625, 0.034566497802734374, 0.03504067230224609, 0.03584470367431641, 0.0352624626159668, 0.034907520294189455, 0.034780799865722654, 0.03473100662231445, 0.03508803176879883, 0.034533729553222654, 0.034604385375976564, 0.03453744125366211, 0.034622142791748044, 0.034492416381835936, 0.03444121551513672, 0.03470745468139649, 0.03444736099243164, 0.034442878723144534, 0.03438406372070312, 0.034350975036621094, 0.03442313766479492, 0.03454563140869141, 0.03432447814941406, 0.03498188781738281, 0.03460710525512695, 0.03444697570800781, 0.034786750793457034, 0.042973407745361326, 0.03458736038208008, 0.034492416381835936, 0.0343895378112793, 0.03483286285400391, 0.03556700897216797, 0.034654815673828124, 0.034633438110351564, 0.03458070373535156, 0.0347213134765625, 0.034800159454345704, 0.03468492889404297, 0.03473721694946289, 0.03479180908203125, 0.03454435348510742, 0.034534912109375, 0.0345211181640625, 0.03454790496826172, 0.03468505477905273, 0.03456972885131836, 0.03453593444824219, 0.03470876693725586, 0.03544473648071289, 0.03471142578125, 0.034687103271484374, 0.03440639877319336, 0.03443302536010742, 0.03487539291381836, 0.03506380844116211, 0.03456409454345703, 0.03483766555786133, 0.03451171112060547, 0.034393630981445315, 0.0345432014465332, 0.03451279830932617, 0.03457942581176758, 0.03454556655883789, 0.03456742477416992, 0.034581344604492185, 0.03465824127197266, 0.03459619140625, 0.03457904052734375, 0.03448025512695312, 0.03452467346191406, 0.03460966491699219, 0.034659713745117185, 0.034734718322753905, 0.034849918365478516, 0.03468991851806641, 0.03492240142822266, 0.03468092727661133, 0.034797569274902344, 0.03452630233764648, 0.03454249572753906, 0.034516990661621096, 0.034353153228759765, 0.034482177734375, 0.03464524841308594, 0.034627647399902345, 0.03476889419555664, 0.03471548843383789, 0.03447407913208008, 0.034448127746582034, 0.03452099227905273, 0.034627681732177736, 0.03467468643188477, 0.03459612655639648, 0.0345807991027832, 0.03457788848876953, 0.03462649536132813, 0.03467654418945312, 0.03466463851928711, 0.03471155166625976, 0.03466239929199219, 0.03460095977783203, 0.034514942169189454, 0.03473612976074219, 0.03481151962280273, 0.03454399871826172, 0.03488668823242187, 0.034982879638671874, 0.03446742248535156, 0.034447742462158204, 0.03462742233276367, 0.034619583129882815, 0.03534592056274414, 0.034777599334716795, 0.034492416381835936, 0.034471359252929684, 0.03468291091918945, 
0.03468534469604492, 0.03465017700195312, 0.03460835266113281, 0.03465407943725586, 0.03477542495727539, 0.03451555252075195, 0.03464361572265625, 0.03447129440307617, 0.034514942169189454, 0.034581470489501956, 0.03466035079956055, 0.034510593414306644, 0.03468835067749024, 0.03451580810546875, 0.03452035140991211, 0.034558303833007814, 0.034558048248291014, 0.03428908920288086, 0.03477596664428711, 0.03466035079956055, 0.0344268798828125, 0.035315711975097655, 0.034523136138916014, 0.034476032257080076, 0.03460300827026367, 0.034530433654785156, 0.03437862396240234, 0.034377727508544925, 0.0348768310546875, 0.03615804672241211, 0.03508844757080078, 0.03473196792602539, 0.03461072158813477, 0.035152191162109374, 0.03448758316040039, 0.03436537551879883, 0.03442988967895508, 0.03431161499023438, 0.03433324813842773, 0.034485408782958984, 0.03441955184936524, 0.034354911804199216, 0.035159934997558595, 0.0342020149230957, 0.0343164176940918, 0.03439807891845703, 0.03435868835449219, 0.034855518341064456, 0.04340140914916992, 0.03504518508911133, 0.03523993682861328, 0.03497091293334961, 0.03571376037597656, 0.03504127883911133, 0.03502191925048828, 0.03481670379638672, 0.03511225509643555, 0.03501558303833008, 0.03563724899291992, 0.0348521614074707, 0.03474684906005859, 0.03461491012573242, 0.03471420669555664, 0.03482771301269531, 0.034885440826416016, 0.034751102447509764, 0.034516990661621096, 0.03445772933959961, 0.034402305603027344, 0.03466761779785156, 0.03486521530151367, 0.034681121826171876, 0.03464249420166016, 0.03463987350463867, 0.03508329772949219, 0.03465315246582031, 0.03468259048461914, 0.034936832427978515, 0.03454185485839844, 0.034582080841064455, 0.03433840179443359, 0.03450556945800781, 0.034408447265625, 0.034418689727783204, 0.03465331268310547, 0.034833278656005856, 0.03484262466430664, 0.03498393630981445, 0.03497267150878906, 0.03495219039916992, 0.035043327331542966, 0.03511270523071289, 0.03506982421875, 0.03540224075317383, 0.03508211135864258, 0.03506393432617187, 0.03507187271118164, 0.034938880920410156, 0.034904064178466795, 0.03587481689453125, 0.03533795166015625, 0.03521292877197266, 0.03518531036376953, 0.03523993682861328, 0.03520512008666992, 0.03498780822753906, 0.03486537551879883, 0.03490140914916992, 0.03491900634765625, 0.035158271789550784, 0.036060928344726566, 0.034985984802246094, 0.03489791870117188, 0.034864158630371095, 0.0349766731262207, 0.035098464965820315, 0.03474163055419922, 0.03474723052978516, 0.03498140716552734, 0.03487100982666016, 0.03484748840332031, 0.03605136108398437, 0.036176990509033204, 0.03821660614013672, 0.035208480834960934, 0.035055553436279294, 0.03508099365234375, 0.03508006286621094, 0.035299457550048825, 0.034958560943603514, 0.03519772720336914, 0.034988033294677735, 0.03514080047607422, 0.0348221435546875, 0.034947265625, 0.03466656112670898, 0.034934337615966794, 0.03475500869750976, 0.034900478363037106, 0.03463993453979492, 0.034547168731689455, 0.03449910354614258, 0.03466854476928711, 0.03477913665771484, 0.03516387176513672, 0.03607759857177734, 0.03535078430175781, 0.03493273544311523, 0.0346561279296875, 0.034861183166503905, 0.035628223419189455, 0.03563788986206055, 0.03515615844726563, 0.0347955207824707, 0.03473158264160156, 0.03474460983276367, 0.034739967346191405, 0.03447235107421875, 0.03468902587890625, 0.03438095855712891, 0.03455897521972656, 0.03439187240600586, 0.03482422256469726, 0.03461705780029297, 0.0346319694519043, 0.034678558349609374, 0.034656478881835935, 0.03483852767944336, 
0.034709087371826174, 0.0346929931640625, 0.034544158935546875, 0.03430310440063476, 0.03460800170898438, 0.03525823974609375, 0.038235649108886716, 0.03474905776977539, 0.034457599639892575, 0.03462348937988281, 0.034770942687988284, 0.034523136138916014, 0.034799232482910156, 0.034449790954589846, 0.034245887756347654, 0.03438614273071289, 0.03605196762084961, 0.03536489486694336, 0.035118049621582034, 0.03517779159545899, 0.0351459846496582, 0.035400127410888674, 0.03498393630981445, 0.03503923034667969, 0.03498745727539063, 0.035326526641845706, 0.03511500930786133, 0.03508643341064453, 0.03542416000366211, 0.035221214294433596, 0.03507843017578125, 0.03522150421142578, 0.034977790832519534, 0.03553500747680664, 0.03514761734008789, 0.03560857772827149, 0.035098400115966794, 0.03524630355834961, 0.03494297790527344, 0.034854911804199216, 0.035067680358886716, 0.03512137603759766, 0.03496345520019531, 0.034961406707763674, 0.03523321533203125, 0.035111488342285155, 0.035194881439208986, 0.0349648323059082, 0.03514835357666016, 0.03498166275024414, 0.03512351989746094, 0.035149822235107424, 0.03530710220336914, 0.035155681610107424, 0.03498851013183594, 0.03592822265625, 0.03558931350708008, 0.035248897552490235, 0.03516761779785156, 0.03527552032470703, 0.035417823791503905, 0.035641502380371094, 0.03513763046264649, 0.035697792053222654, 0.03559212875366211, 0.03523433685302734, 0.035531200408935544, 0.035373054504394534, 0.035227649688720705, 0.03541846466064453, 0.03502851104736328, 0.035201152801513674, 0.035209217071533204, 0.03525632095336914, 0.035337345123291015, 0.03540057754516602, 0.035216480255126956, 0.03517737579345703, 0.035074047088623043, 0.03572947311401367, 0.035259166717529294, 0.035192001342773435, 0.03518288040161133, 0.03522361755371094, 0.03553308868408203, 0.035031070709228514, 0.03539148712158203, 0.03551216125488281, 0.035284385681152344, 0.035199745178222656, 0.03527715301513672, 0.03506857681274414, 0.035097599029541016, 0.035110912322998046, 0.03540079879760742, 0.03515075302124023, 0.0351723518371582, 0.03514486312866211, 0.03523478317260742, 0.03523366546630859, 0.03516976165771484, 0.03505411148071289, 0.035281982421875, 0.0351690559387207, 0.03494659042358399, 0.03512998580932617, 0.035074047088623043, 0.035227649688720705, 0.035225601196289064, 0.03506790542602539, 0.035084320068359376, 0.03491632080078125, 0.035416065216064455, 0.03510067367553711, 0.03506175994873047, 0.03547312164306641, 0.03528729629516601, 0.03527478408813477, 0.034909313201904296, 0.03487814331054687, 0.03505376052856445, 0.035210784912109376, 0.03772777557373047, 0.03585263824462891, 0.03502569580078125, 0.03530118560791016, 0.03526860809326172, 0.03512633514404297, 0.03526332855224609, 0.03515811157226562, 0.035296607971191406, 0.03535529708862305, 0.03523379135131836, 0.035229057312011716, 0.03527743911743164, 0.035532417297363283, 0.03547926330566406, 0.03544316864013672, 0.035246273040771485, 0.03509964752197266, 0.03539209747314453, 0.03591004943847656]",tokens/s,28.642135412281213,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,2773.303296,3346.92352,0.0,2944.401408,2910.225408,s,1,9.7837451171875,9.7837451171875,0.0,9.7837451171875,9.7837451171875,9.7837451171875,9.7837451171875,[9.7837451171875],,kWh,6.789464412499153e-05,7.4820595178542364e-06,2.0263071765988516e-05,9.563977540883428e-05,,MB,1698.578432,3565.027328,0.0,3147.825152,3105.082368,s,10,0.9459989776611328,0.09459989776611329,0.0004260734411019858,0.09449955368041993,0.09525063705444337,0.09526538276672364,0.09527717933654786,"[0.09482077026367187, 0.09428339385986328, 0.09528012847900391, 0.09397996520996094, 0.0942825927734375, 0.09460489654541016, 0.09439421081542969, 0.09490831756591797, 0.09419734191894531, 0.09524736022949219]",tokens/s,2706.134002733584,kWh,2.809178438702221e-06,3.098009320270349e-07,1.857314520038493e-06,4.976293890767749e-06,tokens/kWh,51443906.97561956,MB,1717.374976,3565.027328,0.0,3147.825152,3105.084928,s,10,10.359073974609375,1.0359073974609376,0.005298745729766857,1.035884521484375,1.042463806152344,1.0440049133300782,1.0452377990722657,"[1.0396986083984374, 1.0455460205078124, 1.042121337890625, 1.0286123046875, 1.03782373046875, 1.034143310546875, 1.0324678955078126, 1.03151513671875, 1.0295198974609374, 1.037625732421875]",tokens/s,60.81624685219571,kWh,2.971692540171271e-05,3.277337221440649e-06,1.929200795496185e-05,5.2286270578115205e-05,tokens/kWh,1204905.2132314271,,s,630,10.35682389640808,0.016439403010171556,0.000283050164533932,0.016405776023864747,0.016617640686035155,0.016737557315826414,0.01777828205108643,"[0.01652777671813965, 0.01639628791809082, 0.016434207916259765, 0.016701919555664062, 0.016426719665527344, 0.016495264053344727, 0.016437280654907228, 0.01639971160888672, 0.016593120574951173, 0.016421407699584962, 0.01652854347229004, 0.016421728134155274, 0.016471967697143555, 0.01668636894226074, 0.016466047286987303, 0.016513727188110353, 0.016586751937866212, 0.016467967987060548, 0.016430463790893555, 0.0164768009185791, 0.016549888610839843, 0.016560319900512696, 0.016662336349487303, 0.0164517765045166, 0.016404287338256836, 0.01648031997680664, 0.01650396728515625, 0.016376640319824217, 0.016379871368408204, 0.016465919494628906, 0.01641062355041504, 0.01637990379333496, 0.01651638412475586, 0.016446176528930663, 0.01656012725830078, 0.01641062355041504, 0.016508928298950197, 0.016394111633300783, 0.01643712043762207, 0.016519136428833008, 0.016443679809570313, 0.016459775924682618, 0.016459775924682618, 0.01642438316345215, 0.016399263381958008, 0.016463808059692383, 0.016661279678344725, 0.016683008193969725, 0.01644419288635254, 0.016587936401367187, 0.016452096939086915, 0.016435712814331056, 0.016459775924682618, 0.016486560821533203, 0.017518112182617188, 0.016550304412841797, 0.016412576675415038, 0.01636355209350586, 0.01640649604797363, 0.016643871307373048, 
0.016529632568359376, 0.016447263717651366, 0.016429344177246095, 0.016781951904296873, 0.016484640121459962, 0.01645510482788086, 0.016443679809570313, 0.016404991149902345, 0.01934889602661133, 0.016878047943115235, 0.016443296432495116, 0.01647372817993164, 0.01653334426879883, 0.016485984802246095, 0.016462015151977538, 0.01644825553894043, 0.016498687744140626, 0.016415903091430664, 0.01641526412963867, 0.016453952789306642, 0.01634841537475586, 0.016334943771362305, 0.01641948890686035, 0.016354496002197266, 0.016425535202026366, 0.01642214393615723, 0.01641721534729004, 0.016408767700195313, 0.01646835136413574, 0.016439199447631836, 0.01640185546875, 0.01637238311767578, 0.01646988868713379, 0.01651433563232422, 0.016533920288085938, 0.016387584686279297, 0.017727615356445313, 0.016564863204956055, 0.01647577667236328, 0.016525888442993165, 0.016471263885498046, 0.01637660789489746, 0.01664614486694336, 0.016531455993652345, 0.01641628837585449, 0.01633228874206543, 0.01647654342651367, 0.01634160041809082, 0.016407743453979492, 0.016653120040893556, 0.01638118362426758, 0.01637571144104004, 0.01644220733642578, 0.01802047920227051, 0.018671743392944334, 0.01687321662902832, 0.016547840118408205, 0.016390144348144533, 0.016474111557006836, 0.01644051170349121, 0.01637868881225586, 0.016343040466308592, 0.01641913604736328, 0.016617088317871093, 0.016395423889160158, 0.016459680557250975, 0.017046464920043945, 0.016593055725097658, 0.01645961570739746, 0.016365568161010743, 0.01648192024230957, 0.016435583114624022, 0.016438592910766603, 0.01644745635986328, 0.01643382453918457, 0.016434431076049805, 0.01642144012451172, 0.016387807846069337, 0.016408607482910155, 0.01629644775390625, 0.016330751419067382, 0.016292095184326172, 0.016320127487182617, 0.016445087432861327, 0.016383615493774414, 0.016554399490356444, 0.016490943908691408, 0.01654764747619629, 0.016394176483154298, 0.016395872116088867, 0.016519775390625, 0.01645369529724121, 0.016534879684448243, 0.016509599685668945, 0.01647132873535156, 0.01674220848083496, 0.016751712799072265, 0.01654969596862793, 0.01649580764770508, 0.016544832229614257, 0.016799711227416993, 0.01800783920288086, 0.01714361572265625, 0.0166808967590332, 0.016664831161499024, 0.01665622329711914, 0.01666473579406738, 0.01639366340637207, 0.016484928131103516, 0.01659619140625, 0.01661756706237793, 0.016587455749511718, 0.016455680847167968, 0.01649807929992676, 0.016480192184448243, 0.016473983764648436, 0.016565023422241212, 0.016447519302368162, 0.01687855911254883, 0.01646460723876953, 0.016427295684814453, 0.01638400077819824, 0.016342048645019532, 0.016562175750732423, 0.016353759765625, 0.01662544059753418, 0.016435487747192383, 0.016410848617553712, 0.016416128158569337, 0.0167127685546875, 0.01647407913208008, 0.016412927627563478, 0.016357599258422853, 0.01636924743652344, 0.0165100154876709, 0.016351167678833007, 0.01646771240234375, 0.016514976501464843, 0.016444671630859376, 0.016482688903808593, 0.016457855224609377, 0.01648486328125, 0.01635750389099121, 0.016488384246826172, 0.016448831558227538, 0.016505151748657226, 0.01641913604736328, 0.01644339179992676, 0.016334848403930666, 0.016336896896362304, 0.01653727912902832, 0.016475711822509766, 0.016349952697753908, 0.01631398391723633, 0.016326047897338866, 0.016323007583618165, 0.016259168624877928, 0.01628313636779785, 0.016240671157836915, 0.01616169548034668, 0.016171039581298827, 0.016224191665649413, 0.01616489601135254, 0.016185184478759766, 0.016133951187133788, 
0.016083295822143556, 0.016103391647338868, 0.0161177921295166, 0.01617411231994629, 0.016048799514770506, 0.01604649543762207, 0.016200799942016602, 0.01601728057861328, 0.016053247451782226, 0.01622819137573242, 0.016201824188232423, 0.01621401596069336, 0.016161983489990234, 0.016241056442260742, 0.016302495956420898, 0.016291839599609375, 0.01640243148803711, 0.016252799987792967, 0.01634111976623535, 0.016330976486206055, 0.01645065689086914, 0.01633523178100586, 0.016467456817626954, 0.01642588806152344, 0.016447391510009766, 0.016390144348144533, 0.016535072326660155, 0.01682431983947754, 0.016631807327270508, 0.01657347106933594, 0.016550176620483397, 0.0164685115814209, 0.016455263137817384, 0.016493152618408204, 0.016641759872436525, 0.016627519607543946, 0.016701887130737305, 0.016742399215698242, 0.01665763282775879, 0.016561216354370117, 0.01673187255859375, 0.016504735946655275, 0.01662371253967285, 0.0164617919921875, 0.01655401611328125, 0.01642483139038086, 0.016359712600708006, 0.016584096908569337, 0.016439519882202148, 0.016543968200683594, 0.016590143203735353, 0.016575328826904295, 0.01650217628479004, 0.016514528274536134, 0.016559135437011718, 0.01674847984313965, 0.01669868850708008, 0.01659075164794922, 0.016479007720947264, 0.016619775772094728, 0.01644927978515625, 0.016287551879882813, 0.016330047607421874, 0.01635152053833008, 0.016444000244140625, 0.016429119110107422, 0.016385984420776368, 0.016464223861694337, 0.0164737606048584, 0.016390144348144533, 0.016384031295776365, 0.016398303985595702, 0.0162541446685791, 0.016618303298950195, 0.01632204818725586, 0.01638457679748535, 0.016232383728027343, 0.0163636474609375, 0.016294944763183595, 0.016264032363891602, 0.016332799911499024, 0.01642291259765625, 0.016174240112304686, 0.016267776489257812, 0.01620403289794922, 0.016211776733398436, 0.016603456497192384, 0.016398303985595702, 0.016225536346435546, 0.016204544067382812, 0.01645891189575195, 0.016390783309936523, 0.01636582374572754, 0.016549856185913085, 0.01645756721496582, 0.016395872116088867, 0.016319040298461915, 0.01624608039855957, 0.016345792770385743, 0.016246400833129882, 0.016222015380859375, 0.01621583938598633, 0.01627235221862793, 0.0163305606842041, 0.01634873580932617, 0.017111488342285156, 0.016400640487670898, 0.01627519989013672, 0.016359424591064452, 0.016275583267211916, 0.016293760299682616, 0.016267488479614258, 0.016506752014160155, 0.016263071060180663, 0.01624678421020508, 0.01625644874572754, 0.016305824279785157, 0.01631119918823242, 0.016760831832885743, 0.016340991973876954, 0.01642905616760254, 0.016387231826782228, 0.016427871704101562, 0.016323680877685546, 0.01636627197265625, 0.016371936798095704, 0.016203840255737303, 0.016214208602905275, 0.01631545639038086, 0.016462528228759765, 0.016336160659790037, 0.0162576961517334, 0.016435583114624022, 0.016263872146606444, 0.01703014373779297, 0.0165317440032959, 0.019433183670043944, 0.01640652847290039, 0.01647135925292969, 0.016302751541137694, 0.01635740852355957, 0.016179168701171875, 0.016276832580566405, 0.016231103897094725, 0.0162794246673584, 0.01632217597961426, 0.016365631103515625, 0.016271808624267577, 0.01634646415710449, 0.016305919647216796, 0.016296159744262694, 0.0162412166595459, 0.016317760467529297, 0.016594144821166994, 0.01628620719909668, 0.016309568405151367, 0.016204416275024416, 0.01619798469543457, 0.01613212776184082, 0.016211423873901367, 0.016199264526367187, 0.01617616081237793, 0.016164735794067384, 0.016236799240112305, 
0.016129823684692384, 0.01627337646484375, 0.016256832122802736, 0.01643142318725586, 0.016165792465209963, 0.01625187110900879, 0.016234495162963866, 0.016301664352416992, 0.016159200668334962, 0.016376895904541014, 0.016499584197998046, 0.016764320373535157, 0.01779897689819336, 0.01638652801513672, 0.016206144332885742, 0.017049503326416016, 0.016179231643676757, 0.016280927658081056, 0.016222944259643556, 0.016377824783325196, 0.016250911712646483, 0.016295520782470704, 0.016380031585693358, 0.016361759185791015, 0.016381952285766603, 0.01641881561279297, 0.016318464279174806, 0.016309823989868164, 0.01622060775756836, 0.016350400924682616, 0.017802175521850587, 0.01638572883605957, 0.016384191513061523, 0.01641267204284668, 0.016283487319946287, 0.01626723289489746, 0.01628169631958008, 0.016258207321166993, 0.016312448501586915, 0.016313087463378905, 0.016230304718017577, 0.01632681655883789, 0.016445663452148436, 0.016355104446411133, 0.016347135543823242, 0.016357376098632814, 0.016424224853515624, 0.016409311294555664, 0.01683977508544922, 0.016657312393188475, 0.016433151245117187, 0.016369472503662108, 0.016603071212768553, 0.016484128952026368, 0.016422719955444337, 0.016433631896972656, 0.016371488571166992, 0.016283872604370118, 0.016283647537231445, 0.017068031311035157, 0.016469600677490235, 0.01649705505371094, 0.01642905616760254, 0.01634943962097168, 0.01625267219543457, 0.016475200653076172, 0.016321535110473632, 0.016248767852783202, 0.0163319034576416, 0.016350080490112304, 0.016312320709228514, 0.01630112075805664, 0.016214975357055662, 0.016336511611938477, 0.016334720611572266, 0.016243200302124023, 0.01625200080871582, 0.016444320678710937, 0.016556032180786134, 0.016561248779296874, 0.016429759979248046, 0.016310400009155273, 0.0163286075592041, 0.016349376678466795, 0.01632480049133301, 0.01631622314453125, 0.01626713562011719, 0.016493919372558594, 0.01635980796813965, 0.016372127532958983, 0.016465951919555664, 0.016291807174682617, 0.016312320709228514, 0.016277311325073242, 0.016305536270141602, 0.01628553581237793, 0.016426176071166993, 0.016363296508789062, 0.016344768524169922, 0.016308544158935546, 0.01635327911376953, 0.016245983123779298, 0.016388864517211915, 0.016687135696411132, 0.01654374313354492, 0.016297119140625, 0.016310304641723634, 0.016423168182373046, 0.01640505599975586, 0.016312671661376954, 0.016316064834594725, 0.016160127639770507, 0.016199359893798827, 0.016217023849487304, 0.016275007247924803, 0.01665692710876465, 0.016465120315551758, 0.016401056289672852, 0.01637183952331543, 0.016339967727661133, 0.016325056076049806, 0.01639449691772461, 0.01633500862121582, 0.01629523277282715, 0.016243488311767577, 0.01635750389099121, 0.01628169631958008, 0.016287359237670898, 0.016299999237060547, 0.01633417510986328, 0.016277599334716796, 0.016423648834228515, 0.016467456817626954, 0.016449888229370116, 0.016507232666015625, 0.01644028854370117, 0.016360383987426758, 0.01639036750793457, 0.01636467170715332, 0.01635548782348633, 0.01631657600402832, 0.016291616439819336, 0.016341567993164063, 0.01631177520751953, 0.016343008041381835, 0.016464448928833007, 0.01618092727661133, 0.016276927947998048, 0.0164770565032959, 0.016315807342529298, 0.016275680541992188, 0.01625331115722656, 0.016199039459228515, 0.01629248046875, 0.01608412742614746, 0.016187744140625, 0.016046592712402344, 0.01617625617980957, 0.016150688171386717, 0.016355552673339845, 0.01612393569946289, 0.016187328338623047, 0.016304479598999024, 0.016426176071166993, 
0.016321535110473632, 0.016472063064575194, 0.01625497627258301, 0.016315744400024413, 0.016425151824951172, 0.016300512313842774, 0.01636284828186035, 0.01638819122314453, 0.016310848236083985, 0.016519168853759765, 0.016262752532958984, 0.01641110420227051, 0.016277439117431642, 0.016859136581420898, 0.016552703857421875, 0.016502880096435548, 0.01653340721130371, 0.01642927932739258, 0.016473407745361327, 0.016501216888427733, 0.016385120391845705, 0.01632758331298828, 0.016526847839355468, 0.016382463455200197, 0.01636966323852539, 0.016459711074829103, 0.016485824584960937, 0.016378368377685547, 0.01636934471130371, 0.016405023574829102, 0.01633679962158203, 0.016328704833984374, 0.01640652847290039, 0.016287967681884764, 0.01633667182922363, 0.016393728256225586, 0.016452096939086915, 0.016542751312255858, 0.01643561553955078, 0.016406848907470704, 0.01741823959350586, 0.016357791900634765, 0.01700003242492676, 0.016517536163330078, 0.016481407165527345, 0.016444128036499025, 0.01645078468322754, 0.016390527725219727, 0.01647657585144043, 0.016708927154541017, 0.01672403144836426, 0.01646236801147461, 0.016517215728759766, 0.016500703811645506, 0.016533439636230468, 0.01671084785461426, 0.016579391479492188, 0.016514528274536134, 0.016673471450805662, 0.01685043144226074, 0.016417280197143554, 0.01635433578491211, 0.016360000610351564, 0.016267616271972655, 0.016285696029663087, 0.01633827209472656, 0.0163720645904541, 0.016414176940917968, 0.01646816062927246, 0.01641312026977539, 0.016474048614501954, 0.01634681510925293, 0.016437984466552733, 0.01621798324584961, 0.016356672286987305, 0.016296640396118164, 0.016261119842529297]",tokens/s,60.829459523637794,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4635.594752,6708.658176,0.0,6306.136064,6200.409088,s,1,11.3858076171875,11.3858076171875,0.0,11.3858076171875,11.3858076171875,11.3858076171875,11.3858076171875,[11.3858076171875],,kWh,0.00012370412398750736,1.3638305950050341e-05,4.024975442200318e-05,0.0001775921843595609,,MB,4709.605376,7736.262656,0.0,7316.963328,6846.674432,s,10,5.665507629394531,0.5665507629394531,0.0015295729153070254,0.5665235290527344,0.5685162658691406,0.5689885406494141,0.5693663604736329,"[0.5653997802734375, 0.5650226440429688, 0.5655225830078126, 0.5641676635742188, 0.5673649291992188, 0.566737548828125, 0.5663095092773438, 0.5694608154296875, 0.5684113159179688, 0.56711083984375]",tokens/s,451.8571269267862,kWh,1.6639113054860335e-05,1.8350050402987041e-06,9.444251382555632e-06,2.791836947771467e-05,tokens/kWh,9169589.94343661,MB,4713.836544,7740.45696,0.0,7321.157632,6846.676992,s,10,35.98861401367187,3.598861401367187,0.016066348470560352,3.5967351074218747,3.61548349609375,3.6256693603515626,3.6338180517578125,"[3.635855224609375, 3.57681640625, 3.57970703125, 3.5979619140625, 3.591634521484375, 3.59550830078125, 3.613219970703125, 
3.592607177734375, 3.605935302734375, 3.5993681640625]",tokens/s,17.505536605568267,kWh,0.00010552680713847199,1.1640088657448921e-05,6.774536592464391e-05,0.00018491226172056484,tokens/kWh,340702.12225949706,,s,630,35.98674454116822,0.057121816732013046,0.0007197760080845435,0.056954814910888676,0.057808749008178714,0.05830224151611328,0.05954641883850098,"[0.057751262664794925, 0.057608863830566404, 0.058318241119384766, 0.05815097427368164, 0.05799951934814453, 0.057643070220947265, 0.05809574508666992, 0.058501087188720706, 0.05815500640869141, 0.05740339279174805, 0.05711052703857422, 0.05908835220336914, 0.05907510375976562, 0.059545600891113284, 0.05823836898803711, 0.0579139518737793, 0.058286048889160155, 0.0595467529296875, 0.05856774520874024, 0.05888748931884766, 0.058968734741210935, 0.061755264282226566, 0.0584376335144043, 0.05754470443725586, 0.058810462951660154, 0.05783951950073242, 0.057452289581298825, 0.05734016036987305, 0.058352672576904296, 0.05740227127075195, 0.056803390502929686, 0.05700403213500976, 0.05750281524658203, 0.056902561187744144, 0.05697244644165039, 0.056701793670654296, 0.056501918792724606, 0.05670345687866211, 0.05678067016601562, 0.05674518585205078, 0.056879905700683596, 0.056850433349609375, 0.057063423156738284, 0.05699379348754883, 0.0573704948425293, 0.05822387313842774, 0.05710678482055664, 0.05727900695800781, 0.056938495635986325, 0.056809566497802735, 0.05668268966674805, 0.05680303955078125, 0.05695283126831055, 0.056927711486816406, 0.05778076934814453, 0.056840190887451174, 0.05697945785522461, 0.05717375946044922, 0.05676614379882813, 0.058300991058349606, 0.0599552001953125, 0.05754025650024414, 0.057040287017822267, 0.057690113067626954, 0.05739110565185547, 0.057178112030029295, 0.05706982421875, 0.05714684677124023, 0.05682819366455078, 0.05680031967163086, 0.05739820861816406, 0.05706943893432617, 0.057063552856445314, 0.05751603317260742, 0.057401344299316405, 0.057388641357421874, 0.056600032806396486, 0.056462272644042966, 0.05650431823730469, 0.056567806243896485, 0.05655980682373047, 0.056663871765136715, 0.056248321533203124, 0.056268798828125, 0.05621964645385742, 0.056228992462158206, 0.056556415557861325, 0.05668441772460937, 0.05705516815185547, 0.05778041458129883, 0.056895774841308595, 0.056884960174560545, 0.05689657592773437, 0.057050048828125, 0.056946590423583986, 0.05725155258178711, 0.05684179306030274, 0.056883552551269534, 0.057141056060791014, 0.0568419189453125, 0.05680419158935547, 0.056553600311279296, 0.05662105560302735, 0.05676851272583008, 0.056569854736328126, 0.056436737060546874, 0.056948734283447267, 0.056616958618164064, 0.05644841766357422, 0.05632675170898437, 0.056418113708496094, 0.0563304328918457, 0.056134849548339846, 0.056404800415039064, 0.05635686492919922, 0.05625411224365234, 0.05669308853149414, 0.056389633178710936, 0.056311359405517576, 0.05634297561645508, 0.05662515258789062, 0.05690163040161133, 0.056808830261230465, 0.056807201385498045, 0.056709983825683596, 0.05706073760986328, 0.05685129547119141, 0.05630771255493164, 0.056481151580810546, 0.05662783813476562, 0.05641011047363281, 0.05615110397338867, 0.056249153137207034, 0.05664780807495117, 0.056444927215576174, 0.05629132843017578, 0.05603123092651367, 0.05814227294921875, 0.0566868782043457, 0.056442302703857423, 0.05745532989501953, 0.05656371307373047, 0.0566824951171875, 0.05648179244995117, 0.056207359313964846, 0.05635820770263672, 0.056388286590576174, 0.056339488983154294, 0.05593596649169922, 
0.056784896850585936, 0.057512161254882815, 0.05699964904785156, 0.05695699310302734, 0.056662017822265626, 0.057015361785888674, 0.05723436737060547, 0.057475070953369144, 0.05722227096557617, 0.05720153427124024, 0.056944351196289066, 0.057452289581298825, 0.057883262634277344, 0.05736540985107422, 0.05717721557617188, 0.057270145416259764, 0.05811814498901367, 0.05724899291992187, 0.05692623901367187, 0.056830718994140626, 0.05675014495849609, 0.05679494476318359, 0.05697548675537109, 0.05681151962280274, 0.05676464080810547, 0.05667203140258789, 0.05709996795654297, 0.0569911994934082, 0.05801171112060547, 0.056908447265625, 0.056551551818847655, 0.05667567825317383, 0.056457374572753904, 0.05641801452636719, 0.05636995315551758, 0.05638089752197266, 0.056631839752197266, 0.05644902420043945, 0.05651251220703125, 0.05683609771728516, 0.05729788970947266, 0.056840190887451174, 0.057423038482666014, 0.05669561767578125, 0.056602401733398436, 0.0566253776550293, 0.05660611343383789, 0.05687900924682617, 0.05664435195922852, 0.05668627166748047, 0.05658012771606445, 0.058022113800048826, 0.05727577590942383, 0.05651289749145508, 0.05653452682495117, 0.05692895889282226, 0.057255680084228516, 0.06036419296264649, 0.056809921264648434, 0.05675465774536133, 0.05679718399047851, 0.056791038513183595, 0.056629249572753906, 0.05673564910888672, 0.05671926498413086, 0.057388641357421874, 0.056930912017822265, 0.05710153579711914, 0.058303264617919924, 0.056812992095947264, 0.05694521713256836, 0.05731327819824219, 0.05731737518310547, 0.05725593566894531, 0.057180160522460936, 0.05818777465820312, 0.05746419143676758, 0.05766310501098633, 0.05731430435180664, 0.0571690559387207, 0.057195232391357424, 0.057374847412109374, 0.05731891250610351, 0.05774576187133789, 0.057415840148925784, 0.057101982116699215, 0.05743190383911133, 0.05717046356201172, 0.05705519866943359, 0.05704908752441406, 0.05741318511962891, 0.05716851043701172, 0.05692396926879883, 0.056815616607666014, 0.05662054443359375, 0.05657855987548828, 0.0570777587890625, 0.05661187362670898, 0.05652169418334961, 0.05673574447631836, 0.05667020797729492, 0.057178112030029295, 0.05724476623535156, 0.057398975372314455, 0.05715420913696289, 0.05660675048828125, 0.056851966857910156, 0.05661705780029297, 0.05680364990234375, 0.057542304992675784, 0.05776019287109375, 0.05672470474243164, 0.0569516487121582, 0.057187393188476564, 0.0568895378112793, 0.05685647964477539, 0.056761119842529295, 0.05662246322631836, 0.056609153747558594, 0.05660492706298828, 0.056545280456542966, 0.056829982757568356, 0.05685606384277344, 0.05671984100341797, 0.05651897430419922, 0.05651424026489258, 0.05681497573852539, 0.05722175979614258, 0.05716377639770508, 0.058163265228271484, 0.056827392578125, 0.05672390365600586, 0.05693964767456055, 0.05699264144897461, 0.056712223052978517, 0.05662819290161133, 0.056750080108642575, 0.056858623504638675, 0.05676559829711914, 0.056939102172851565, 0.057087520599365234, 0.05741027069091797, 0.05702860641479492, 0.05835353469848633, 0.05812851333618164, 0.05764006423950195, 0.05727667236328125, 0.05730572891235351, 0.05675212860107422, 0.05671120071411133, 0.056950752258300784, 0.05722521591186523, 0.05672550582885742, 0.056799232482910154, 0.05674703979492188, 0.05750268936157227, 0.05717216110229492, 0.05692940902709961, 0.05701244735717773, 0.0568304328918457, 0.05683347320556641, 0.05698537445068359, 0.056838783264160156, 0.05753427124023437, 0.05706073760986328, 0.05718092727661133, 0.05744332885742188, 
0.0571514892578125, 0.05682128143310547, 0.05743600082397461, 0.05735488128662109, 0.057073665618896485, 0.05745663833618164, 0.05797235107421875, 0.057661823272705075, 0.05711872100830078, 0.0574771842956543, 0.057733089447021484, 0.05725718307495117, 0.057325855255126956, 0.05706390380859375, 0.05727231979370117, 0.0569587516784668, 0.056866943359375, 0.05677065658569336, 0.056855777740478515, 0.05677340698242188, 0.056915969848632814, 0.05697945785522461, 0.056733695983886716, 0.05699174499511719, 0.05675968170166015, 0.05684492874145508, 0.05667225646972656, 0.056960479736328125, 0.05697334289550781, 0.056733951568603516, 0.057350399017333985, 0.05665776062011719, 0.05683420944213867, 0.0566693115234375, 0.05682470321655273, 0.05665792083740234, 0.05680844879150391, 0.05694105529785156, 0.05704959869384765, 0.05681955337524414, 0.05735993576049805, 0.0572248649597168, 0.05680348968505859, 0.05678934478759766, 0.05689129638671875, 0.05856719970703125, 0.05719574356079102, 0.056801761627197266, 0.05690006256103516, 0.05666390228271485, 0.05718179321289062, 0.05672963333129883, 0.056805183410644534, 0.05730783843994141, 0.056956798553466796, 0.05697241592407227, 0.057065441131591794, 0.05869644927978516, 0.056592544555664065, 0.056952030181884765, 0.056982303619384764, 0.05686886215209961, 0.057419742584228516, 0.05736041641235352, 0.0567562255859375, 0.05809135818481445, 0.05676867294311523, 0.05669478225708008, 0.056815616607666014, 0.05669171142578125, 0.05665689468383789, 0.056659969329833984, 0.05645091247558594, 0.05664316940307617, 0.05640454483032226, 0.057218177795410156, 0.05650057601928711, 0.057170398712158205, 0.056489566802978515, 0.05653334426879883, 0.05651196670532226, 0.05690435028076172, 0.056717311859130856, 0.056645633697509766, 0.05653504180908203, 0.05649347305297851, 0.0568240966796875, 0.05637295913696289, 0.05673020935058594, 0.05688217544555664, 0.05647075271606445, 0.056736766815185545, 0.05702121734619141, 0.05669839859008789, 0.05702444839477539, 0.056576126098632815, 0.05671772766113281, 0.05671321487426758, 0.05662515258789062, 0.05717718505859375, 0.057303966522216795, 0.0575467529296875, 0.05747097778320313, 0.05731068801879883, 0.05737350463867188, 0.0583100814819336, 0.05885577774047852, 0.05768310546875, 0.05803702545166016, 0.0575200309753418, 0.05856060791015625, 0.05768601608276367, 0.058193920135498046, 0.058248287200927736, 0.05803619384765625, 0.058094528198242186, 0.05788467025756836, 0.06585964965820312, 0.05807878494262695, 0.05855382537841797, 0.0590140495300293, 0.057431392669677735, 0.057157825469970704, 0.057731552124023436, 0.05738111877441406, 0.05765091323852539, 0.05705984115600586, 0.05720195388793945, 0.056974048614501956, 0.057128959655761716, 0.05718425750732422, 0.05703615951538086, 0.05682582473754883, 0.056674686431884766, 0.05683020782470703, 0.05714332962036133, 0.05692816162109375, 0.05762262344360351, 0.05736374282836914, 0.056789726257324216, 0.057245697021484375, 0.05730070495605469, 0.0568400650024414, 0.05724764633178711, 0.05684384155273438, 0.05678278350830078, 0.05667327880859375, 0.05655756759643555, 0.05709737777709961, 0.05704767990112305, 0.056589790344238285, 0.05670374298095703, 0.056667423248291014, 0.05657468795776367, 0.0566901741027832, 0.05660073471069336, 0.05646969604492187, 0.05653430557250977, 0.05670963287353516, 0.05678652954101562, 0.05692086410522461, 0.057722270965576174, 0.059808353424072265, 0.056995201110839847, 0.056560256958007815, 0.056551456451416016, 0.058367969512939454, 
0.05659881591796875, 0.056624481201171875, 0.05726812744140625, 0.057008609771728516, 0.05657369613647461, 0.05897244644165039, 0.056925472259521485, 0.05644153594970703, 0.05666928100585938, 0.057207199096679685, 0.056775169372558595, 0.05717795181274414, 0.056922271728515626, 0.05708185577392578, 0.05772697448730469, 0.05678079986572265, 0.05669900894165039, 0.057139072418212894, 0.056957183837890626, 0.056841983795166015, 0.05671868896484375, 0.05793958282470703, 0.05679520034790039, 0.05836185455322265, 0.0571514892578125, 0.05712294387817383, 0.058203777313232424, 0.057054527282714845, 0.057055583953857424, 0.05678345489501953, 0.05687311935424805, 0.05672534561157227, 0.05679513549804688, 0.056952545166015625, 0.057372512817382815, 0.05835001754760742, 0.05699728012084961, 0.05678550338745117, 0.05704697418212891, 0.056655937194824216, 0.05671116638183594, 0.056729087829589846, 0.05671782302856445, 0.05674528121948242, 0.05680166244506836, 0.056774974822998044, 0.05680569458007813, 0.05689456176757812, 0.05681417465209961, 0.05680537414550781, 0.05697228622436523, 0.05780582427978516, 0.05755904006958008, 0.05757276916503906, 0.05726883316040039, 0.05776095962524414, 0.057584449768066405, 0.05746799850463867, 0.05750175857543945, 0.05741654586791992, 0.0577325439453125, 0.05786886215209961, 0.05756915283203125, 0.057770111083984374, 0.05745459365844727, 0.057505729675292966, 0.05762259292602539, 0.05747711944580078, 0.05722326278686524, 0.05738643264770508, 0.05711510467529297, 0.05721702575683594, 0.057178112030029295, 0.05718815994262695, 0.05715145492553711, 0.05735161590576172, 0.05737142562866211, 0.05731737518310547, 0.05730303955078125, 0.05756835174560547, 0.057205665588378904, 0.0567459831237793, 0.05673574447631836, 0.05696080017089844, 0.05767782211303711, 0.057634304046630856, 0.05732419204711914, 0.05730303955078125, 0.057835071563720704, 0.05690838241577149, 0.05708736038208008, 0.057173633575439455, 0.056980319976806644, 0.05713919830322266, 0.05747711944580078, 0.05736268615722656, 0.06150300979614258, 0.057567455291748046, 0.05754627227783203, 0.05702915191650391, 0.056715198516845707, 0.05691516876220703, 0.05675212860107422, 0.05672771072387695, 0.05678963088989258, 0.056831905364990234, 0.056974849700927734, 0.05689788818359375, 0.05661017608642578, 0.057654144287109375, 0.05836553573608398, 0.05694300842285156, 0.05650431823730469, 0.05691347122192383, 0.0566993293762207, 0.05647359848022461, 0.05685452651977539, 0.056446624755859376, 0.056414688110351566, 0.05731459045410156, 0.05668508911132813, 0.05675833511352539, 0.05670608139038086, 0.057615329742431644, 0.05732556915283203, 0.05744809722900391, 0.0570863037109375, 0.05681356811523437, 0.0573144645690918, 0.05723222351074219, 0.05691823959350586, 0.05702428817749024, 0.05710233688354492, 0.05713100814819336, 0.056693920135498045, 0.056944480895996095, 0.056736801147460936, 0.056719329833984374, 0.05707571029663086, 0.0570074577331543, 0.05707024002075195, 0.056995361328125, 0.057243167877197264, 0.057088958740234376, 0.056842239379882815, 0.05714495849609375, 0.057103809356689454]",tokens/s,17.506445999284285,, 
4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11136.073728,12227.3792,0.0,11848.9088,11814.752256,s,1,16.2592177734375,16.2592177734375,0.0,16.2592177734375,16.2592177734375,16.2592177734375,16.2592177734375,[16.2592177734375],,kWh,0.00026480793421250536,2.9202987075294415e-05,8.6119513340005e-05,0.0003801304346278048,,MB,2084.450304,14033.027072,0.0,13625.196544,13298.00192,s,10,23.3345458984375,2.33345458984375,0.0006364246180502187,2.3333216552734375,2.33433662109375,2.3344462646484376,2.3345339794921878,"[2.33269580078125, 2.333192138671875, 2.332452392578125, 2.334312255859375, 2.334555908203125, 2.333282958984375, 2.333640625, 2.333096923828125, 2.33395654296875, 2.3333603515625]",tokens/s,109.70858447994995,kWh,6.801452597916902e-05,7.501730836656543e-06,4.510703608559819e-05,0.00012062329290142372,tokens/kWh,2122309.8279136634,MB,2088.77568,14184.022016,0.0,13776.191488,13689.859584,s,10,1365.1401875000001,136.51401875,0.043220834864750195,136.508390625,136.56333750000002,136.5687234375,136.57303218750002,"[136.474234375, 136.480890625, 136.51365625, 136.574109375, 136.554078125, 136.443140625, 136.558046875, 136.562140625, 136.476765625, 136.503125]",tokens/s,0.4614910657298337,kWh,0.003982938044971668,0.0004393487290448445,0.0026491697860008046,0.0070714565600173175,tokens/kWh,8909.05564720682,,s,630,1365.1345581054693,2.1668802509610616,0.001184149768945829,2.1668848876953124,2.1684020751953126,2.1688128173828125,2.1695393676757813,"[2.16586181640625, 2.165533447265625, 2.16496337890625, 2.164727783203125, 2.166044677734375, 2.166136474609375, 2.165833251953125, 2.164509033203125, 2.164980224609375, 2.16477294921875, 2.1657529296875, 2.165496826171875, 2.164805419921875, 2.1644208984375, 2.1651064453125, 2.166121826171875, 2.16661279296875, 2.165902587890625, 2.1653994140625, 2.167724609375, 2.1663828125, 2.166310791015625, 2.165684326171875, 2.16618798828125, 2.166972412109375, 2.165771240234375, 2.16618017578125, 2.16656689453125, 2.16574853515625, 2.16681884765625, 2.166228515625, 2.166172119140625, 2.166724609375, 2.167048095703125, 2.165712158203125, 2.165650146484375, 2.16532275390625, 2.166369140625, 2.167191650390625, 2.167224365234375, 2.167033935546875, 2.1665341796875, 2.16774658203125, 2.167560302734375, 2.16664892578125, 2.16663671875, 2.16684326171875, 2.167447509765625, 2.167486328125, 2.16671044921875, 2.167170166015625, 2.16604541015625, 2.1677705078125, 2.166477783203125, 2.166822265625, 2.1664443359375, 2.16675537109375, 2.166044189453125, 2.166268310546875, 2.16554052734375, 2.1667373046875, 2.16703173828125, 2.166902099609375, 2.164823974609375, 2.16512109375, 2.166529296875, 2.166343994140625, 2.16569873046875, 2.166558837890625, 2.167146484375, 2.165773681640625, 2.166070068359375, 2.16474169921875, 2.165238037109375, 2.166134521484375, 2.165922119140625, 2.16620849609375, 2.16500732421875, 
2.166781005859375, 2.165747802734375, 2.166453369140625, 2.165948974609375, 2.16657958984375, 2.166655029296875, 2.166045654296875, 2.165488525390625, 2.1654794921875, 2.166298583984375, 2.16550146484375, 2.1659345703125, 2.165116455078125, 2.16589892578125, 2.166954833984375, 2.16605224609375, 2.167164794921875, 2.166157958984375, 2.165796875, 2.16744970703125, 2.1675908203125, 2.166740966796875, 2.166281494140625, 2.16719140625, 2.1674482421875, 2.166792236328125, 2.167214111328125, 2.166931396484375, 2.16813525390625, 2.166753173828125, 2.1673251953125, 2.166135009765625, 2.166437744140625, 2.1670419921875, 2.165987060546875, 2.16810302734375, 2.1652685546875, 2.16776904296875, 2.166192138671875, 2.1670849609375, 2.16669775390625, 2.16688671875, 2.16671630859375, 2.1665361328125, 2.166640625, 2.166073486328125, 2.165718994140625, 2.165845947265625, 2.1654443359375, 2.16521337890625, 2.1656064453125, 2.164690185546875, 2.165152099609375, 2.165153564453125, 2.166433349609375, 2.16533056640625, 2.16590087890625, 2.165445068359375, 2.16591162109375, 2.166353515625, 2.166974853515625, 2.166444091796875, 2.16614501953125, 2.1657333984375, 2.166519775390625, 2.165841552734375, 2.165972412109375, 2.165611572265625, 2.165755615234375, 2.16555322265625, 2.16642724609375, 2.165973388671875, 2.166176025390625, 2.167818359375, 2.16762939453125, 2.167644287109375, 2.16604296875, 2.167754638671875, 2.167920166015625, 2.167478759765625, 2.16732421875, 2.167259765625, 2.168190185546875, 2.167671630859375, 2.167791015625, 2.167031982421875, 2.168864990234375, 2.1674599609375, 2.167080810546875, 2.16809423828125, 2.16811328125, 2.1680869140625, 2.167527587890625, 2.167656005859375, 2.167390869140625, 2.166988037109375, 2.168318603515625, 2.167972900390625, 2.1684541015625, 2.167140380859375, 2.168670166015625, 2.1695263671875, 2.16701123046875, 2.168231201171875, 2.167736572265625, 2.16655078125, 2.166257568359375, 2.166305908203125, 2.166731689453125, 2.167017578125, 2.16652099609375, 2.164148193359375, 2.165614501953125, 2.16821533203125, 2.167324951171875, 2.166708251953125, 2.1669375, 2.16767919921875, 2.1668359375, 2.1665556640625, 2.16671630859375, 2.1664638671875, 2.167177978515625, 2.167428955078125, 2.1676435546875, 2.16705712890625, 2.166687744140625, 2.16604052734375, 2.167203857421875, 2.166679443359375, 2.167146484375, 2.16696142578125, 2.167349365234375, 2.1678125, 2.167871826171875, 2.167183349609375, 2.16703173828125, 2.16832421875, 2.1674189453125, 2.167361572265625, 2.168323974609375, 2.167330810546875, 2.167510009765625, 2.167006103515625, 2.167711669921875, 2.1676904296875, 2.1678779296875, 2.1690947265625, 2.16873583984375, 2.168320068359375, 2.16975146484375, 2.1685224609375, 2.169452880859375, 2.169634521484375, 2.169194580078125, 2.168541259765625, 2.169198974609375, 2.168323974609375, 2.16812890625, 2.169175048828125, 2.168743408203125, 2.16881201171875, 2.166730712890625, 2.1686845703125, 2.16939892578125, 2.168643798828125, 2.169746826171875, 2.168140625, 2.168422119140625, 2.167958740234375, 2.16957373046875, 2.168743896484375, 2.16764599609375, 2.169199462890625, 2.1681357421875, 2.165767822265625, 2.166407470703125, 2.167010986328125, 2.1682080078125, 2.167797607421875, 2.1663662109375, 2.16692333984375, 2.166476806640625, 2.166065185546875, 2.16653759765625, 2.167151123046875, 2.167858154296875, 2.166207275390625, 2.16652197265625, 2.167060302734375, 2.16585009765625, 2.16644970703125, 2.16733544921875, 2.166739013671875, 2.16699853515625, 2.166995361328125, 2.1677744140625, 
2.166822998046875, 2.166467529296875, 2.16658935546875, 2.166917236328125, 2.167504638671875, 2.166527587890625, 2.1661435546875, 2.167434326171875, 2.16802099609375, 2.1672333984375, 2.166693359375, 2.166917724609375, 2.166367431640625, 2.167483154296875, 2.167582763671875, 2.167276611328125, 2.167583251953125, 2.168227294921875, 2.16863818359375, 2.168342529296875, 2.167855224609375, 2.1689814453125, 2.169544677734375, 2.16923876953125, 2.1683740234375, 2.167326416015625, 2.168219482421875, 2.168541748046875, 2.16815771484375, 2.168617431640625, 2.168645751953125, 2.169489013671875, 2.167476806640625, 2.1682646484375, 2.168379150390625, 2.1695771484375, 2.1693896484375, 2.16714453125, 2.16802099609375, 2.168919677734375, 2.1644375, 2.16413525390625, 2.16406494140625, 2.164509765625, 2.165660400390625, 2.167010986328125, 2.16608935546875, 2.165963623046875, 2.16654248046875, 2.164913818359375, 2.164343017578125, 2.16390869140625, 2.163916748046875, 2.164716796875, 2.165203125, 2.164300048828125, 2.16482861328125, 2.16473583984375, 2.165071044921875, 2.16362890625, 2.1636845703125, 2.16490380859375, 2.165056396484375, 2.16562841796875, 2.163931640625, 2.164023193359375, 2.165086181640625, 2.165473388671875, 2.165923828125, 2.165321533203125, 2.166739013671875, 2.16607958984375, 2.1651669921875, 2.167163818359375, 2.16523974609375, 2.1653134765625, 2.16590478515625, 2.165904052734375, 2.166857177734375, 2.167341552734375, 2.16569873046875, 2.167005126953125, 2.165802001953125, 2.165296142578125, 2.16627001953125, 2.166312744140625, 2.166623779296875, 2.166095947265625, 2.16608740234375, 2.166962890625, 2.1675576171875, 2.166712890625, 2.166293701171875, 2.16719580078125, 2.167150634765625, 2.167144775390625, 2.16835498046875, 2.166561279296875, 2.16798828125, 2.1664521484375, 2.166530029296875, 2.166826904296875, 2.1669375, 2.166547607421875, 2.16634033203125, 2.16666650390625, 2.16660986328125, 2.167008056640625, 2.166205810546875, 2.166114990234375, 2.166304931640625, 2.166407958984375, 2.167910888671875, 2.16789453125, 2.16703564453125, 2.1695634765625, 2.167732177734375, 2.166179931640625, 2.16768017578125, 2.167562744140625, 2.167038330078125, 2.16815380859375, 2.166386962890625, 2.1661962890625, 2.1672734375, 2.166528076171875, 2.16612451171875, 2.16584814453125, 2.16689208984375, 2.167228759765625, 2.167357421875, 2.16840185546875, 2.168848388671875, 2.168018798828125, 2.1668857421875, 2.1680400390625, 2.167444580078125, 2.166825927734375, 2.166277587890625, 2.1680703125, 2.16814208984375, 2.16769873046875, 2.16819287109375, 2.168285888671875, 2.168908935546875, 2.16764697265625, 2.168127197265625, 2.168122802734375, 2.168683349609375, 2.16888330078125, 2.168537109375, 2.168404052734375, 2.168764404296875, 2.1684013671875, 2.166385009765625, 2.16930908203125, 2.168162353515625, 2.1688134765625, 2.16937255859375, 2.167416748046875, 2.16837744140625, 2.169112548828125, 2.1670537109375, 2.16781884765625, 2.16823193359375, 2.167018798828125, 2.166949951171875, 2.1668740234375, 2.16616357421875, 2.166748291015625, 2.167088134765625, 2.167986083984375, 2.16764599609375, 2.1668125, 2.167314697265625, 2.166613525390625, 2.16709912109375, 2.16738037109375, 2.167388427734375, 2.1675126953125, 2.16755224609375, 2.167644287109375, 2.166744384765625, 2.167925048828125, 2.168575927734375, 2.1672841796875, 2.16865771484375, 2.168525146484375, 2.1678818359375, 2.16807421875, 2.167703369140625, 2.16839794921875, 2.169016357421875, 2.169155517578125, 2.16738720703125, 2.1685380859375, 
2.167985107421875, 2.1684755859375, 2.168439453125, 2.16862255859375, 2.16863818359375, 2.167289794921875, 2.167142333984375, 2.16776708984375, 2.16764013671875, 2.1680947265625, 2.168289306640625, 2.167946533203125, 2.1689287109375, 2.168463623046875, 2.16720703125, 2.16830029296875, 2.16787548828125, 2.1665458984375, 2.167499267578125, 2.166310791015625, 2.168385498046875, 2.166969970703125, 2.1669541015625, 2.1665302734375, 2.167193115234375, 2.166907470703125, 2.168440673828125, 2.166923095703125, 2.166546875, 2.16703564453125, 2.168654052734375, 2.167615478515625, 2.167334716796875, 2.165754150390625, 2.16547900390625, 2.165604248046875, 2.165964599609375, 2.164871337890625, 2.1648740234375, 2.165676025390625, 2.166201416015625, 2.16602734375, 2.165337646484375, 2.166315185546875, 2.164508544921875, 2.164201904296875, 2.1656923828125, 2.165769775390625, 2.165017333984375, 2.1645595703125, 2.16470947265625, 2.165456787109375, 2.165345703125, 2.165402099609375, 2.166044677734375, 2.16509814453125, 2.166001953125, 2.165135498046875, 2.16656884765625, 2.164768798828125, 2.165671875, 2.166693359375, 2.166614501953125, 2.168379150390625, 2.167519287109375, 2.166951904296875, 2.16780712890625, 2.166045654296875, 2.16529296875, 2.16745166015625, 2.166091064453125, 2.16620068359375, 2.16697900390625, 2.16651171875, 2.167469970703125, 2.167701416015625, 2.1664912109375, 2.165646728515625, 2.166743408203125, 2.16632763671875, 2.16810693359375, 2.168990966796875, 2.1682939453125, 2.168727783203125, 2.1678369140625, 2.16667333984375, 2.167680908203125, 2.167146728515625, 2.1657333984375, 2.1679345703125, 2.165698974609375, 2.166783203125, 2.1666943359375, 2.165668212890625, 2.16674609375, 2.16652001953125, 2.164810302734375, 2.1640908203125, 2.16560009765625, 2.16654833984375, 2.16613720703125, 2.166560546875, 2.165886962890625, 2.165761474609375, 2.16423388671875, 2.164981689453125, 2.16667431640625, 2.16605078125, 2.166181884765625, 2.16501806640625, 2.166455078125, 2.165868408203125, 2.166765625, 2.167066650390625, 2.1680078125, 2.1658408203125, 2.166343505859375, 2.1672490234375, 2.167269287109375, 2.1657314453125, 2.16593212890625, 2.166884033203125, 2.166148681640625, 2.167370361328125, 2.166601806640625, 2.1672099609375, 2.1672568359375, 2.167049560546875, 2.16719384765625, 2.16667822265625, 2.167221923828125, 2.16730859375, 2.167490478515625, 2.166222412109375, 2.167810302734375, 2.166908935546875, 2.167332275390625, 2.166823486328125, 2.167975830078125, 2.16768408203125, 2.168091064453125, 2.167548583984375, 2.16677783203125, 2.166906494140625, 2.16699951171875, 2.16765966796875, 2.167762939453125, 2.166803466796875, 2.1678857421875, 2.166867919921875, 2.16678515625, 2.167721923828125, 2.166592529296875, 2.1664208984375, 2.167431884765625, 2.166370361328125, 2.16732861328125, 2.166756591796875, 2.16758544921875]",tokens/s,0.46149296877687485,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3554.562048,4495.179776,0.0,4116.709376,3980.386816,s,1,9.8548896484375,9.8548896484375,0.0,9.8548896484375,9.8548896484375,9.8548896484375,9.8548896484375,[9.8548896484375],,kWh,9.09966390958336e-05,1.0026190653853335e-05,3.093585808200405e-05,0.000131958687831691,,MB,3485.7984,4826.529792,0.0,4418.699264,4245.89568,s,10,6.620557922363281,0.662055792236328,0.000751265648237151,0.6620900573730468,0.6628945556640625,0.6629605834960938,0.6630134057617187,"[0.6616687622070313, 0.6617026977539062, 0.663026611328125, 0.66051611328125, 0.6628349609375, 0.6612864379882812, 0.6621630859375, 0.6620170288085937, 0.6624623413085937, 0.6628798828125]",tokens/s,386.67436038172747,kWh,1.9296033496614424e-05,2.1279812942374758e-06,1.2802249825124906e-05,3.42262646159768e-05,tokens/kWh,7479635.971741402,MB,3489.87392,4837.015552,0.0,4429.185024,4245.89824,s,10,385.57617187499994,38.5576171875,0.010987278465840647,38.560576171875,38.567292968749996,38.567697265625,38.568020703125,"[38.53258984375, 38.54215234375, 38.55772265625, 38.5681015625, 38.5658203125, 38.5649296875, 38.5565, 38.560171875, 38.56098046875, 38.567203125]",tokens/s,1.6339183952587188,kWh,0.001124747326490886,0.00012406806433800434,0.0007475806918140751,0.0019963960826429655,tokens/kWh,31556.86416524936,,s,630,385.5724367675785,0.6120197409009176,0.0004554953714293266,0.6120278625488281,0.6126125122070313,0.6127261108398437,0.6130161370849609,"[0.6121142578125, 0.6112144165039063, 0.612133056640625, 0.6109717407226563, 0.6118790283203125, 0.6111687622070312, 0.6118563842773438, 0.6111764526367187, 0.611968994140625, 0.6111453247070312, 0.6115288696289063, 0.6115491333007812, 0.61184814453125, 0.6115392456054688, 0.6113744506835938, 0.6116513061523438, 0.6119515380859375, 0.6112620239257812, 0.6114207763671875, 0.6118275146484375, 0.61140771484375, 0.6112462158203125, 0.6116078491210938, 0.6112278442382812, 0.6117545166015625, 0.6113173828125, 0.6115408325195313, 0.6119054565429688, 0.61109716796875, 0.611557373046875, 0.6114898071289062, 0.6120910034179687, 0.6114508666992188, 0.6112526245117188, 0.6114199829101562, 0.6118121337890625, 0.6114620361328125, 
0.61165380859375, 0.6115807495117187, 0.6114669799804687, 0.6115678100585937, 0.6115462036132813, 0.6115228881835938, 0.6111729736328125, 0.611970703125, 0.6116990356445312, 0.6120406494140626, 0.611641357421875, 0.6119807739257812, 0.6115968017578125, 0.6114283447265625, 0.6122384643554688, 0.6113780517578125, 0.612173828125, 0.6114588012695312, 0.6116984252929687, 0.6117297973632813, 0.61170703125, 0.6120755004882813, 0.61180126953125, 0.6120068359375, 0.612145263671875, 0.611707763671875, 0.611455078125, 0.6119686889648438, 0.6116396484375, 0.611403564453125, 0.6110066528320313, 0.61208984375, 0.6113250732421875, 0.6116536254882813, 0.6114210815429687, 0.611715087890625, 0.6115339965820312, 0.6111299438476563, 0.6121720581054687, 0.6109406127929687, 0.6123619384765625, 0.6113341064453125, 0.6119102783203125, 0.6113526000976562, 0.611614013671875, 0.6116974487304687, 0.611786376953125, 0.6113773803710938, 0.6118853149414063, 0.611208984375, 0.6120120239257812, 0.6114317626953125, 0.6120004272460937, 0.6119423828125, 0.6115693969726562, 0.6115078735351562, 0.6117171020507812, 0.6123374633789063, 0.6121275024414062, 0.6108787231445313, 0.6121347045898438, 0.6113515014648437, 0.61224267578125, 0.6114926147460937, 0.6115780029296874, 0.611550537109375, 0.6118218383789062, 0.61205126953125, 0.6117457885742188, 0.6117621459960938, 0.6116024169921875, 0.6124619140625, 0.6115656127929687, 0.6119385986328125, 0.6118463134765625, 0.612028564453125, 0.6117929077148437, 0.6125826416015625, 0.6117527465820313, 0.6122456665039062, 0.6116406860351562, 0.6126637573242187, 0.6123397216796875, 0.6117594604492187, 0.6127886352539063, 0.6115760498046875, 0.6121256713867187, 0.6121380004882813, 0.6117232666015625, 0.6116925659179687, 0.6115901489257812, 0.6112005615234375, 0.6118138427734375, 0.6111497192382812, 0.6115753173828125, 0.6116255493164062, 0.6120460205078125, 0.6120271606445312, 0.6112542724609376, 0.6120878295898438, 0.6115795288085938, 0.6120797729492188, 0.6120482788085938, 0.6121416015625, 0.6114710693359375, 0.6121723022460938, 0.6112501831054687, 0.6116390991210937, 0.6114646606445312, 0.6126207275390625, 0.6116801147460937, 0.6122869873046874, 0.61224755859375, 0.6117556762695312, 0.6119776000976562, 0.6119215698242187, 0.6119111938476562, 0.61240380859375, 0.6120157470703125, 0.612395751953125, 0.6117763061523438, 0.6126807861328125, 0.61121630859375, 0.6124871826171875, 0.6119192504882812, 0.6123485717773437, 0.612099365234375, 0.6127904663085938, 0.611822021484375, 0.6121922607421875, 0.6124646606445312, 0.6118911743164063, 0.61201416015625, 0.6117307739257812, 0.612303466796875, 0.61236962890625, 0.6120701904296875, 0.612619384765625, 0.6118327026367187, 0.612288330078125, 0.612212890625, 0.61239013671875, 0.6121643676757812, 0.6127042846679688, 0.61183349609375, 0.6126124877929687, 0.6126674194335937, 0.611536376953125, 0.6123505249023438, 0.612005859375, 0.6123560791015625, 0.6124705200195313, 0.6131610717773438, 0.6112392578125, 0.6117443237304687, 0.6113894653320312, 0.6120325317382812, 0.6118903198242187, 0.6113043212890625, 0.6120017700195313, 0.6121266479492188, 0.61219775390625, 0.6118837280273437, 0.6123126831054687, 0.6115352172851563, 0.6124722290039063, 0.611629638671875, 0.612042724609375, 0.6121021118164063, 0.6123438110351562, 0.6116590576171875, 0.6119451293945313, 0.6118370971679687, 0.6123292846679688, 0.6120560302734375, 0.6118502197265625, 0.612443115234375, 0.6120396728515625, 0.6123577880859375, 0.6119038696289063, 0.6128283081054687, 0.6119330444335938, 
0.612441650390625, 0.6119649047851563, 0.6122930908203125, 0.6122843627929687, 0.6123295288085937, 0.6125045776367187, 0.6127236938476562, 0.6114489135742187, 0.61211572265625, 0.6123500366210938, 0.6122659301757812, 0.6124222412109375, 0.6124031982421875, 0.6122719116210937, 0.6120572509765625, 0.6123601684570312, 0.61216064453125, 0.6124381713867187, 0.6126107788085937, 0.6120919189453125, 0.6125908203125, 0.6119821166992188, 0.6124503173828125, 0.6126817016601562, 0.6120588989257812, 0.6132062377929688, 0.6120140991210937, 0.6124320068359375, 0.6122822265625, 0.6124273071289063, 0.612599365234375, 0.6120205078125, 0.61283935546875, 0.6122119140625, 0.6117545166015625, 0.611811279296875, 0.6120430908203125, 0.611078125, 0.61248876953125, 0.6120595703125, 0.612294677734375, 0.6119854125976563, 0.6123458862304687, 0.6114295043945313, 0.611914794921875, 0.612179443359375, 0.6117108154296875, 0.6124776611328125, 0.6116636962890625, 0.6123334350585937, 0.6115018310546875, 0.6118132934570313, 0.612077392578125, 0.6123218994140625, 0.6119627685546875, 0.6126206665039062, 0.6122882690429687, 0.6115143432617187, 0.6121533203125, 0.6119895629882812, 0.6120072021484375, 0.6114556884765625, 0.6126775512695313, 0.61162890625, 0.6123578491210937, 0.6120326538085937, 0.6123626708984375, 0.6125403442382813, 0.612054443359375, 0.6119348754882813, 0.6121572265625, 0.6124013671875, 0.612495361328125, 0.6122327880859375, 0.6121760864257813, 0.6119195556640625, 0.6124318237304688, 0.6122091674804687, 0.6119915771484375, 0.6124253540039063, 0.6124895629882813, 0.6127388916015625, 0.6125733642578125, 0.6124769287109375, 0.6121710815429687, 0.6117905883789062, 0.6126417236328126, 0.6126325073242187, 0.6121554565429688, 0.6125913696289063, 0.61184228515625, 0.6124534912109375, 0.6122402954101562, 0.6122333374023438, 0.612042236328125, 0.6128357543945312, 0.612595703125, 0.6127513427734375, 0.6109691162109375, 0.6123616943359375, 0.6117359619140625, 0.612026611328125, 0.6123339233398437, 0.610981689453125, 0.6121719970703124, 0.6120570678710937, 0.61194677734375, 0.6122832641601562, 0.611919921875, 0.6118634033203125, 0.6124083862304688, 0.6119985961914063, 0.61241357421875, 0.6116821899414062, 0.61247900390625, 0.611831787109375, 0.611758056640625, 0.6124605712890625, 0.61218408203125, 0.6115502319335937, 0.6121154174804687, 0.6121103515625, 0.6117601318359375, 0.6125892333984375, 0.61183349609375, 0.6121336669921875, 0.6117366943359375, 0.612065185546875, 0.61258837890625, 0.6118395385742188, 0.6126959838867188, 0.6116390380859374, 0.6129447021484375, 0.6121799926757813, 0.6119232788085938, 0.611804931640625, 0.6121580810546875, 0.6127963256835938, 0.6115818481445312, 0.6126958618164062, 0.6120201416015625, 0.6121695556640625, 0.61252197265625, 0.6119035034179687, 0.6121950073242187, 0.6117481079101562, 0.6126157836914062, 0.6124855346679687, 0.6115591430664062, 0.6131427612304687, 0.6119464721679687, 0.612766845703125, 0.6125670776367188, 0.61140869140625, 0.6127734985351563, 0.6117210083007812, 0.6126946411132812, 0.6123840942382812, 0.6119880981445313, 0.611838134765625, 0.6118623657226563, 0.6119669189453125, 0.6112620849609375, 0.6125039672851562, 0.6112794189453125, 0.6130339965820313, 0.6112164916992188, 0.6126127319335938, 0.6115816040039063, 0.6117484741210938, 0.61230078125, 0.61129931640625, 0.6126282348632812, 0.6112975463867187, 0.6116823120117187, 0.6121248168945312, 0.61120703125, 0.6125687255859374, 0.6120260620117187, 0.6120589599609375, 0.6121654052734375, 0.6110865478515625, 
0.6126713256835937, 0.6112509155273438, 0.6119139404296875, 0.6123639526367187, 0.6120186157226563, 0.6126338500976563, 0.6111279907226562, 0.6127632446289063, 0.6112950439453125, 0.6119976196289062, 0.6120557250976563, 0.6124134521484375, 0.6123069458007813, 0.6116843872070312, 0.6122720336914063, 0.611460693359375, 0.612424072265625, 0.6122516479492187, 0.612010009765625, 0.6124620361328125, 0.6112745361328125, 0.6120944213867188, 0.6120062255859375, 0.6122250366210937, 0.6117908325195313, 0.6119178466796875, 0.6122025146484374, 0.6119874267578125, 0.611997802734375, 0.6123902587890625, 0.6122452392578125, 0.6121665649414062, 0.6117449951171875, 0.6119155883789062, 0.6119039306640625, 0.6129322509765625, 0.6121162719726563, 0.612421630859375, 0.6124832763671875, 0.6116205444335937, 0.6125343627929688, 0.6110094604492188, 0.6124031982421875, 0.6119382934570312, 0.611999755859375, 0.61218603515625, 0.610879150390625, 0.61291259765625, 0.6114223022460937, 0.6118695068359375, 0.6122926635742187, 0.611399658203125, 0.6122921752929688, 0.6110989379882813, 0.6120439453125, 0.6116995239257812, 0.6118563842773438, 0.6123151245117188, 0.6112803955078125, 0.612638427734375, 0.6121705322265625, 0.611858154296875, 0.6119235229492187, 0.6121417236328125, 0.6124308471679687, 0.6114147338867187, 0.6120963134765625, 0.6115348510742188, 0.6124412231445312, 0.6118445434570312, 0.6117661743164062, 0.612811279296875, 0.6112337646484375, 0.6126388549804688, 0.6118950805664063, 0.612406982421875, 0.6118363037109374, 0.6121062622070312, 0.6122528686523437, 0.6123179931640625, 0.6118911743164063, 0.6126052856445312, 0.6117557373046875, 0.612244140625, 0.612090087890625, 0.6121342163085938, 0.612966552734375, 0.6113693237304687, 0.6125150146484375, 0.611779541015625, 0.61260546875, 0.6122430419921875, 0.6119534301757813, 0.6121980590820313, 0.612149169921875, 0.6125381469726563, 0.6121417846679688, 0.612169677734375, 0.61263671875, 0.611600341796875, 0.6127117919921875, 0.6122625732421875, 0.6120914916992187, 0.6121656494140625, 0.6111639404296875, 0.6126246337890625, 0.6118850708007812, 0.6122250366210937, 0.61163134765625, 0.6121688842773437, 0.6111559448242188, 0.6125655517578125, 0.6118174438476562, 0.6122291259765625, 0.6118236083984375, 0.6115594482421876, 0.611704833984375, 0.6121345825195312, 0.6124312744140625, 0.6116069946289062, 0.6121016235351563, 0.6124471435546875, 0.611276123046875, 0.6121101684570313, 0.6114844970703125, 0.6124954833984375, 0.6115463256835938, 0.6123585205078125, 0.612188232421875, 0.6113446655273438, 0.6121123657226563, 0.6114365234375, 0.6124310302734375, 0.6119739379882813, 0.6115614624023438, 0.6124237060546875, 0.61134814453125, 0.6127559204101563, 0.6119259033203125, 0.6119415893554687, 0.61205712890625, 0.6115681762695313, 0.612296875, 0.6123171997070312, 0.61203662109375, 0.6125028686523437, 0.6117075805664063, 0.6124912719726563, 0.6118809814453126, 0.6126448364257813, 0.61213037109375, 0.6118486328125, 0.6125650024414062, 0.6119321899414063, 0.6125468139648438, 0.6120425415039062, 0.6125135498046875, 0.6115678100585937, 0.61297216796875, 0.6116644287109375, 0.612972412109375, 0.6127227172851563, 0.6117510986328125, 0.6131720581054687, 0.6115921630859374, 0.6129044799804687, 0.6119600830078125, 0.6125370483398438, 0.61171435546875, 0.6126210327148438, 0.6113599243164063, 0.6115885620117187, 0.6133087158203125, 0.6110249633789062, 0.6127280883789062, 0.6117313842773437, 0.6121046142578125, 0.61195849609375, 0.6116296997070313, 0.6124127197265625, 0.6114652099609375, 
0.61193701171875, 0.612396728515625, 0.6118967895507812, 0.6119945068359375, 0.6115444946289063, 0.6119818725585937, 0.6122965698242188, 0.6124524536132813, 0.6119517211914063, 0.6124677734375, 0.611462158203125, 0.6120108642578125, 0.612780029296875, 0.6114529418945313, 0.6124046630859376, 0.611566162109375, 0.6125131225585938, 0.6121314697265625, 0.6122491455078125, 0.6122808227539063, 0.6119035034179687, 0.6121082763671875, 0.611983154296875, 0.6118319702148437, 0.6126550903320312, 0.612560791015625, 0.6118763427734375, 0.6122172241210937, 0.6117850341796875, 0.6125855712890625, 0.6121488647460938, 0.6121957397460938, 0.6129197387695312, 0.611925537109375, 0.612774658203125, 0.6122659912109375, 0.6121143798828125, 0.6127218627929687, 0.6118162841796875, 0.6126605224609375, 0.6120098266601562, 0.6125699462890625, 0.6128468627929687, 0.6122276000976562, 0.6131796875, 0.61227197265625, 0.612358154296875, 0.6123499755859375]",tokens/s,1.6339342233110976,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,5180.358656,5584.584704,0.0,5182.062592,5181.108736,s,1,11.4033701171875,11.4033701171875,0.0,11.4033701171875,11.4033701171875,11.4033701171875,11.4033701171875,[11.4033701171875],,kWh,0.00011793607067084699,1.2999488384072235e-05,3.546447281600057e-05,0.00016640003187091978,,MB,5158.52288,5739.773952,0.0,5322.571776,5283.621376,s,10,2.5200260314941407,0.2520026031494141,0.00023730096354918385,0.25199404907226564,0.2523175537109375,0.2523290344238281,0.2523382189941406,"[0.2520255432128906, 0.2518825225830078, 0.25204130554199217, 0.2519625549316406, 0.25176133728027345, 0.25231500244140626, 0.2515979766845703, 0.25181840515136716, 0.25234051513671873, 0.2522808685302734]",tokens/s,1015.862522055837,kWh,7.4315017853130404e-06,8.193028827532785e-07,4.914594209450007e-06,1.3165398877516325e-05,tokens/kWh,19444910.28199632,MB,5164.875776,5756.551168,0.0,5339.348992,5283.623936,s,10,28.434452392578123,2.8434452392578122,0.00992853494976894,2.84279150390625,2.860056396484375,2.8606650390625,2.861151953125,"[2.83762939453125, 2.835802978515625, 2.8265859375, 2.8394404296875, 2.845128173828125, 2.842534912109375, 2.843087646484375, 2.861273681640625, 2.859921142578125, 2.843048095703125]",tokens/s,22.15622060526971,kWh,8.189476586593759e-05,9.033323295490017e-06,5.2827674206550046e-05,0.00014375576336797768,tokens/kWh,438243.2990789819,,s,630,28.43129645919799,0.045129041998726985,0.0007832032918577279,0.04501094436645508,0.04575907897949219,0.04640671424865722,0.0487604891204834,"[0.04511142349243164, 0.04876540756225586, 0.044556289672851565, 0.044652542114257815, 0.044267520904541016, 0.04396169662475586, 0.04398342514038086, 0.044265567779541014, 0.04417257690429687, 0.044664638519287106, 0.04425001525878906, 0.04437811279296875, 0.0442749137878418, 0.04409833526611328, 0.04412416076660156, 0.04421836853027344, 
0.04508176040649414, 0.04489625549316406, 0.044993217468261716, 0.044964256286621096, 0.04498191833496094, 0.044853343963623046, 0.04430352020263672, 0.044921695709228514, 0.04972748947143555, 0.044592479705810546, 0.04484777450561524, 0.04470988845825195, 0.04430438232421875, 0.04458438491821289, 0.0451610221862793, 0.04432691192626953, 0.046862335205078126, 0.04504780960083008, 0.04423680114746094, 0.04426137542724609, 0.04474265670776367, 0.04437744140625, 0.044423839569091794, 0.044420673370361326, 0.04445228958129883, 0.04468255996704101, 0.04495840072631836, 0.04483891296386719, 0.04461308670043945, 0.048876064300537106, 0.044912094116210936, 0.0494699821472168, 0.04523417663574219, 0.04506828689575195, 0.04490444946289063, 0.04526006317138672, 0.04503567886352539, 0.04506246566772461, 0.04615913772583008, 0.04510819244384766, 0.045551616668701174, 0.044980224609375, 0.04526851272583008, 0.04512201690673828, 0.04493628692626953, 0.0447210578918457, 0.044746753692626956, 0.04530176162719726, 0.04853145599365234, 0.046018558502197264, 0.04595503997802734, 0.044862590789794925, 0.044886943817138675, 0.044678558349609376, 0.04535766220092773, 0.04539187240600586, 0.04436377716064453, 0.04483283233642578, 0.04476102447509766, 0.04481024169921875, 0.04478739166259765, 0.04480646514892578, 0.04476089477539062, 0.04453753662109375, 0.044814273834228514, 0.04549075317382813, 0.04516556930541992, 0.045071361541748046, 0.04525161743164063, 0.04465353775024414, 0.04475235366821289, 0.044945953369140625, 0.04476518249511719, 0.045348705291748045, 0.04523212814331055, 0.045086879730224606, 0.044928863525390626, 0.047558815002441406, 0.04566758346557617, 0.04547660827636719, 0.04453116989135742, 0.04513232040405273, 0.04492697525024414, 0.04471398544311524, 0.044482398986816406, 0.044843006134033206, 0.0445786247253418, 0.044198238372802734, 0.0446376953125, 0.04495001602172852, 0.04490864181518555, 0.04480195236206055, 0.04454105758666992, 0.04440281677246094, 0.04456320190429688, 0.045888832092285156, 0.04425759887695312, 0.044519680023193356, 0.044917919158935546, 0.045771297454833985, 0.04486374282836914, 0.044626113891601565, 0.044604480743408205, 0.0444339828491211, 0.04473484802246094, 0.044761089324951174, 0.04465459060668946, 0.04500396728515625, 0.044730430603027345, 0.04467993545532226, 0.0457542724609375, 0.0451478385925293, 0.0454925422668457, 0.04494649505615234, 0.04456499099731445, 0.044507583618164065, 0.044799999237060545, 0.04490854263305664, 0.04548339080810547, 0.04475334548950195, 0.04480633544921875, 0.04490854263305664, 0.04484873580932617, 0.04451084899902344, 0.04442297744750977, 0.04507900619506836, 0.04515891265869141, 0.04505187225341797, 0.044940353393554684, 0.04486857604980469, 0.04583628845214844, 0.0448573112487793, 0.0443408317565918, 0.044407230377197265, 0.04524758529663086, 0.04523865509033203, 0.04502924728393555, 0.044894046783447265, 0.045316032409667965, 0.045115711212158204, 0.044698177337646486, 0.04444566345214844, 0.04507036972045898, 0.0449117431640625, 0.04480908966064453, 0.04437811279296875, 0.04441088104248047, 0.044199935913085936, 0.04417443084716797, 0.04414556884765625, 0.044520641326904295, 0.045034305572509765, 0.044797409057617185, 0.04477596664428711, 0.0449637451171875, 0.044642208099365234, 0.044765377044677736, 0.04444287872314453, 0.04498009490966797, 0.04566515350341797, 0.04498227310180664, 0.04505395126342773, 0.044859390258789066, 0.0447281265258789, 0.044365215301513675, 0.044506977081298825, 0.04533942413330078, 
0.045252769470214844, 0.045350910186767575, 0.045080577850341794, 0.04458838272094726, 0.044300960540771483, 0.04482358551025391, 0.04580352020263672, 0.04477337646484375, 0.04431788635253906, 0.04547257614135742, 0.04592844772338867, 0.04501913452148437, 0.045195262908935545, 0.04874844741821289, 0.04474249649047852, 0.04447660827636719, 0.04474211120605469, 0.045185150146484374, 0.045218208312988284, 0.04509702301025391, 0.04497139358520508, 0.04468377685546875, 0.044687488555908206, 0.04446822357177734, 0.044322017669677735, 0.045093406677246095, 0.045053665161132815, 0.04486608123779297, 0.04482608032226563, 0.04453020858764648, 0.044128257751464846, 0.04404592132568359, 0.044275936126708985, 0.04636896133422851, 0.044971969604492186, 0.045117504119873045, 0.04463564682006836, 0.044466686248779294, 0.044396190643310546, 0.04438438415527344, 0.04472444915771484, 0.045149215698242186, 0.04523926544189453, 0.044843006134033206, 0.044783519744873046, 0.04463216018676758, 0.04445552062988281, 0.04451484680175781, 0.04462067031860351, 0.0449486083984375, 0.044948352813720706, 0.045017055511474606, 0.04465206527709961, 0.046141952514648435, 0.04458700942993164, 0.04519116973876953, 0.04484864044189453, 0.04518924713134766, 0.045695358276367185, 0.045503742218017576, 0.044444416046142576, 0.044576416015625, 0.044774753570556644, 0.045116447448730466, 0.04501641464233398, 0.04526144027709961, 0.044969249725341796, 0.04571609497070313, 0.05057689666748047, 0.04542879867553711, 0.04668937683105469, 0.04508348846435547, 0.04530387115478516, 0.04506009674072266, 0.04458700942993164, 0.04435103988647461, 0.04472051239013672, 0.04510521697998047, 0.04546355056762695, 0.04470783996582031, 0.044930049896240234, 0.044783615112304685, 0.04465151977539063, 0.04460508728027344, 0.04486563110351562, 0.045007102966308596, 0.04523212814331055, 0.044865535736083983, 0.04744540786743164, 0.04569497680664063, 0.045265472412109375, 0.04458425521850586, 0.044969791412353514, 0.04724124908447266, 0.046457534790039064, 0.0454453125, 0.04502460861206055, 0.04496166229248047, 0.044639007568359375, 0.044862590789794925, 0.04495232009887695, 0.04531008148193359, 0.04534815979003906, 0.04500051116943359, 0.044880416870117186, 0.044451423645019535, 0.044262142181396485, 0.04415283203125, 0.0444951057434082, 0.045340160369873046, 0.045187328338623045, 0.045023231506347655, 0.0446484489440918, 0.04437811279296875, 0.04467251205444336, 0.04466739273071289, 0.049608543395996095, 0.0457811508178711, 0.04580979156494141, 0.045117313385009766, 0.04599603271484375, 0.04462387084960937, 0.04468851089477539, 0.04475379180908203, 0.04506828689575195, 0.045090816497802735, 0.045279232025146485, 0.04514815902709961, 0.04476038360595703, 0.04489049530029297, 0.04462575912475586, 0.04478326416015625, 0.046629150390625, 0.04507881546020508, 0.04503718566894531, 0.04490646362304687, 0.04458118438720703, 0.045281375885009766, 0.04514815902709961, 0.04491657638549805, 0.045096736907958984, 0.04486297607421875, 0.04465702438354492, 0.04448716735839844, 0.04469760131835938, 0.045080257415771485, 0.04487200164794922, 0.04469548797607422, 0.04497375869750977, 0.044832672119140625, 0.04468988800048828, 0.04450835037231445, 0.04464672088623047, 0.0450334701538086, 0.04495001602172852, 0.04496588897705078, 0.04509286499023438, 0.04533772659301758, 0.04483567810058594, 0.044402721405029294, 0.04480220794677734, 0.044871040344238285, 0.045123455047607425, 0.04500515365600586, 0.04485555267333984, 0.044886016845703126, 0.04499792098999023, 
0.04506492614746094, 0.045235294342041016, 0.04467292785644531, 0.04501164627075195, 0.04482284927368164, 0.0449536018371582, 0.044795745849609374, 0.04452675247192383, 0.044542976379394535, 0.04515382385253906, 0.045074817657470706, 0.04515030288696289, 0.04511539077758789, 0.04519935989379883, 0.04501094436645508, 0.045238273620605465, 0.04523417663574219, 0.045088768005371094, 0.045190654754638675, 0.045275646209716795, 0.04545497512817383, 0.04999724960327148, 0.04592326354980469, 0.045758464813232425, 0.04556390380859375, 0.04527228927612305, 0.045605281829833984, 0.045457790374755856, 0.04712572860717774, 0.046021087646484375, 0.04556185531616211, 0.045879295349121094, 0.04540729522705078, 0.045953983306884764, 0.04591820907592774, 0.045176830291748044, 0.0455035514831543, 0.04509286499023438, 0.04515264129638672, 0.045365825653076175, 0.04493926239013672, 0.04514003372192383, 0.04554940795898438, 0.045273185729980465, 0.04518672180175781, 0.04526671981811523, 0.044993087768554686, 0.04627395248413086, 0.046432865142822265, 0.04520345687866211, 0.04528271865844727, 0.04697727966308594, 0.04511164855957031, 0.045483104705810545, 0.04564406585693359, 0.045199329376220704, 0.04541872024536133, 0.044896705627441406, 0.0447957763671875, 0.04462195205688477, 0.04433715057373047, 0.04459628677368164, 0.04487263870239258, 0.044803199768066404, 0.04488796615600586, 0.044553184509277345, 0.04435353469848633, 0.04423680114746094, 0.04415667343139648, 0.04439648056030274, 0.04478598403930664, 0.04522393417358399, 0.045142017364501956, 0.04612473678588867, 0.044568702697753905, 0.044597438812255856, 0.04452761459350586, 0.045385726928710936, 0.044974079132080076, 0.04501504135131836, 0.045063488006591795, 0.044726974487304685, 0.04430847930908203, 0.04420956802368164, 0.04416969680786133, 0.044603519439697266, 0.044771072387695315, 0.0449169921875, 0.04492281723022461, 0.045217857360839844, 0.044539905548095705, 0.04566134262084961, 0.04495167922973633, 0.04460918426513672, 0.04502617645263672, 0.04463411331176758, 0.044371967315673826, 0.044267520904541016, 0.04431257629394531, 0.04558396911621094, 0.04442323303222656, 0.047282176971435545, 0.0449969596862793, 0.04481833648681641, 0.0448328628540039, 0.04500275039672851, 0.04530153656005859, 0.0451995849609375, 0.045158401489257816, 0.04637475204467773, 0.04511350250244141, 0.0454389762878418, 0.044969791412353514, 0.04503571319580078, 0.045125633239746096, 0.04528643035888672, 0.04550857543945312, 0.045489055633544925, 0.04565001678466797, 0.04521564865112305, 0.04527299118041992, 0.04515190505981445, 0.045240863800048825, 0.04516659164428711, 0.045402015686035156, 0.04528889465332031, 0.048081024169921875, 0.04557263946533203, 0.04511673736572266, 0.04535532760620117, 0.04586739349365234, 0.04691763305664062, 0.045281280517578126, 0.04632153701782227, 0.04671062469482422, 0.045299999237060545, 0.047202144622802734, 0.04509014511108399, 0.04582073593139648, 0.04575436782836914, 0.046952449798583984, 0.045100639343261716, 0.0449192008972168, 0.044701694488525394, 0.0444026870727539, 0.044214080810546875, 0.04529097747802734, 0.04500143814086914, 0.045258209228515624, 0.04543952178955078, 0.04529148864746094, 0.04507036972045898, 0.04520140838623047, 0.048330753326416016, 0.047104320526123046, 0.045239936828613284, 0.04628313446044922, 0.048363521575927736, 0.0451932144165039, 0.04600012969970703, 0.04501094436645508, 0.04552908706665039, 0.04534611129760742, 0.04507686233520508, 0.04508089447021484, 0.04514406585693359, 0.0452567024230957, 
0.045107200622558595, 0.04500275039672851, 0.045764606475830076, 0.04653875350952148, 0.04524979019165039, 0.04511616134643555, 0.04517068862915039, 0.04538163375854492, 0.04524236679077148, 0.04501628875732422, 0.04501174545288086, 0.04534476852416992, 0.045238273620605465, 0.04549836730957031, 0.04582783889770508, 0.04542851257324219, 0.04516707229614258, 0.04504304122924805, 0.04544169616699219, 0.045142017364501956, 0.04522598266601562, 0.04520959854125976, 0.045049854278564457, 0.04514406585693359, 0.045174976348876954, 0.04493894577026367, 0.045246593475341795, 0.04531315231323242, 0.045138622283935545, 0.04498041534423828, 0.04532428741455078, 0.04511948776245117, 0.045192798614501956, 0.045217952728271484, 0.0452402229309082, 0.04529391860961914, 0.04562496185302734, 0.04542892837524414, 0.04530531311035156, 0.045378273010253906, 0.04525801467895508, 0.045144798278808594, 0.04526185607910156, 0.04512246322631836, 0.04514003372192383, 0.044969406127929684, 0.04528745651245117, 0.04681372833251953, 0.045284385681152346, 0.045429054260253905, 0.04580966567993164, 0.04506828689575195, 0.04575414276123047, 0.04485894393920899, 0.04458089447021484, 0.045152542114257815, 0.04488131332397461, 0.04488636779785156, 0.04474303817749024, 0.04470937728881836, 0.044735198974609376, 0.044834815979003906, 0.044566463470458985, 0.044507198333740235, 0.04528287887573242, 0.045252384185791014, 0.044867294311523434, 0.044548545837402344, 0.044964351654052735, 0.044644351959228515, 0.04492287826538086, 0.044380126953125, 0.0450662727355957, 0.04528857421875, 0.04559740829467773, 0.04514831924438477, 0.04530176162719726, 0.045395969390869144, 0.045080257415771485, 0.045357376098632815, 0.04534220886230469, 0.04555417633056641, 0.045254657745361325, 0.045176830291748044, 0.04502937698364258, 0.04580556869506836, 0.04537548828125, 0.04529340744018555, 0.048365726470947265, 0.04534457778930664, 0.04582419204711914, 0.045006366729736326, 0.04493155288696289, 0.044834686279296876, 0.04494348907470703, 0.04480195236206055, 0.04480790328979492, 0.045419937133789064, 0.04494409561157227, 0.04646937561035156, 0.04489011383056641, 0.044844062805175784, 0.044781600952148434, 0.044776382446289065, 0.04470742416381836, 0.045623233795166016, 0.045260478973388675, 0.044772129058837894, 0.04487699127197266, 0.04464060974121094, 0.04496022415161133, 0.045197311401367186, 0.04469964981079102]",tokens/s,22.158679992103718,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,873.742336,655.294464,0.0,260.046848,258.555392,s,1,7.44559619140625,7.44559619140625,0.0,7.44559619140625,7.44559619140625,7.44559619140625,7.44559619140625,[7.44559619140625],,kWh,1.4648123000000623e-05,1.6085814022668542e-06,4.526670288003409e-06,2.0783374690270886e-05,,MB,1328.021504,751.763456,0.0,341.835776,317.950464,s,14,0.19269519901275634,0.013763942786625455,0.0004346000602619136,0.013681695938110352,0.013778102111816407,0.01431407971382141,0.01510181740760803,"[0.0137838716506958, 0.013700896263122559, 0.013743647575378418, 0.013534432411193847, 0.013509984016418458, 0.013537280082702637, 0.015298751831054688, 0.013539520263671875, 0.013588768005371094, 0.013641023635864257, 0.01368899154663086, 0.013764639854431153, 0.013680255889892579, 0.013683135986328126]",tokens/s,18599.321718247586,kWh,4.0062819423870005e-07,4.4182161689928466e-08,2.0989309582167692e-07,6.547034517503055e-07,tokens/kWh,391016725.6879451,MB,1367.61344,779.026432,0.0,369.098752,317.953024,s,14,9.901269592285155,0.7072335423060825,0.004373440602804263,0.7081146545410156,0.712881365966797,0.7140843505859376,0.7146861083984375,"[0.7070420532226562, 0.7039707641601562, 0.7092022094726562, 0.709187255859375, 0.7008070678710937, 0.704184814453125, 0.70412060546875, 0.7023123779296875, 0.7011494750976562, 0.7095607299804687, 0.7148365478515625, 0.7101968994140625, 0.7110194702148438, 0.7136793212890625]",tokens/s,89.07948539116988,kWh,2.068407336409532e-05,2.281084263019303e-06,7.4988829124638515e-06,3.0464040539578474e-05,tokens/kWh,2068011.9539018879,,s,882,9.894378210067753,0.011218115884430556,0.00020622448943279577,0.01117680025100708,0.011354713726043702,0.011455970811843873,0.012109231624603268,"[0.011076095581054688, 0.011262368202209473, 0.011347519874572753, 0.011610560417175293, 0.011513855934143067, 0.01123641586303711, 0.011499584197998047, 0.011205504417419434, 0.011307007789611816, 0.011177984237670899, 0.011100255966186523, 0.011169280052185059, 0.011192319869995117, 0.011210271835327149, 0.011164192199707032, 0.011237215995788575, 0.01117849636077881, 0.011163647651672364, 0.011311167716979981, 0.011204544067382813, 0.011051008224487305, 0.011173343658447266, 0.011166239738464355, 0.011132927894592285, 0.011118240356445312, 0.011170144081115723, 0.011259903907775879, 0.011276288032531738, 0.011118656158447265, 0.011115455627441406, 0.011142304420471191, 0.011261792182922363, 0.011211968421936036, 0.011199007987976075, 0.011227423667907716, 0.011177984237670899, 0.011155360221862793, 0.01117807960510254, 0.011374784469604491, 0.011120608329772949, 0.01115664005279541, 0.011430624008178712, 0.011270272254943847, 0.011229023933410645, 0.011132415771484374, 0.0111211519241333, 0.011161151885986329, 0.011190303802490234, 0.011147744178771972, 0.01112451171875, 0.011155872344970704, 0.01120025634765625, 0.011302111625671386, 0.011239263534545898, 0.01121615982055664, 0.011128479957580567, 0.011225088119506836, 0.011241472244262696, 0.01126195240020752, 0.011247008323669434, 0.011185791969299317, 0.011212032318115234, 0.011197919845581055, 0.011031968116760254, 0.011237471580505372, 0.011267775535583497, 0.01126255989074707, 0.011224960327148438, 0.011163999557495117, 0.01113702392578125, 0.011163359642028808, 0.011116671562194824, 0.01107158374786377, 0.011173888206481934, 0.011159968376159669, 0.011200160026550292, 0.011179871559143066, 0.011102368354797363, 0.011190272331237794, 0.011173024177551269, 0.01112559986114502, 
0.011188223838806152, 0.011085887908935547, 0.011153056144714355, 0.011153696060180664, 0.011146944046020507, 0.011168416023254395, 0.01114896011352539, 0.011279552459716798, 0.011195520401000976, 0.011116064071655274, 0.011071647644042969, 0.011290592193603516, 0.011249088287353515, 0.011139776229858398, 0.011100064277648926, 0.0110797119140625, 0.011116512298583984, 0.011142175674438476, 0.011146112442016602, 0.011159903526306152, 0.011112192153930663, 0.011161600112915039, 0.011181407928466797, 0.011204768180847167, 0.011188032150268555, 0.011181056022644043, 0.011270048141479493, 0.01114668846130371, 0.011120320320129395, 0.01110524845123291, 0.011148480415344239, 0.011242336273193359, 0.011175871849060059, 0.011154399871826173, 0.011168512344360351, 0.011154848098754883, 0.011149920463562012, 0.011204031944274903, 0.011136832237243653, 0.011098015785217285, 0.011172703742980957, 0.011195551872253419, 0.011279264450073241, 0.011250975608825684, 0.01114793586730957, 0.010966143608093261, 0.011238271713256837, 0.011218943595886231, 0.011265567779541016, 0.011244000434875489, 0.011204159736633301, 0.011133567810058594, 0.011419455528259277, 0.011125920295715332, 0.011131936073303223, 0.011095423698425293, 0.011121088027954102, 0.011118880271911621, 0.0111626558303833, 0.01117289638519287, 0.011117471694946288, 0.011052831649780274, 0.011135968208312988, 0.011195615768432617, 0.011149248123168946, 0.011452383995056152, 0.011191328048706055, 0.011161727905273438, 0.011196127891540527, 0.011161888122558594, 0.011173312187194823, 0.011148639678955078, 0.011248576164245605, 0.011171551704406738, 0.011141152381896972, 0.01109331226348877, 0.011336640357971191, 0.011114496231079102, 0.01337660789489746, 0.01354543972015381, 0.011299936294555665, 0.011167584419250488, 0.011184127807617187, 0.011161184310913086, 0.011164064407348634, 0.01109347152709961, 0.011108896255493165, 0.011145152091979981, 0.011206208229064941, 0.011151455879211425, 0.01110262393951416, 0.011060640335083008, 0.011184639930725097, 0.011251520156860352, 0.01117737579345703, 0.011222240447998046, 0.011107263565063476, 0.011141856193542481, 0.01123356819152832, 0.0112391357421875, 0.011270144462585448, 0.011194368362426758, 0.011272095680236816, 0.011245120048522949, 0.011156000137329102, 0.011124735832214355, 0.011080896377563477, 0.01120524787902832, 0.01093996810913086, 0.011124959945678711, 0.011298208236694337, 0.011147775650024415, 0.011173983573913575, 0.011179840087890625, 0.011210687637329101, 0.01118671989440918, 0.011111807823181153, 0.011077952384948731, 0.011165727615356445, 0.01123532772064209, 0.011938207626342774, 0.012149951934814452, 0.011880096435546874, 0.011303680419921875, 0.011206975936889648, 0.011251423835754394, 0.011130847930908204, 0.011239423751831054, 0.011309056282043458, 0.011202783584594726, 0.011157279968261718, 0.011116288185119628, 0.011241408348083496, 0.011538496017456054, 0.011747808456420898, 0.011462431907653808, 0.011300864219665528, 0.011228832244873047, 0.011338080406188965, 0.011382399559020996, 0.011356063842773437, 0.011386879920959473, 0.011172320365905762, 0.011101216316223144, 0.011099103927612305, 0.011157343864440918, 0.011126688003540039, 0.011135231971740723, 0.011134783744812011, 0.011132575988769532, 0.011137568473815918, 0.011175647735595703, 0.01107808017730713, 0.011279935836791993, 0.011210399627685547, 0.01121548843383789, 0.011169792175292969, 0.011081727981567382, 0.011184127807617187, 0.011134976387023926, 0.011201696395874023, 0.01105174446105957, 
0.011046079635620118, 0.011806816101074218, 0.011182944297790527, 0.011213055610656738, 0.011204128265380859, 0.011132351875305176, 0.011158304214477539, 0.011150848388671876, 0.011131391525268555, 0.010863072395324707, 0.011168864250183106, 0.011160479545593262, 0.011077024459838868, 0.011189184188842773, 0.011129792213439942, 0.011118464469909668, 0.011073535919189453, 0.01107158374786377, 0.011113216400146484, 0.011132927894592285, 0.011313216209411621, 0.011034560203552247, 0.011116031646728516, 0.011315263748168946, 0.011167807579040527, 0.011116352081298828, 0.011067744255065917, 0.011110336303710937, 0.011093503952026367, 0.011084575653076172, 0.011000991821289063, 0.011123583793640137, 0.011098079681396485, 0.011044992446899413, 0.011036512374877929, 0.011085984230041503, 0.011140992164611816, 0.011111807823181153, 0.011158304214477539, 0.01111638355255127, 0.011155263900756836, 0.011221311569213866, 0.01117081642150879, 0.01122764778137207, 0.011155296325683593, 0.011166239738464355, 0.011141119956970215, 0.01111244773864746, 0.011101920127868653, 0.011090208053588867, 0.01109763240814209, 0.011132991790771485, 0.011082400321960449, 0.011036416053771973, 0.011139072418212891, 0.011122688293457032, 0.011053152084350586, 0.011032416343688964, 0.011058624267578126, 0.011375231742858887, 0.011196415901184082, 0.011112607955932618, 0.011081567764282227, 0.011120991706848144, 0.011097760200500488, 0.011055071830749512, 0.01104435157775879, 0.011052607536315918, 0.011096192359924316, 0.01126863956451416, 0.011036479949951172, 0.011039199829101563, 0.011505023956298828, 0.011086144447326661, 0.011063615798950195, 0.011175775527954101, 0.0111494722366333, 0.01114851188659668, 0.011078432083129882, 0.01113702392578125, 0.01133516788482666, 0.011422143936157227, 0.01111251163482666, 0.011115615844726562, 0.011143487930297851, 0.011248224258422852, 0.011177984237670899, 0.011104255676269532, 0.011069439888000488, 0.011134976387023926, 0.011120832443237305, 0.011114303588867187, 0.011102335929870605, 0.011165568351745606, 0.011216896057128906, 0.011184127807617187, 0.011151040077209472, 0.011112832069396972, 0.011198240280151367, 0.011144831657409668, 0.011121343612670899, 0.011129920005798339, 0.011050848007202149, 0.0111212158203125, 0.011121088027954102, 0.011099424362182617, 0.011043071746826172, 0.011063712120056152, 0.011259903907775879, 0.011105888366699219, 0.011126175880432129, 0.011056127548217774, 0.011358207702636718, 0.011191871643066406, 0.01113475227355957, 0.011155167579650878, 0.01110758399963379, 0.011153311729431152, 0.011134847640991211, 0.011098015785217285, 0.011132479667663574, 0.011072256088256837, 0.011171072006225587, 0.011216608047485352, 0.011193152427673339, 0.011216799736022949, 0.0111627197265625, 0.011145600318908691, 0.011142911911010743, 0.011279135704040528, 0.011077216148376465, 0.011123104095458984, 0.011176063537597656, 0.011224800109863282, 0.011966624259948731, 0.011808511734008788, 0.01118832015991211, 0.011069600105285644, 0.011231231689453124, 0.011163552284240723, 0.011174176216125488, 0.011097184181213379, 0.011164128303527832, 0.011141375541687011, 0.011189472198486329, 0.011144191741943359, 0.01112656021118164, 0.011161600112915039, 0.01144761562347412, 0.011147968292236328, 0.011082847595214844, 0.01121177577972412, 0.011138879776000976, 0.011077280044555665, 0.011066880226135254, 0.01104582405090332, 0.011315199851989746, 0.011132672309875488, 0.011165280342102051, 0.011123359680175781, 0.011280320167541503, 0.011069503784179688, 
0.011101247787475586, 0.01113491153717041, 0.011019264221191406, 0.010992799758911133, 0.011135968208312988, 0.011116415977478027, 0.011171456336975097, 0.011161984443664551, 0.011122688293457032, 0.011292896270751952, 0.011304991722106934, 0.011419679641723633, 0.011161439895629883, 0.011034496307373047, 0.011208703994750976, 0.011116543769836425, 0.011397151947021485, 0.011079744338989258, 0.011035584449768066, 0.01118511962890625, 0.011155455589294434, 0.011366592407226563, 0.011513664245605468, 0.011163552284240723, 0.011213215827941894, 0.011220831871032715, 0.011170944213867188, 0.01108563232421875, 0.01103279972076416, 0.011076288223266601, 0.01105510425567627, 0.011075584411621094, 0.01100595188140869, 0.01112063980102539, 0.01113702392578125, 0.01108902359008789, 0.010907103538513183, 0.011314784049987793, 0.011164608001708985, 0.011249664306640626, 0.011298368453979492, 0.011282719612121583, 0.011314911842346191, 0.011196096420288086, 0.011092512130737305, 0.011155679702758789, 0.011147263526916504, 0.011177184104919434, 0.011105055809020997, 0.011063615798950195, 0.011162400245666504, 0.011164544105529785, 0.011136992454528809, 0.011029727935791015, 0.011107168197631835, 0.011186400413513183, 0.011175935745239257, 0.011142368316650391, 0.0111942720413208, 0.011194751739501954, 0.011288640022277831, 0.01113916778564453, 0.01108950424194336, 0.011071423530578613, 0.01110598373413086, 0.01116204833984375, 0.011046527862548828, 0.011065855979919433, 0.01108579158782959, 0.01112054443359375, 0.011139552116394042, 0.01107968044281006, 0.01108790397644043, 0.011349696159362793, 0.01118015956878662, 0.011167903900146485, 0.011018048286437989, 0.011071680068969726, 0.011235424041748047, 0.011138976097106934, 0.011073535919189453, 0.01103667163848877, 0.011122688293457032, 0.011087871551513672, 0.01115664005279541, 0.011027487754821777, 0.011109760284423828, 0.011119359970092774, 0.011116512298583984, 0.011089632034301758, 0.011040767669677735, 0.011094016075134277, 0.011177311897277832, 0.011127552032470704, 0.011136927604675294, 0.011104191780090332, 0.011220704078674317, 0.011241824150085448, 0.011138208389282226, 0.010908384323120118, 0.011159616470336914, 0.011165696144104004, 0.01115340805053711, 0.011192319869995117, 0.011218976020812989, 0.01112700843811035, 0.011105216026306153, 0.011131711959838867, 0.011130816459655761, 0.01114527988433838, 0.011108351707458495, 0.011064352035522461, 0.011146080017089844, 0.011208831787109375, 0.01113868808746338, 0.011125408172607421, 0.011083488464355468, 0.011105343818664551, 0.011084704399108887, 0.01109216022491455, 0.011091327667236328, 0.011082207679748535, 0.011133119583129883, 0.011097920417785645, 0.01099731159210205, 0.011098560333251952, 0.011212800025939941, 0.011096063613891602, 0.011041088104248047, 0.011042495727539063, 0.01113491153717041, 0.011206720352172852, 0.011126784324645997, 0.011024383544921875, 0.011096384048461914, 0.011095744132995605, 0.011095135688781739, 0.011105216026306153, 0.011025376319885254, 0.011086848258972168, 0.011115551948547363, 0.011121536254882812, 0.011068991661071777, 0.011235872268676757, 0.011185503959655762, 0.011109024047851563, 0.01108137607574463, 0.011168224334716797, 0.011136896133422851, 0.011165696144104004, 0.011222271919250489, 0.011217663764953614, 0.011200032234191894, 0.01117846393585205, 0.011150783538818359, 0.011092543601989746, 0.011091263771057128, 0.011127488136291504, 0.011079232215881347, 0.011140735626220703, 0.011143808364868163, 0.011102272033691405, 0.012161312103271484, 
0.012273664474487305, 0.011263744354248047, 0.011227680206298827, 0.011314463615417481, 0.011237536430358887, 0.011233152389526367, 0.011157695770263671, 0.011190496444702148, 0.011065343856811523, 0.011231231689453124, 0.01112883186340332, 0.011180031776428222, 0.01112883186340332, 0.0110632963180542, 0.011097503662109374, 0.011168064117431641, 0.011129119873046874, 0.01114521598815918, 0.011099200248718261, 0.011150272369384765, 0.011188575744628906, 0.011142687797546387, 0.011032575607299805, 0.011118720054626465, 0.011118528366088867, 0.011081888198852538, 0.011152671813964845, 0.011080320358276368, 0.01113702392578125, 0.011149439811706543, 0.01114896011352539, 0.011104479789733886, 0.011177984237670899, 0.011132224082946778, 0.011179743766784668, 0.011103551864624023, 0.011110239982604981, 0.011153216361999512, 0.011177215576171875, 0.011216768264770508, 0.01143513584136963, 0.011545472145080567, 0.011807519912719727, 0.011300959587097169, 0.011218943595886231, 0.011212608337402345, 0.01118227195739746, 0.011276288032531738, 0.011244640350341797, 0.011350048065185548, 0.0113887996673584, 0.011833791732788085, 0.011519871711730956, 0.011254464149475098, 0.011156831741333008, 0.011253631591796875, 0.011227935791015624, 0.011280384063720703, 0.011255807876586914, 0.0113154878616333, 0.011280096054077149, 0.011364352226257325, 0.010883904457092286, 0.011250592231750489, 0.01125222396850586, 0.011202272415161132, 0.01121459197998047, 0.011183296203613281, 0.011247008323669434, 0.011286591529846192, 0.011441599845886231, 0.011355008125305176, 0.01125699234008789, 0.011212736129760743, 0.011317215919494628, 0.011311103820800781, 0.011316160202026367, 0.011280223846435547, 0.011380895614624024, 0.011268095970153809, 0.011390624046325683, 0.011416095733642578, 0.011365407943725587, 0.01129747200012207, 0.011594079971313476, 0.01141500759124756, 0.011315327644348144, 0.011350175857543946, 0.011337087631225587, 0.011407903671264649, 0.011331680297851563, 0.011286527633666991, 0.011272128105163575, 0.011268320083618165, 0.011337759971618653, 0.01130844783782959, 0.011215104103088378, 0.01117568016052246, 0.01127667236328125, 0.011274271965026856, 0.01133568000793457, 0.011290623664855956, 0.011243519783020019, 0.011265279769897461, 0.011279104232788086, 0.011283552169799805, 0.011302016258239745, 0.011251487731933595, 0.011150367736816406, 0.011314399719238281, 0.011396863937377929, 0.011318431854248048, 0.01128758430480957, 0.011228351593017579, 0.011156000137329102, 0.01125385570526123, 0.01199078369140625, 0.013234175682067872, 0.011904895782470703, 0.011393407821655273, 0.011343615531921387, 0.011288415908813477, 0.011235744476318359, 0.011180031776428222, 0.01113923168182373, 0.011018239974975585, 0.011105824470520019, 0.011176416397094727, 0.011188223838806152, 0.01123465633392334, 0.011199199676513671, 0.011159647941589355, 0.011240832328796387, 0.011211071968078613, 0.011261856079101563, 0.01117414379119873, 0.01112883186340332, 0.01145036792755127, 0.011197823524475098, 0.01116812801361084, 0.011135231971740723, 0.011106623649597167, 0.01111945629119873, 0.011178048133850097, 0.011193087577819824, 0.011187616348266602, 0.01114192008972168, 0.011220831871032715, 0.011202239990234375, 0.011241791725158691, 0.01114521598815918, 0.011069439888000488, 0.011203680038452148, 0.011197279930114747, 0.011151424407958984, 0.011168928146362304, 0.01112067222595215, 0.011201343536376952, 0.011333632469177245, 0.011187616348266602, 0.011180447578430177, 0.011203904151916504, 0.01118502426147461, 
0.011179360389709472, 0.011168448448181153, 0.011125856399536133, 0.011146112442016602, 0.011206015586853028, 0.011204511642456054, 0.011213631629943847, 0.01115135955810547, 0.01116096019744873, 0.011191007614135742, 0.012163071632385255, 0.012752703666687012, 0.01135206413269043, 0.012099679946899413, 0.011315103530883788, 0.011405311584472656, 0.01136844825744629, 0.011192319869995117, 0.01131929588317871, 0.011407360076904297, 0.01141385555267334, 0.011351712226867676, 0.011298368453979492, 0.011217344284057617, 0.011321344375610352, 0.01100217628479004, 0.01123401641845703, 0.011242303848266601, 0.011219136238098145, 0.011225407600402831, 0.011216544151306152, 0.011128255844116211, 0.011269760131835938, 0.011223679542541503, 0.011233856201171875, 0.011245120048522949, 0.011219136238098145, 0.011243519783020019, 0.011286527633666991, 0.011376383781433106, 0.011336064338684082, 0.011370368003845215, 0.011194368362426758, 0.011198047637939454, 0.011262368202209473, 0.011236448287963868, 0.011231295585632324, 0.011147199630737304, 0.011567551612854004, 0.011302495956420898, 0.011270272254943847, 0.011235487937927247, 0.011159199714660645, 0.01115231990814209, 0.011304767608642578, 0.011439807891845703, 0.011469311714172363, 0.011415552139282227, 0.011333696365356445, 0.011376704216003418, 0.011323264122009277, 0.011560959815979004, 0.011286751747131347, 0.011271552085876465, 0.011264512062072754, 0.011220895767211914, 0.011310943603515625, 0.011438207626342773, 0.011380767822265625, 0.011285728454589843, 0.011285311698913574, 0.011253727912902831, 0.011280384063720703, 0.011287903785705567, 0.011309727668762207, 0.011271488189697266, 0.011267871856689453, 0.011264415740966796, 0.011291135787963867, 0.011268095970153809, 0.011257856369018555, 0.01122697639465332, 0.011337216377258302, 0.011264927864074707, 0.011236384391784668, 0.011285216331481933, 0.01122326374053955, 0.011181856155395508, 0.010975008010864257, 0.011221504211425782, 0.011208864212036132, 0.011300671577453613, 0.011297151565551758, 0.011308863639831543, 0.011216575622558594, 0.012291680335998536, 0.011283136367797852, 0.011202272415161132, 0.011284992218017579, 0.01133561611175537, 0.01123423957824707, 0.011239456176757813, 0.01119324779510498, 0.011208800315856934, 0.011213824272155762, 0.011196415901184082, 0.011195199966430664, 0.011102304458618165, 0.011202560424804688, 0.011193408012390136, 0.011226048469543457, 0.01127030372619629, 0.011108192443847656, 0.011165696144104004, 0.011198431968688964, 0.011208767890930177, 0.011136799812316895, 0.011097824096679688, 0.01131340789794922, 0.011186495780944824, 0.011238975524902344, 0.011170368194580078, 0.011097567558288574, 0.011175583839416504, 0.011217568397521973, 0.011306143760681153, 0.011449055671691894, 0.011587712287902832, 0.011535455703735351, 0.01160700798034668, 0.01141708755493164, 0.01141097640991211, 0.011284704208374024, 0.011248671531677246, 0.011232959747314452, 0.011296992301940917, 0.011492159843444824, 0.011616959571838378, 0.011693599700927735, 0.011696864128112792, 0.011456159591674805, 0.0113155517578125, 0.011251999855041503, 0.011301888465881347, 0.01131107234954834, 0.011406399726867677, 0.011568832397460937, 0.011503647804260254, 0.011517056465148926, 0.011509951591491699, 0.011436256408691406]",tokens/s,89.14152878273295,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4293.218304,4878.958592,0.0,4483.710976,4465.672704,s,1,10.81604296875,10.81604296875,0.0,10.81604296875,10.81604296875,10.81604296875,10.81604296875,[10.81604296875],,kWh,0.00010374368459583061,1.1436280005859159e-05,3.232752586200538e-05,0.00014750749046369517,,MB,2153.508864,5302.583296,0.0,4892.655616,4841.339904,s,10,1.967754409790039,0.1967754409790039,0.0007356678215833901,0.19680464935302733,0.19747964477539062,0.1976221923828125,0.19773623046875,"[0.1949957733154297, 0.19676002502441406, 0.1961219482421875, 0.19675791931152345, 0.19684927368164062, 0.19744796752929689, 0.19728854370117188, 0.19710079956054688, 0.19666741943359375, 0.19776473999023436]",tokens/s,1300.97535915224,kWh,5.7379459151963324e-06,6.325698364105242e-07,3.81651176780382e-06,1.0187027519410678e-05,tokens/kWh,25129999.846590154,MB,2159.16544,5470.355456,0.0,5060.427776,5012.931584,s,10,18.726147827148434,1.8726147827148438,0.00503696046827301,1.8736304321289063,1.8771864624023438,1.8791170349121094,1.880661492919922,"[1.8708316650390624, 1.881047607421875, 1.8662344970703124, 1.8717244873046874, 1.872715576171875, 1.8745452880859375, 1.8745848388671875, 1.8767574462890626, 1.8753662109375, 1.8623402099609374]",tokens/s,33.6427975371769,kWh,5.4658738813137084e-05,6.028875793492107e-06,3.62540202885962e-05,9.69416348952254e-05,tokens/kWh,649875.5675834274,,s,630,18.72291741943359,0.02971891653878348,0.0005142623723293799,0.029654208183288573,0.03003931541442871,0.030243571472167968,0.0314014969444275,"[0.030463424682617188, 0.02997715187072754, 0.029487104415893556, 0.029668352127075196, 0.029520896911621092, 0.02987411117553711, 0.029372447967529296, 0.029342784881591796, 0.029365215301513672, 0.029259775161743166, 0.029061119079589845, 0.02939244842529297, 0.02922540855407715, 0.0293787841796875, 0.029474592208862303, 0.029542400360107423, 0.02950262451171875, 0.029974592208862304, 0.02957596778869629, 0.029585407257080077, 0.029521087646484374, 0.029368831634521485, 0.029324928283691407, 0.02948780822753906, 0.029716480255126954, 0.02940108871459961, 0.029578912734985353, 0.029714784622192382, 0.029619359970092775, 0.02945110321044922, 0.029661184310913087, 0.030052352905273437, 0.029980064392089844, 0.029922943115234375, 0.029886911392211914, 0.029596416473388672, 0.02959119987487793, 0.029589632034301757, 0.02946601676940918, 0.029612640380859374, 0.029789663314819335, 0.02962076759338379, 0.029616128921508788, 0.0297205753326416, 0.029644800186157227, 0.029851648330688478, 0.029791711807250976, 0.029909536361694335, 0.029647008895874023, 0.029645919799804688, 0.029611904144287108, 0.029639392852783202, 0.029651103973388673, 0.0299683837890625, 0.029793920516967772, 0.02988889694213867, 0.029861888885498046, 0.02996428871154785, 0.030437023162841796, 0.030390623092651368, 0.0302259521484375, 
0.03008355140686035, 0.030171295166015626, 0.03016499137878418, 0.0297574405670166, 0.02948067283630371, 0.02966352081298828, 0.02950783920288086, 0.029498655319213866, 0.03165232086181641, 0.030357503890991212, 0.029487104415893556, 0.02939904022216797, 0.029232736587524413, 0.029388608932495116, 0.029538719177246094, 0.029498912811279296, 0.029464767456054686, 0.029190303802490235, 0.02938902473449707, 0.02940732765197754, 0.029429471969604493, 0.029329311370849608, 0.030562688827514648, 0.029421567916870117, 0.03015235137939453, 0.029454687118530273, 0.02939289665222168, 0.029599647521972656, 0.029658271789550782, 0.029379520416259765, 0.029703903198242187, 0.029581600189208985, 0.02959494400024414, 0.03010630416870117, 0.03567001724243164, 0.02977996826171875, 0.030162975311279296, 0.02968556785583496, 0.02983923149108887, 0.0302840633392334, 0.02997452735900879, 0.029942815780639648, 0.030874528884887696, 0.02984556770324707, 0.029734527587890625, 0.02974883270263672, 0.029854496002197264, 0.029927200317382812, 0.029898815155029297, 0.029691743850708007, 0.029566688537597655, 0.02968022346496582, 0.02954854393005371, 0.02961395263671875, 0.029661312103271484, 0.029699167251586913, 0.029577695846557617, 0.02983475112915039, 0.029743936538696288, 0.029734432220458986, 0.02995599937438965, 0.030107999801635744, 0.029877983093261718, 0.030009151458740235, 0.029821760177612306, 0.030220544815063477, 0.029968000411987303, 0.029748928070068358, 0.02934351921081543, 0.029317312240600586, 0.029285087585449218, 0.02937446403503418, 0.029433727264404297, 0.029251712799072266, 0.029417472839355467, 0.02936422348022461, 0.029336896896362305, 0.029158079147338867, 0.029367712020874022, 0.02915705680847168, 0.02933145523071289, 0.02951817512512207, 0.02954297637939453, 0.02937446403503418, 0.02937766456604004, 0.029369216918945312, 0.02937651252746582, 0.029489152908325194, 0.029519712448120117, 0.029554847717285157, 0.029666303634643554, 0.029844640731811523, 0.029589344024658203, 0.029580703735351564, 0.02998918342590332, 0.029819168090820313, 0.029626367568969726, 0.02977177619934082, 0.029587200164794922, 0.029681663513183593, 0.029933631896972655, 0.029696191787719727, 0.02948691177368164, 0.029687999725341797, 0.029479936599731447, 0.02957200050354004, 0.029655136108398438, 0.029679616928100585, 0.02964406394958496, 0.02976201629638672, 0.029610240936279297, 0.02959516716003418, 0.029587936401367188, 0.029462528228759766, 0.02960758399963379, 0.02954070472717285, 0.029824480056762696, 0.029671167373657225, 0.029816768646240235, 0.029666080474853515, 0.02974959945678711, 0.02971343994140625, 0.029887071609497072, 0.030062496185302736, 0.030080448150634764, 0.0299748477935791, 0.03041868782043457, 0.02974176025390625, 0.030338783264160157, 0.029975135803222655, 0.029693952560424806, 0.029526079177856445, 0.029370304107666015, 0.029419456481933594, 0.02934121513366699, 0.02937091255187988, 0.029210432052612305, 0.029315263748168945, 0.029294559478759766, 0.029294015884399414, 0.02924995231628418, 0.02929635238647461, 0.02936675262451172, 0.029458431243896483, 0.02953545570373535, 0.029438144683837892, 0.029368928909301758, 0.02937651252746582, 0.029474815368652343, 0.02953215980529785, 0.029687328338623045, 0.029809120178222657, 0.029884416580200194, 0.029740415573120117, 0.029770143508911134, 0.029775999069213868, 0.02981488037109375, 0.0298024959564209, 0.029886240005493163, 0.02981091117858887, 0.02994748878479004, 0.030706079483032226, 0.029882368087768556, 0.029784128189086913, 
0.02962761688232422, 0.02969468879699707, 0.02982089614868164, 0.029847583770751952, 0.029740575790405274, 0.029784032821655274, 0.02967807960510254, 0.02968550491333008, 0.02974745559692383, 0.029691743850708007, 0.029719999313354492, 0.02967625617980957, 0.029716480255126954, 0.02954035186767578, 0.029464000701904296, 0.029786687850952148, 0.029861888885498046, 0.029764896392822267, 0.030147296905517578, 0.029947776794433594, 0.0299234561920166, 0.030175199508666994, 0.029936832427978517, 0.029846368789672853, 0.02987014389038086, 0.029953344345092774, 0.030071168899536132, 0.03019980812072754, 0.03004524803161621, 0.029596607208251954, 0.02947452735900879, 0.02925596809387207, 0.029263168334960937, 0.029259967803955077, 0.02928201675415039, 0.03066540718078613, 0.030205856323242186, 0.02945350456237793, 0.029455360412597657, 0.02936627197265625, 0.029431039810180665, 0.02947465515136719, 0.02964374351501465, 0.02973075294494629, 0.029511680603027345, 0.029404800415039064, 0.029428096771240236, 0.029472768783569334, 0.029739007949829102, 0.029495296478271486, 0.02940108871459961, 0.02942073631286621, 0.029562911987304687, 0.029500192642211914, 0.029483007431030273, 0.029526016235351563, 0.029822368621826172, 0.031152736663818358, 0.03197337532043457, 0.030089088439941406, 0.029808256149291994, 0.02969584083557129, 0.029796735763549805, 0.0296058235168457, 0.029578847885131834, 0.029682432174682617, 0.02958745574951172, 0.029705663681030274, 0.0295503044128418, 0.029843328475952148, 0.029875167846679686, 0.02973801612854004, 0.029682655334472657, 0.029601791381835937, 0.02959116744995117, 0.029606239318847656, 0.029809823989868166, 0.029680511474609376, 0.02960383987426758, 0.029672447204589843, 0.029692928314208986, 0.029892608642578124, 0.029962240219116212, 0.029845504760742186, 0.029849599838256836, 0.029868032455444334, 0.029661184310913087, 0.029749248504638674, 0.029626367568969726, 0.029787872314453127, 0.030157215118408204, 0.029927167892456055, 0.029685375213623046, 0.029537408828735352, 0.029422527313232423, 0.02955923271179199, 0.02944041633605957, 0.02978553581237793, 0.02952016067504883, 0.029628704071044922, 0.029791711807250976, 0.02954044723510742, 0.029623743057250976, 0.029671968460083006, 0.02961984062194824, 0.02972115135192871, 0.029540479660034178, 0.029546655654907227, 0.02968899154663086, 0.02990729522705078, 0.029538047790527343, 0.029409088134765626, 0.029567935943603515, 0.029601568222045897, 0.029661407470703127, 0.029683935165405274, 0.029651840209960936, 0.029665983200073243, 0.030025760650634767, 0.029853887557983398, 0.029638656616210936, 0.030006944656372072, 0.030037631988525392, 0.03003865623474121, 0.029636703491210937, 0.029767295837402345, 0.02947929573059082, 0.029549728393554686, 0.029765727996826172, 0.030672895431518556, 0.02994374465942383, 0.029848384857177734, 0.02997248077392578, 0.02984671974182129, 0.02972502326965332, 0.029561151504516603, 0.029574432373046876, 0.030085599899291993, 0.0296942081451416, 0.02949488067626953, 0.029628576278686522, 0.02966364860534668, 0.029800447463989257, 0.029765920639038088, 0.02967932891845703, 0.029655040740966795, 0.030007295608520508, 0.029787296295166014, 0.02979088020324707, 0.02997881507873535, 0.030000511169433593, 0.03023948860168457, 0.029874048233032226, 0.03025779151916504, 0.029765888214111327, 0.02946409606933594, 0.029403615951538085, 0.029388799667358398, 0.029396991729736328, 0.02940108871459961, 0.02939084815979004, 0.030436704635620117, 0.029624576568603515, 0.02939126396179199, 
0.02950953674316406, 0.02954457664489746, 0.02939695930480957, 0.02944112014770508, 0.029403520584106446, 0.02946124839782715, 0.029705408096313477, 0.029509311676025392, 0.029499391555786132, 0.030738784790039064, 0.03150310325622559, 0.029554496765136717, 0.02950553512573242, 0.029439424514770506, 0.030032032012939452, 0.030224992752075196, 0.029632320404052736, 0.029750463485717773, 0.029819711685180664, 0.02981068801879883, 0.02978611183166504, 0.029845279693603517, 0.029771455764770506, 0.029626848220825196, 0.029556800842285156, 0.030461439132690428, 0.029742975234985352, 0.029591936111450196, 0.029703487396240236, 0.02950035285949707, 0.02961020851135254, 0.029734687805175783, 0.029650943756103516, 0.02982863998413086, 0.02981936073303223, 0.029640703201293944, 0.029689855575561523, 0.029664384841918946, 0.02975993537902832, 0.030023487091064453, 0.029694591522216797, 0.029731840133666993, 0.029682687759399414, 0.02981385612487793, 0.02986422348022461, 0.03003865623474121, 0.03014656066894531, 0.029828384399414064, 0.029839296340942383, 0.03017398452758789, 0.029609535217285158, 0.02949920082092285, 0.030246912002563478, 0.029726720809936522, 0.029381727218627928, 0.02916854476928711, 0.02918809509277344, 0.029148576736450195, 0.029153888702392577, 0.029728511810302734, 0.029505056381225588, 0.02942639923095703, 0.029336959838867187, 0.0294017276763916, 0.029336767196655275, 0.02924012756347656, 0.02956492805480957, 0.029485055923461914, 0.02950137519836426, 0.029974592208862304, 0.0315043830871582, 0.029329408645629884, 0.02969183921813965, 0.02966534423828125, 0.029288063049316405, 0.02972915267944336, 0.02972003173828125, 0.029559328079223634, 0.029476863861083984, 0.02941244888305664, 0.02957814407348633, 0.029460479736328125, 0.029335552215576172, 0.030482431411743165, 0.029553760528564454, 0.029527231216430663, 0.029591264724731444, 0.029560319900512694, 0.02962483215332031, 0.029701919555664064, 0.029538112640380858, 0.029566719055175782, 0.029429952621459962, 0.02949718475341797, 0.029917823791503907, 0.029887775421142578, 0.02993388748168945, 0.02995756721496582, 0.02964784049987793, 0.029601791381835937, 0.029657087326049804, 0.02972211265563965, 0.029694400787353515, 0.029782079696655275, 0.02979430389404297, 0.02979430389404297, 0.02999295997619629, 0.02995622444152832, 0.030277664184570313, 0.030340959548950195, 0.03747840118408203, 0.02978179168701172, 0.029559007644653322, 0.029623359680175782, 0.029634559631347656, 0.03012393569946289, 0.0296092472076416, 0.02927804756164551, 0.02937651252746582, 0.029578432083129883, 0.02956867218017578, 0.029720703125, 0.02976153564453125, 0.02959699249267578, 0.029571775436401368, 0.029493247985839844, 0.029324800491333007, 0.029482656478881836, 0.02980521583557129, 0.029761728286743165, 0.02978767967224121, 0.02975699234008789, 0.029624799728393554, 0.029514144897460938, 0.02951580810546875, 0.029648096084594726, 0.02998147201538086, 0.029929471969604493, 0.029378175735473633, 0.031972063064575194, 0.02960758399963379, 0.0294420166015625, 0.02950761604309082, 0.02973695945739746, 0.02973695945739746, 0.029633920669555665, 0.029600383758544922, 0.029665279388427734, 0.030051488876342774, 0.02972870445251465, 0.02974198341369629, 0.029487104415893556, 0.029531167984008788, 0.02965760040283203, 0.030472415924072266, 0.029853055953979492, 0.029893503189086915, 0.02978611183166504, 0.029616128921508788, 0.02960588836669922, 0.02954444885253906, 0.029560831069946288, 0.029695264816284178, 0.02985763168334961, 0.030048831939697266, 
0.029929792404174805, 0.02985887908935547, 0.02983203125, 0.029681535720825197, 0.030808544158935545, 0.029704992294311523, 0.0297706241607666, 0.02983247947692871, 0.029751327514648436, 0.029704992294311523, 0.029666847229003906, 0.02954083251953125, 0.030657567977905274, 0.030111263275146485, 0.029600223541259765, 0.029256927490234376, 0.02918070411682129, 0.029466272354125977, 0.029208927154541015, 0.029173759460449217, 0.02924742317199707, 0.029104192733764647, 0.029269472122192382, 0.029223455429077148, 0.02936832046508789, 0.029462528228759766, 0.030314367294311525, 0.02930240058898926, 0.02929100799560547, 0.029283903121948243, 0.02921721649169922, 0.02998636817932129, 0.030140800476074217, 0.029898815155029297, 0.02952720069885254, 0.02938697624206543, 0.02930956840515137, 0.029259775161743166, 0.02918809509277344, 0.029489152908325194, 0.029378559112548826, 0.029509632110595704, 0.029530208587646486, 0.029673376083374024, 0.029671039581298828, 0.02971891212463379, 0.029877504348754882, 0.030225151062011717, 0.02975667190551758, 0.02962713623046875, 0.029664928436279298, 0.029505887985229493, 0.02953011131286621, 0.02958131217956543, 0.02954444885253906, 0.029593599319458007, 0.02949497604370117, 0.029495616912841797, 0.029560831069946288, 0.02977916717529297, 0.02965337562561035, 0.02960742378234863, 0.029608863830566406, 0.029591552734375, 0.029585407257080077, 0.029677568435668947, 0.02958060836791992, 0.029636831283569337, 0.029591936111450196, 0.02961008071899414, 0.029714431762695313, 0.029587392807006837, 0.029589567184448242, 0.02960383987426758, 0.02959769630432129, 0.029845312118530275]",tokens/s,33.64860218558069,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7137.8944,7948.075008,0.0,7545.552896,7295.865344,s,1,12.95448046875,12.95448046875,0.0,12.95448046875,12.95448046875,12.95448046875,12.95448046875,[12.95448046875],,kWh,0.00016743833187080856,1.8462341399461568e-05,5.111948534000654e-05,0.00023702015861027666,,MB,3037.04064,8264.74496,0.0,7847.542784,7548.649984,s,10,3.316819793701172,0.3316819793701171,0.0008051522959342814,0.33153733825683596,0.33236585998535156,0.33292252349853513,0.33336785430908206,"[0.33030712890625, 0.33110287475585937, 0.3312548217773438, 0.3316152954101563, 0.33145938110351564, 0.33127334594726565, 0.3322001953125, 0.3318854064941406, 0.3322421569824219, 0.33347918701171875]",tokens/s,771.8236621903865,kWh,9.675818259408333e-06,1.067061854758999e-06,6.422621625548182e-06,1.7165501739715516e-05,tokens/kWh,14913633.395736832,MB,3046.72768,8558.34624,0.0,8141.144064,7829.444096,s,10,27.00395458984375,2.7003954589843753,0.00434515275438668,2.6991160888671875,2.7055049072265627,2.7059224975585936,2.7062565698242187,"[2.694590087890625, 2.69827490234375, 2.694842041015625, 2.699887939453125, 2.69834423828125, 2.696543212890625, 2.704648193359375, 2.706340087890625, 2.705412109375, 
2.70507177734375]",tokens/s,23.329916287036877,kWh,7.888963691225842e-05,8.701633787767827e-06,5.2398730987052615e-05,0.0001399900016870789,tokens/kWh,450032.1397296969,,s,630,26.99833569717408,0.04285450110662552,0.00032490259272692214,0.042846975326538084,0.04316139678955078,0.04328394660949707,0.04414307640075684,"[0.04259209442138672, 0.04243865585327149, 0.04210483169555664, 0.04221747207641602, 0.04262604904174805, 0.04277116775512695, 0.042283294677734375, 0.042336254119873046, 0.042471424102783206, 0.04245913696289062, 0.042625022888183595, 0.04238131332397461, 0.04271420669555664, 0.0428388786315918, 0.043075519561767577, 0.042425662994384765, 0.043162433624267575, 0.042702465057373046, 0.04269279861450195, 0.04245443344116211, 0.042522911071777345, 0.04266649627685547, 0.04287670516967773, 0.04273907089233398, 0.042775390625, 0.04262448120117188, 0.042409664154052736, 0.04240060806274414, 0.04281753540039063, 0.042790912628173826, 0.0429749755859375, 0.04282803344726562, 0.04314726257324219, 0.04255702209472656, 0.04254966354370117, 0.042881023406982424, 0.04253900909423828, 0.04271664047241211, 0.042834400177001956, 0.042942527770996095, 0.04270048141479492, 0.04282988739013672, 0.04271948623657226, 0.04274166488647461, 0.04263081741333008, 0.04264595031738281, 0.04302643203735351, 0.042972320556640624, 0.042833793640136716, 0.043020320892333985, 0.043036670684814454, 0.0429634895324707, 0.04295516967773438, 0.04291923141479492, 0.04295948791503906, 0.04315331268310547, 0.04300163269042969, 0.042987903594970706, 0.043038719177246096, 0.043089313507080076, 0.04316543960571289, 0.04319523239135742, 0.043030529022216796, 0.0427586555480957, 0.04243865585327149, 0.04224204635620117, 0.042444801330566405, 0.04242432022094727, 0.042551296234130856, 0.04247552108764648, 0.04302643203735351, 0.042590145111083985, 0.04233631896972656, 0.04230752182006836, 0.04257798385620117, 0.04260851287841797, 0.04269068908691406, 0.042753406524658207, 0.042601089477539066, 0.04261999893188476, 0.042861473083496096, 0.04269776153564453, 0.042392543792724606, 0.042323841094970706, 0.042657920837402344, 0.04288265609741211, 0.04262934494018555, 0.04261088180541992, 0.04277657699584961, 0.04263727951049805, 0.04253699111938476, 0.042945598602294924, 0.04287993621826172, 0.04463206481933594, 0.043433982849121096, 0.04255478286743164, 0.042543712615966796, 0.042813438415527344, 0.042671585083007814, 0.042861087799072266, 0.04299763107299805, 0.04273139190673828, 0.04280960083007813, 0.04319232177734375, 0.043055103302001956, 0.042889217376708984, 0.04282572937011719, 0.042625022888183595, 0.04300595092773438, 0.043069438934326174, 0.04296636962890625, 0.04298400115966797, 0.042815582275390625, 0.04266774368286133, 0.04296323013305664, 0.04298137664794922, 0.04355865478515625, 0.04296934509277344, 0.04299305725097656, 0.04303523254394531, 0.04293222427368164, 0.04301004791259765, 0.04312630462646484, 0.04359215927124024, 0.043053054809570314, 0.043186046600341796, 0.04250435256958008, 0.04232809448242188, 0.04237516784667969, 0.0425164794921875, 0.042534912109375, 0.04245503997802735, 0.042274494171142575, 0.04260076904296875, 0.04236083221435547, 0.042262527465820314, 0.042423519134521484, 0.04256208038330078, 0.04277478408813477, 0.04284153747558594, 0.04240851211547852, 0.04251372909545898, 0.042603199005126956, 0.04265926361083985, 0.04277920150756836, 0.04269055938720703, 0.04256051254272461, 0.04276044845581055, 0.042574592590332035, 0.042543102264404296, 0.042856449127197264, 0.042491233825683594, 
0.04259088134765625, 0.042968990325927735, 0.042643104553222656, 0.0427047348022461, 0.042699359893798826, 0.0425098876953125, 0.04254585647583008, 0.04298112106323242, 0.04298060989379883, 0.042912513732910156, 0.04252467346191406, 0.042561153411865234, 0.04274214553833008, 0.042858238220214846, 0.04294255828857422, 0.042893470764160155, 0.04271091079711914, 0.0429442253112793, 0.04317839813232422, 0.04290969467163086, 0.04284956741333008, 0.04261552047729492, 0.04298137664794922, 0.04302438354492188, 0.04291923141479492, 0.042840766906738284, 0.04272073745727539, 0.042952606201171875, 0.04302707290649414, 0.042968734741210934, 0.0430978889465332, 0.04304767990112305, 0.04316128158569336, 0.04304089736938477, 0.044859390258789066, 0.042987518310546875, 0.04328348922729492, 0.04323984146118164, 0.04264550399780274, 0.04243999862670898, 0.042472129821777345, 0.04244070434570312, 0.04225228881835937, 0.042412033081054686, 0.0423026237487793, 0.0423199348449707, 0.042575904846191406, 0.04244351959228516, 0.04280115127563477, 0.04265977478027344, 0.04249353790283203, 0.04283168029785156, 0.042423999786376954, 0.04256582260131836, 0.0425494384765625, 0.04248380661010742, 0.04263731384277344, 0.042847774505615235, 0.04264604949951172, 0.04291628646850586, 0.043030529022216796, 0.04295884704589844, 0.04293632125854492, 0.04269055938720703, 0.042626335144042966, 0.04270902252197266, 0.04263801574707031, 0.042858497619628906, 0.04265369415283203, 0.042897407531738284, 0.042979328155517575, 0.04307263946533203, 0.04296086502075196, 0.04296166229248047, 0.04318624114990234, 0.04391289520263672, 0.04291587066650391, 0.04295923233032226, 0.04306249618530274, 0.04304348754882813, 0.04318988800048828, 0.0431907844543457, 0.043147136688232425, 0.04302764892578125, 0.042965953826904296, 0.04276838302612305, 0.04291340637207031, 0.042838401794433593, 0.04301824188232422, 0.04310220718383789, 0.043019584655761715, 0.042994174957275394, 0.04302048110961914, 0.04301372909545898, 0.043090335845947264, 0.042967041015625, 0.04330905532836914, 0.043216766357421874, 0.04323331069946289, 0.043033790588378903, 0.04243539047241211, 0.04284415817260742, 0.042759681701660154, 0.04231423950195313, 0.04243014526367188, 0.04249222564697266, 0.042684417724609375, 0.042336254119873046, 0.042627071380615236, 0.042622783660888675, 0.04266355133056641, 0.042846145629882815, 0.042689151763916015, 0.042590015411376955, 0.042689727783203124, 0.0424376335144043, 0.04258816146850586, 0.04245913696289062, 0.04259603118896484, 0.042794689178466794, 0.042786529541015625, 0.04298640060424805, 0.04268422317504883, 0.04270918273925781, 0.042618881225585936, 0.042840065002441405, 0.04276838302612305, 0.04290707015991211, 0.04290822219848633, 0.04355820846557617, 0.04254966354370117, 0.04276867294311523, 0.042567550659179686, 0.04276435089111328, 0.04303878402709961, 0.04291584014892578, 0.042823680877685545, 0.04265740966796875, 0.042574207305908204, 0.042800769805908204, 0.04286832046508789, 0.04269340896606445, 0.042955936431884764, 0.043079776763916014, 0.04308556747436523, 0.042929153442382816, 0.04288716888427734, 0.04276019287109375, 0.042823680877685545, 0.043046432495117186, 0.043006431579589846, 0.04306124877929687, 0.04355184173583984, 0.04293929672241211, 0.04291788864135742, 0.04271718215942383, 0.04290079879760742, 0.043366302490234376, 0.04304361724853516, 0.04306534576416016, 0.043093505859375, 0.043004417419433595, 0.043072990417480465, 0.04245139312744141, 0.042526111602783204, 0.042412384033203125, 0.0425536003112793, 
0.04248371124267578, 0.04250624084472656, 0.04250419235229492, 0.04248310470581055, 0.0424699821472168, 0.042438014984130856, 0.043200542449951175, 0.04274995040893555, 0.04275436782836914, 0.04252700805664063, 0.042401439666748045, 0.042586463928222656, 0.042534912109375, 0.042893310546875, 0.04268803024291992, 0.04270332717895508, 0.04262297439575195, 0.04258816146850586, 0.04250771331787109, 0.04239007949829102, 0.04257948684692383, 0.042641887664794924, 0.04286003112792969, 0.04259481430053711, 0.042804672241210935, 0.042574337005615234, 0.04261040115356445, 0.0429488639831543, 0.04281967926025391, 0.04268851089477539, 0.04265369415283203, 0.04290758514404297, 0.042958911895751954, 0.042831871032714845, 0.04272700881958008, 0.04275241470336914, 0.04283580780029297, 0.042935871124267576, 0.04324553680419922, 0.04303046417236328, 0.042922687530517575, 0.04308755111694336, 0.04294688034057617, 0.043014144897460936, 0.04290137481689453, 0.04294259262084961, 0.04312063980102539, 0.04315955352783203, 0.04312063980102539, 0.04284620666503906, 0.042843425750732425, 0.042912479400634765, 0.042931743621826175, 0.0431313591003418, 0.04334592056274414, 0.043474945068359375, 0.04318822479248047, 0.042974624633789066, 0.043262561798095706, 0.04300262451171875, 0.04274176025390625, 0.04286873626708984, 0.042614814758300784, 0.042444766998291014, 0.04254924774169922, 0.04248896026611328, 0.04258272171020508, 0.042520767211914064, 0.04319334411621094, 0.042761249542236326, 0.04418332672119141, 0.04269622421264648, 0.04255952072143555, 0.04264204788208008, 0.04289516830444336, 0.04261248016357422, 0.042660289764404294, 0.042763935089111325, 0.042979679107666015, 0.04302438354492188, 0.04300799942016602, 0.04306739044189453, 0.042960830688476566, 0.044146751403808596, 0.04297478485107422, 0.04272377777099609, 0.04254515075683594, 0.042657791137695314, 0.04282089614868164, 0.04274777603149414, 0.04285935974121094, 0.042782718658447266, 0.04282694244384765, 0.042842945098876956, 0.04269036865234375, 0.04276444625854492, 0.042641441345214845, 0.042971134185791016, 0.04263315200805664, 0.042951904296875, 0.04286105728149414, 0.042907840728759764, 0.043067550659179686, 0.042858497619628906, 0.04287849426269531, 0.04306787109375, 0.04315865707397461, 0.043133342742919925, 0.04297280120849609, 0.04300790405273437, 0.04282400131225586, 0.042756641387939456, 0.04309718322753906, 0.04312985610961914, 0.043270145416259766, 0.04313433456420898, 0.04291801452636719, 0.042954399108886716, 0.0442720947265625, 0.04285628890991211, 0.0434672966003418, 0.04328432083129883, 0.04257791900634766, 0.04266796875, 0.042522689819335935, 0.0425533447265625, 0.04309401702880859, 0.04281139373779297, 0.0425984001159668, 0.04239974212646484, 0.04247119903564453, 0.04275836944580078, 0.042813438415527344, 0.0429486083984375, 0.042592254638671875, 0.042710529327392575, 0.04277289581298828, 0.04259849548339844, 0.04284758377075195, 0.04297795104980469, 0.04287823867797851, 0.04280393600463867, 0.04290339279174805, 0.04290780639648437, 0.04261628723144531, 0.0426767692565918, 0.04278825759887695, 0.04297513580322266, 0.04305088043212891, 0.043076416015625, 0.04297225570678711, 0.0429488639831543, 0.04288560104370117, 0.04290755081176758, 0.04271542358398438, 0.04280275344848633, 0.043055553436279294, 0.04283596801757812, 0.042971134185791016, 0.04305920028686523, 0.043122528076171875, 0.043067550659179686, 0.04325344085693359, 0.04299756622314453, 0.04298739242553711, 0.043010688781738284, 0.043140670776367185, 0.04314694213867187, 
0.04294527816772461, 0.04308582305908203, 0.04307539367675781, 0.04314540863037109, 0.04363884735107422, 0.043247550964355466, 0.04308992004394531, 0.04310835266113281, 0.043087646484375, 0.04332479858398437, 0.04341622543334961, 0.04319353485107422, 0.04312575912475586, 0.043278209686279295, 0.04319859313964844, 0.043230369567871095, 0.04343014526367187, 0.042926048278808596, 0.04276271820068359, 0.04336470413208008, 0.04287641525268555, 0.04277008056640625, 0.04242243194580078, 0.04255753707885742, 0.0425494384765625, 0.04257759857177734, 0.04276297760009766, 0.042534912109375, 0.042600223541259766, 0.042633438110351564, 0.04287692642211914, 0.04273356628417969, 0.042759807586669925, 0.043002239227294924, 0.04283801651000976, 0.04268851089477539, 0.042842113494873046, 0.04276428985595703, 0.04286873626708984, 0.04273971176147461, 0.042808513641357425, 0.04280543899536133, 0.04265776062011719, 0.04251465606689453, 0.04262527847290039, 0.04290911865234375, 0.04302214431762695, 0.04294079971313477, 0.04359990310668945, 0.04347343826293945, 0.04290764617919922, 0.042581439971923825, 0.042981952667236326, 0.04310796737670899, 0.04312918472290039, 0.04294863891601562, 0.0429035530090332, 0.04291171264648438, 0.04413407897949219, 0.042893600463867185, 0.04306694412231445, 0.0430546875, 0.0429925422668457, 0.04302755355834961, 0.0430294075012207, 0.04296831893920899, 0.04304111862182617, 0.043008415222167966, 0.04295244979858399, 0.04347724914550781, 0.04287692642211914, 0.04302995300292969, 0.04290617752075195, 0.043050334930419924, 0.04303529739379883, 0.043278335571289066, 0.04317184066772461, 0.04355632019042969, 0.043133472442626955, 0.04307689666748047, 0.04288355255126953, 0.04287916946411133, 0.04247689437866211, 0.042562206268310546, 0.042314785003662106, 0.042664447784423826, 0.042543582916259766, 0.042510337829589843, 0.04256972885131836, 0.042638782501220704, 0.04298124694824219, 0.04281155014038086, 0.04262351989746094, 0.04256972885131836, 0.04281958389282227, 0.042651649475097655, 0.042655071258544924, 0.04274448013305664, 0.04295884704589844, 0.04271718215942383, 0.04282777786254883, 0.04282572937011719, 0.0429936637878418, 0.04270006561279297, 0.042939231872558596, 0.04293209457397461, 0.04271500778198242, 0.042673694610595704, 0.042682975769042966, 0.04257177734375, 0.04295254516601563, 0.04284636688232422, 0.04426444625854492, 0.045511711120605466, 0.042864639282226565, 0.04264956665039062, 0.04294041442871094, 0.04292806243896485, 0.04314300918579102, 0.0429136962890625, 0.04278844833374024, 0.04291167831420899, 0.04289177703857422, 0.0430489616394043, 0.04306304168701172, 0.043016735076904296, 0.0429486083984375, 0.043046913146972655, 0.04293807983398437, 0.04285673522949219, 0.043068862915039065, 0.0429918098449707, 0.04312031936645508, 0.04319302368164062, 0.043218944549560545, 0.04326979064941406, 0.04307523345947266, 0.042816158294677734, 0.04283321762084961, 0.043309791564941406, 0.043292671203613284, 0.043320831298828126, 0.043170143127441406]",tokens/s,23.33477170838876,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 
8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4033.65888,4592.631808,0.0,4190.109696,3918.037504,s,1,10.68051171875,10.68051171875,0.0,10.68051171875,10.68051171875,10.68051171875,10.68051171875,[10.68051171875],,kWh,9.766174387082174e-05,1.0764771314119688e-05,2.902446766399336e-05,0.0001374509828489348,,MB,1760.309248,4678.61504,0.0,4261.412864,4088.623616,s,10,1.8373931121826172,0.18373931121826173,0.0003427441389007619,0.18365309143066405,0.1842168212890625,0.18422572021484374,0.18423283935546875,"[0.18359295654296875, 0.18353219604492188, 0.18401837158203124, 0.1830732421875, 0.18392988586425782, 0.1837005157470703, 0.1834908142089844, 0.184234619140625, 0.18421484375, 0.18360566711425783]",tokens/s,1393.2783262472376,kWh,5.378707559696618e-06,5.929345234782097e-07,3.5481190001090706e-06,9.519761083283898e-06,tokens/kWh,26891431.17777608,MB,1772.158976,4804.44416,0.0,4387.241984,4262.434304,s,10,16.655712646484375,1.6655712646484375,0.002937438722640952,1.665074462890625,1.667884619140625,1.6703179809570312,1.6722646704101563,"[1.6673438720703124, 1.6635850830078125, 1.661226318359375, 1.664418701171875, 1.6727513427734375, 1.6659691162109376, 1.665730224609375, 1.663897705078125, 1.663913330078125, 1.666876953125]",tokens/s,37.82486005682729,kWh,4.829679545780343e-05,5.327144355426299e-06,3.2053437258890594e-05,8.567737707212033e-05,tokens/kWh,735316.6279468234,,s,630,16.649963211059575,0.02642851303342789,0.00028079069042091835,0.026404399871826172,0.026632957458496094,0.026726558780670165,0.027493589057922384,"[0.02642099189758301, 0.026427648544311524, 0.02778940773010254, 0.02677356719970703, 0.026117599487304688, 0.026194656372070312, 0.026247167587280275, 0.026224416732788088, 0.026218719482421875, 0.026219583511352538, 0.026135456085205077, 0.026142751693725586, 0.0261529598236084, 0.026253055572509766, 0.026147071838378905, 0.026169343948364256, 0.026396671295166017, 0.02622870445251465, 0.02630784034729004, 0.026275903701782226, 0.026360544204711914, 0.026369823455810546, 0.02635158348083496, 0.026343679428100585, 0.026429439544677736, 0.026703872680664063, 0.026273632049560548, 0.02643779182434082, 0.026482688903808595, 0.02640246391296387, 0.026360128402709963, 0.026310400009155275, 0.026558752059936522, 0.02634956741333008, 0.026289983749389647, 0.026425535202026368, 0.026595327377319337, 0.026606975555419923, 0.026528383255004884, 0.026320287704467774, 0.02662166404724121, 0.026440576553344728, 0.02645583915710449, 0.02641481590270996, 0.02652191925048828, 0.02639072036743164, 0.026537984848022462, 0.026501056671142578, 0.02645315170288086, 0.026432416915893556, 0.026365375518798827, 0.026462783813476564, 0.026482112884521486, 0.02653001594543457, 0.026447359085083007, 0.02646307182312012, 0.026435583114624024, 0.02651136016845703, 0.026494688034057617, 0.026607391357421874, 0.028824064254760744, 0.02669152069091797, 0.026503231048583983, 0.026404415130615234, 0.026458879470825196, 0.026187231063842773, 0.026139263153076173, 0.02617296028137207, 0.026345247268676757, 0.026245824813842772, 0.026245119094848633, 0.026415103912353514, 0.026441728591918946, 0.026238975524902345, 0.0262873592376709, 0.026288799285888672, 0.026349664688110352, 0.02631884765625, 0.026376192092895507, 0.026285696029663085, 0.02646668815612793, 0.026402816772460938, 0.02643667221069336, 0.026282400131225587, 0.026241567611694334, 
0.02629987144470215, 0.02620879936218262, 0.026402208328247072, 0.02647305679321289, 0.026414495468139648, 0.026371936798095703, 0.026305280685424804, 0.026296192169189454, 0.02644495964050293, 0.02633964729309082, 0.02636835289001465, 0.026247488021850587, 0.02627324867248535, 0.026290719985961913, 0.02648579216003418, 0.02639561653137207, 0.026425344467163086, 0.026359487533569335, 0.026495296478271483, 0.026613759994506835, 0.02650931167602539, 0.02648624038696289, 0.026484384536743164, 0.026500991821289063, 0.026565631866455077, 0.02634752082824707, 0.02632908821105957, 0.026421247482299806, 0.026443775177001954, 0.026521600723266602, 0.02657689666748047, 0.026468128204345704, 0.02655014419555664, 0.02663235282897949, 0.02650115203857422, 0.026503231048583983, 0.0266200008392334, 0.026610944747924806, 0.0265611515045166, 0.026560064315795898, 0.02650783920288086, 0.02635811233520508, 0.026245599746704103, 0.026158208847045897, 0.026088319778442382, 0.02595840072631836, 0.026013696670532226, 0.025954208374023437, 0.02599932861328125, 0.02597644805908203, 0.02607974433898926, 0.026244319915771485, 0.0260382080078125, 0.025954944610595703, 0.02597216033935547, 0.02615171241760254, 0.02608742332458496, 0.026090656280517578, 0.026190271377563478, 0.026144704818725585, 0.026111936569213866, 0.026014240264892578, 0.026066719055175783, 0.02612246322631836, 0.026310400009155275, 0.026196224212646484, 0.026234880447387695, 0.02627174377441406, 0.026388320922851562, 0.026433183670043946, 0.026345983505249023, 0.026425344467163086, 0.02636739158630371, 0.02623753547668457, 0.02635366439819336, 0.02642076873779297, 0.026588800430297852, 0.02654412841796875, 0.026573503494262695, 0.02647238349914551, 0.02638051223754883, 0.027076608657836915, 0.028508159637451173, 0.02640870475769043, 0.026351871490478514, 0.026397920608520507, 0.026323295593261718, 0.026322912216186524, 0.026442207336425782, 0.02654345512390137, 0.02660419273376465, 0.026438943862915038, 0.02640121650695801, 0.026501407623291017, 0.026472448348999023, 0.026484256744384767, 0.026560096740722655, 0.026469247817993164, 0.02671820831298828, 0.02650931167602539, 0.026570751190185548, 0.026529184341430666, 0.02656265640258789, 0.0264135684967041, 0.02648678398132324, 0.026406719207763673, 0.026222240447998046, 0.02622313690185547, 0.026174848556518554, 0.02624985694885254, 0.026326496124267577, 0.02637059211730957, 0.02623868751525879, 0.026453344345092774, 0.026180160522460937, 0.02617545509338379, 0.026317216873168944, 0.026261503219604493, 0.026291423797607422, 0.02634012794494629, 0.026217567443847657, 0.026174367904663084, 0.026120031356811523, 0.026265567779541015, 0.02640438461303711, 0.0263474235534668, 0.026346111297607423, 0.026406335830688476, 0.02630067253112793, 0.026245567321777345, 0.026605567932128905, 0.02627993583679199, 0.026380287170410157, 0.026359807968139647, 0.026241024017333983, 0.026294271469116212, 0.026273088455200197, 0.02629088020324707, 0.026262943267822265, 0.02661027145385742, 0.02647020721435547, 0.026548416137695312, 0.026539167404174804, 0.026527584075927733, 0.026567455291748046, 0.026597600936889648, 0.02689200019836426, 0.026726688385009766, 0.026505216598510743, 0.026606592178344726, 0.026477439880371094, 0.0265032958984375, 0.026458112716674805, 0.026537919998168947, 0.026527488708496094, 0.026492799758911133, 0.026440000534057616, 0.026363391876220704, 0.026351808547973633, 0.026452415466308592, 0.026611263275146485, 0.026571199417114257, 0.026615808486938477, 0.026658336639404298, 
0.02665705680847168, 0.026598783493041993, 0.026577728271484375, 0.026736640930175783, 0.026779584884643555, 0.026286144256591797, 0.026406591415405273, 0.026261823654174805, 0.026279199600219728, 0.02622127914428711, 0.026249120712280274, 0.02618707275390625, 0.02607798385620117, 0.026336896896362306, 0.026351999282836915, 0.026382335662841795, 0.026815935134887694, 0.026290624618530274, 0.026281152725219727, 0.026205120086669923, 0.026134464263916017, 0.026155040740966796, 0.026211679458618162, 0.026267871856689454, 0.02636025619506836, 0.026368032455444335, 0.026439680099487304, 0.026394559860229493, 0.026525760650634767, 0.026763263702392577, 0.026482688903808595, 0.02627174377441406, 0.02634547233581543, 0.026375839233398438, 0.026379743576049806, 0.026275808334350587, 0.02840812873840332, 0.026446271896362304, 0.02655436706542969, 0.026556575775146484, 0.026875904083251953, 0.02943519973754883, 0.027660768508911134, 0.026439104080200195, 0.02662995147705078, 0.026503263473510744, 0.0265798397064209, 0.02653593635559082, 0.026424640655517577, 0.026610368728637694, 0.026755071640014647, 0.026611328125, 0.026521984100341796, 0.026666336059570313, 0.02655299186706543, 0.02651670455932617, 0.02652035140991211, 0.02636595153808594, 0.026431488037109374, 0.026373600006103514, 0.026468639373779298, 0.026603008270263673, 0.02662419128417969, 0.02650374412536621, 0.026793664932250976, 0.026495071411132814, 0.026568063735961912, 0.026611455917358397, 0.026370367050170897, 0.026185504913330077, 0.026157920837402343, 0.02614271926879883, 0.02609561538696289, 0.0261693115234375, 0.026148895263671874, 0.026275487899780275, 0.026382463455200195, 0.026406496047973634, 0.02638502311706543, 0.02634137535095215, 0.02638640022277832, 0.026593311309814453, 0.026739871978759766, 0.02643235206604004, 0.026423295974731444, 0.02629955291748047, 0.026288991928100587, 0.02625334358215332, 0.026334720611572264, 0.02624332809448242, 0.02638368034362793, 0.026399648666381836, 0.02636595153808594, 0.026346559524536132, 0.026385343551635743, 0.02635759925842285, 0.0263472957611084, 0.026422719955444336, 0.026350528717041015, 0.02627529525756836, 0.026216352462768554, 0.02622480010986328, 0.026398464202880858, 0.02641494369506836, 0.02634841537475586, 0.02643721580505371, 0.02639094352722168, 0.026646528244018555, 0.02670182418823242, 0.026394624710083008, 0.026400480270385742, 0.026449823379516603, 0.026648767471313478, 0.026638399124145506, 0.026521215438842772, 0.02654617691040039, 0.02651747131347656, 0.02650771141052246, 0.02662819290161133, 0.02674630355834961, 0.026567232131958007, 0.026529151916503905, 0.026597919464111327, 0.026555679321289063, 0.02670470428466797, 0.02666700744628906, 0.026657791137695314, 0.026498048782348634, 0.02654412841796875, 0.02652169609069824, 0.026302944183349608, 0.02603228759765625, 0.02614886474609375, 0.02613657569885254, 0.02612633514404297, 0.026025983810424806, 0.026019584655761718, 0.026185888290405274, 0.02628166389465332, 0.026136480331420898, 0.02624287986755371, 0.026243776321411134, 0.026421247482299806, 0.026398719787597655, 0.02641641616821289, 0.026288639068603514, 0.02634137535095215, 0.02628611183166504, 0.02641756820678711, 0.02808297538757324, 0.026218528747558593, 0.026125280380249024, 0.026359455108642578, 0.026357215881347658, 0.026376800537109377, 0.026260799407958984, 0.02624947166442871, 0.026327423095703125, 0.026328575134277343, 0.026362560272216798, 0.026328447341918946, 0.026272544860839842, 0.026330591201782227, 0.02635775947570801, 
0.026564895629882814, 0.026450176239013672, 0.02647772789001465, 0.026440160751342773, 0.026495359420776368, 0.026566656112670898, 0.02640239906311035, 0.026302175521850588, 0.026378944396972658, 0.026371328353881836, 0.026413824081420897, 0.026715167999267576, 0.026667999267578124, 0.026648576736450196, 0.02658070373535156, 0.026408544540405275, 0.026466304779052735, 0.026581695556640625, 0.02672640037536621, 0.026466304779052735, 0.02653183937072754, 0.026626047134399415, 0.027074560165405274, 0.027084287643432618, 0.026582975387573243, 0.026767391204833985, 0.026571296691894532, 0.026656768798828126, 0.026639616012573242, 0.02657766342163086, 0.02646563148498535, 0.0262191047668457, 0.026138080596923827, 0.026295936584472657, 0.026077632904052735, 0.026100255966186522, 0.02603183937072754, 0.026120128631591796, 0.02629462432861328, 0.026253087997436524, 0.026378143310546876, 0.02632035255432129, 0.0263985595703125, 0.026473312377929686, 0.026308767318725584, 0.026226688385009765, 0.026162879943847656, 0.026290496826171874, 0.02611404800415039, 0.02608742332458496, 0.026103296279907227, 0.026472959518432617, 0.026525440216064452, 0.026743040084838868, 0.026226943969726562, 0.02613542366027832, 0.026105728149414063, 0.026331743240356444, 0.026273632049560548, 0.026364095687866212, 0.026393983840942385, 0.026489856719970704, 0.02628540802001953, 0.026393247604370118, 0.026435136795043945, 0.02663225555419922, 0.026564992904663087, 0.026527711868286133, 0.026474111557006835, 0.02634998321533203, 0.026334400177001952, 0.02673731231689453, 0.026482847213745116, 0.026420608520507812, 0.026427616119384767, 0.026521343231201172, 0.026619968414306642, 0.02653785514831543, 0.026376895904541016, 0.026561664581298827, 0.026629024505615235, 0.026687488555908204, 0.0267325439453125, 0.02669158363342285, 0.026649728775024414, 0.0264979190826416, 0.026468223571777343, 0.026627296447753905, 0.02664678382873535, 0.026535839080810548, 0.026534656524658203, 0.026744352340698243, 0.026707935333251952, 0.026403327941894532, 0.026300416946411134, 0.026346624374389647, 0.02618867111206055, 0.02610585594177246, 0.026164512634277343, 0.02642812728881836, 0.026472448348999023, 0.026371871948242188, 0.026187999725341797, 0.026234880447387695, 0.026287328720092772, 0.026270496368408204, 0.026369535446166992, 0.026382848739624022, 0.02636150360107422, 0.0263089599609375, 0.026359807968139647, 0.02627993583679199, 0.02656051254272461, 0.026461984634399412, 0.026283712387084962, 0.026238719940185548, 0.026399648666381836, 0.02634441566467285, 0.026139200210571287, 0.026138015747070312, 0.02626041603088379, 0.02612944030761719, 0.026245471954345703, 0.02594207954406738, 0.026090047836303712, 0.02610963249206543, 0.02657004737854004, 0.02661244773864746, 0.026497312545776367, 0.026596479415893555, 0.02648320007324219, 0.026408479690551757, 0.02641391944885254, 0.02647859191894531, 0.02652774429321289, 0.02650111961364746, 0.026402816772460938, 0.026402816772460938, 0.02660710334777832, 0.026556928634643553, 0.02647039985656738, 0.026414688110351563, 0.026562400817871094, 0.026577215194702148, 0.026435840606689454, 0.026394176483154296, 0.026345439910888672, 0.026400800704956054, 0.02676710319519043, 0.02668601608276367, 0.026771583557128907, 0.026681343078613282, 0.026660863876342773, 0.026664543151855468, 0.026912736892700195, 0.026548255920410158, 0.02649087905883789, 0.02652342414855957, 0.02617366409301758, 0.026142047882080077, 0.02615158462524414, 0.02629417610168457, 0.02618377685546875, 0.026300416946411134, 
0.026639904022216797, 0.026405344009399413, 0.026224224090576172, 0.026232608795166014, 0.026702175140380858, 0.026295711517333984, 0.026380287170410157, 0.026329727172851564, 0.026402847290039062, 0.02635593605041504, 0.026544031143188478, 0.02651875114440918, 0.026297216415405274, 0.026317983627319335, 0.026305376052856447, 0.026257408142089843, 0.026372095108032227, 0.026877952575683595, 0.026595327377319337, 0.026253311157226563, 0.026619199752807618, 0.026274240493774415, 0.026621952056884765, 0.026283647537231444, 0.026514047622680663, 0.026599103927612305, 0.026499391555786133, 0.026732095718383787, 0.026519775390625, 0.02652796745300293, 0.02652569580078125, 0.02656198310852051, 0.026422975540161132, 0.02628432083129883, 0.02623958396911621, 0.02629734420776367, 0.026389375686645507, 0.026477888107299806, 0.02645884895324707, 0.026408063888549806, 0.026465248107910157, 0.02647039985656738, 0.026443775177001954, 0.026468351364135743, 0.026359807968139647, 0.026537919998168947, 0.026656831741333008, 0.026513023376464842, 0.026450111389160157, 0.026514751434326172, 0.026604223251342773, 0.02654636764526367, 0.026562559127807618]",tokens/s,37.83792144246474,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1265.750016,1247.674368,0.0,845.152256,799.789056,s,1,8.7531455078125,8.7531455078125,0.0,8.7531455078125,8.7531455078125,8.7531455078125,8.7531455078125,[8.7531455078125],,kWh,3.1421495145824945e-05,3.458656490768654e-06,9.605285461997126e-06,4.4485437098590726e-05,,MB,1497.534464,1298.006016,0.0,880.80384,837.353472,s,10,0.41358710479736327,0.04135871047973633,7.785083290956118e-05,0.04136769485473633,0.041467424774169925,0.04148542518615723,0.04149982551574707,"[0.04150342559814453, 0.0412529296875, 0.04138431930541992, 0.04125433731079101, 0.04136793518066406, 0.041298656463623046, 0.04131622314453125, 0.04137839889526367, 0.04136745452880859, 0.04146342468261719]",tokens/s,6189.748109420071,kWh,1.2410522599752587e-06,1.3686519094566512e-07,8.203137447627021e-07,2.198231195683626e-06,tokens/kWh,116457268.23578572,MB,1534.881792,1298.006016,0.0,880.80384,837.356032,s,10,14.71154150390625,1.4711541503906251,0.007012889482694289,1.4716920776367188,1.47979853515625,1.4818697875976563,1.4835267895507813,"[1.4754423828125, 1.472107666015625, 1.471848876953125, 1.4839410400390625, 1.461985107421875, 1.4707540283203124, 1.4793382568359374, 1.461998291015625, 1.462590576171875, 1.4715352783203124]",tokens/s,42.82352055579768,kWh,4.245536593669341e-05,4.682430028855832e-06,1.732761744023754e-05,6.446541340578678e-05,tokens/kWh,977268.2229374297,,s,630,14.705365638732905,0.02334185022021097,0.0004286187202624672,0.02330601692199707,0.02358928699493408,0.023737878036499023,0.02462349742889405,"[0.02287718391418457, 0.023202816009521485, 0.023357440948486328, 0.02329804801940918, 0.023432735443115235, 0.023331071853637694, 
0.023462112426757813, 0.02333888053894043, 0.023707008361816405, 0.023642879486083984, 0.02349260711669922, 0.023251232147216798, 0.023271135330200195, 0.023353343963623048, 0.02327552032470703, 0.023350847244262694, 0.0234051513671875, 0.023400287628173828, 0.02349247932434082, 0.023361663818359375, 0.0234703369140625, 0.023367488861083984, 0.02327868843078613, 0.02436089515686035, 0.023439519882202147, 0.023450368881225585, 0.02326265525817871, 0.023330368041992188, 0.024656896591186524, 0.023318527221679687, 0.023282880783081054, 0.02406483268737793, 0.023648191452026367, 0.023232576370239257, 0.023279232025146486, 0.023331199645996094, 0.023310335159301757, 0.02330415916442871, 0.023264575958251953, 0.02323923110961914, 0.02398031997680664, 0.023320671081542968, 0.023169919967651366, 0.02319657516479492, 0.023185407638549805, 0.023052288055419923, 0.023218175888061524, 0.023162879943847657, 0.023117824554443358, 0.02333286476135254, 0.02309903907775879, 0.023206207275390626, 0.02365657615661621, 0.026933151245117186, 0.023372032165527343, 0.023180992126464843, 0.02317535972595215, 0.023134048461914063, 0.023124000549316407, 0.02313216018676758, 0.02306800079345703, 0.022882976531982423, 0.02298409652709961, 0.023109439849853516, 0.02310927963256836, 0.023069215774536134, 0.02296793556213379, 0.02321027183532715, 0.022868064880371092, 0.022920448303222655, 0.02311199951171875, 0.023195903778076173, 0.02340678405761719, 0.023367551803588866, 0.02339446449279785, 0.02313827133178711, 0.02330419158935547, 0.023320320129394532, 0.023224576950073243, 0.023240703582763672, 0.023233823776245117, 0.023173503875732422, 0.023037439346313478, 0.022965087890625, 0.023113759994506836, 0.02315158462524414, 0.023216224670410155, 0.023192480087280275, 0.023307584762573243, 0.023290559768676757, 0.02320355224609375, 0.023058719635009765, 0.023636224746704102, 0.023256832122802735, 0.02349056053161621, 0.023625728607177734, 0.02671011161804199, 0.023836576461791992, 0.02366054344177246, 0.023375871658325196, 0.02341993522644043, 0.023407583236694337, 0.02328780746459961, 0.023379968643188476, 0.02339795112609863, 0.02326572799682617, 0.02339967918395996, 0.02331328010559082, 0.02355187225341797, 0.02321718406677246, 0.023357696533203125, 0.023315456390380858, 0.023259872436523436, 0.023450719833374024, 0.023407615661621094, 0.02342380714416504, 0.02356755256652832, 0.02333577537536621, 0.023396415710449217, 0.023286912918090822, 0.023404672622680665, 0.02363209533691406, 0.02368764877319336, 0.0233341121673584, 0.023378496170043946, 0.023124095916748046, 0.023454559326171874, 0.023244863510131837, 0.023224416732788085, 0.02328771209716797, 0.023313568115234374, 0.023313247680664062, 0.02331001663208008, 0.023487808227539063, 0.0232806396484375, 0.02327552032470703, 0.02320908737182617, 0.02330944061279297, 0.023236352920532225, 0.023402496337890624, 0.02328384017944336, 0.02340656089782715, 0.02327756881713867, 0.023216032028198243, 0.023409664154052736, 0.023184480667114257, 0.02314841651916504, 0.023352800369262697, 0.02334396743774414, 0.02353036880493164, 0.02316700744628906, 0.023360319137573242, 0.02332271957397461, 0.023295743942260742, 0.02322265625, 0.02326848030090332, 0.02346460723876953, 0.023513088226318358, 0.02343280029296875, 0.023379648208618164, 0.023552703857421874, 0.023311744689941405, 0.023777408599853514, 0.02333545684814453, 0.023164928436279295, 0.02332467269897461, 0.0231014404296875, 0.023244800567626952, 0.023144287109375, 0.0233822078704834, 0.023320320129394532, 
0.023401952743530272, 0.023272192001342774, 0.023284767150878908, 0.023253599166870118, 0.023152416229248046, 0.02358697509765625, 0.02454172706604004, 0.023375776290893553, 0.023330816268920897, 0.023162527084350584, 0.023310720443725588, 0.02342412757873535, 0.023421791076660155, 0.023461408615112304, 0.023265119552612304, 0.023353248596191405, 0.023564224243164063, 0.023434175491333007, 0.023545856475830077, 0.024078336715698243, 0.02368118476867676, 0.023325664520263672, 0.02323107147216797, 0.023527711868286134, 0.023306304931640626, 0.023343040466308595, 0.02341072082519531, 0.02354595184326172, 0.023643680572509766, 0.023230815887451173, 0.02326937675476074, 0.023357215881347655, 0.0234682559967041, 0.023463935852050782, 0.023443456649780273, 0.023361408233642578, 0.023436992645263673, 0.023419328689575195, 0.023417984008789063, 0.0233123836517334, 0.023259904861450194, 0.023373023986816406, 0.023263200759887696, 0.023139263153076173, 0.023422975540161133, 0.02326323127746582, 0.023495872497558593, 0.02321491241455078, 0.023197696685791015, 0.023076576232910158, 0.02303561592102051, 0.02304240036010742, 0.024704959869384764, 0.023265567779541016, 0.023363744735717774, 0.02337366485595703, 0.023361663818359375, 0.02328563117980957, 0.023297952651977538, 0.023214176177978517, 0.023382015228271484, 0.023427072525024413, 0.02365590476989746, 0.023457759857177733, 0.02339263916015625, 0.02343939208984375, 0.023244031906127928, 0.023522207260131836, 0.02337187194824219, 0.02354515266418457, 0.023701984405517577, 0.023555328369140625, 0.024028032302856446, 0.028065792083740236, 0.025729087829589842, 0.02358675193786621, 0.023653568267822264, 0.023522111892700197, 0.02351923179626465, 0.023502208709716796, 0.023527103424072264, 0.02364409637451172, 0.02348217582702637, 0.023650144577026366, 0.023695775985717774, 0.023617536544799804, 0.023998464584350586, 0.023465471267700197, 0.023291519165039062, 0.02333785629272461, 0.02342911911010742, 0.023318527221679687, 0.02311299133300781, 0.02327631950378418, 0.02312495994567871, 0.02327987289428711, 0.023313119888305665, 0.023267328262329103, 0.023179264068603517, 0.02331648063659668, 0.023346271514892578, 0.023288736343383788, 0.0234019832611084, 0.024428735733032225, 0.023693632125854493, 0.0229529914855957, 0.023411104202270508, 0.0232108154296875, 0.02297350311279297, 0.02295408058166504, 0.02315907287597656, 0.023093568801879884, 0.022956031799316406, 0.023003135681152344, 0.02310553550720215, 0.023037887573242186, 0.02294175910949707, 0.023096416473388674, 0.023008256912231444, 0.02292835235595703, 0.022918079376220705, 0.022820447921752928, 0.022870431900024413, 0.022804479598999023, 0.022971839904785157, 0.02291974449157715, 0.022908319473266603, 0.02297097587585449, 0.02292086410522461, 0.023159135818481447, 0.023188608169555664, 0.022895103454589845, 0.022915456771850588, 0.02270751953125, 0.022887104034423827, 0.023224319458007812, 0.023130111694335938, 0.023156288146972657, 0.023183807373046875, 0.02325075149536133, 0.023110015869140625, 0.02337081527709961, 0.02317353630065918, 0.023138656616210937, 0.024069664001464843, 0.023284191131591796, 0.02325859260559082, 0.023065120697021484, 0.023146495819091797, 0.023326719284057617, 0.02307302474975586, 0.02356425666809082, 0.023395360946655272, 0.02346860885620117, 0.023148799896240236, 0.02336479949951172, 0.023225088119506836, 0.02330134391784668, 0.02329884719848633, 0.02309939193725586, 0.023275423049926757, 0.023396448135375978, 0.02345779228210449, 0.023236543655395507, 
0.0232194881439209, 0.023454463958740235, 0.02319977569580078, 0.02337798309326172, 0.02321129608154297, 0.023149215698242187, 0.023508991241455078, 0.023386112213134767, 0.023317600250244142, 0.0230100154876709, 0.02316652870178223, 0.0230100154876709, 0.02328156852722168, 0.02331827163696289, 0.023353599548339845, 0.023435264587402343, 0.023408639907836915, 0.02330847930908203, 0.023295808792114257, 0.023444799423217772, 0.02344416046142578, 0.02325196838378906, 0.02347315216064453, 0.023336063385009764, 0.02353036880493164, 0.02326300811767578, 0.02321164894104004, 0.02321059226989746, 0.02319875144958496, 0.023237663269042967, 0.023280832290649416, 0.02358963203430176, 0.023422975540161133, 0.023351295471191406, 0.023384063720703126, 0.02325503921508789, 0.023373760223388673, 0.02342006492614746, 0.023391136169433592, 0.02338934326171875, 0.02333161544799805, 0.023427135467529298, 0.024020992279052734, 0.02342585563659668, 0.023388160705566406, 0.023377344131469725, 0.023405120849609374, 0.02344864082336426, 0.023770048141479493, 0.023448736190795898, 0.023546335220336913, 0.023717920303344728, 0.023540063858032225, 0.023541759490966797, 0.02434236717224121, 0.02797558403015137, 0.023533824920654298, 0.023418048858642578, 0.023555072784423828, 0.023602943420410156, 0.023648319244384767, 0.023611391067504883, 0.023749887466430666, 0.023772031784057616, 0.02346963119506836, 0.023513376235961916, 0.02344758415222168, 0.023537664413452147, 0.023453887939453126, 0.023318336486816405, 0.02331443214416504, 0.02365779113769531, 0.023360191345214845, 0.02324406433105469, 0.023394975662231445, 0.023279104232788086, 0.023357759475708006, 0.02322831916809082, 0.02319385528564453, 0.023287904739379882, 0.02327756881713867, 0.023127328872680663, 0.023181440353393555, 0.023235296249389647, 0.02326848030090332, 0.02337664031982422, 0.023179264068603517, 0.02305135917663574, 0.02322230339050293, 0.02310028839111328, 0.023701343536376953, 0.023225503921508787, 0.02307379150390625, 0.02335436820983887, 0.02329020881652832, 0.023141023635864257, 0.023166976928710937, 0.0232857608795166, 0.023262432098388672, 0.023232608795166015, 0.023386335372924803, 0.02322096061706543, 0.023133951187133787, 0.023233760833740236, 0.023212831497192384, 0.02391200065612793, 0.02324665641784668, 0.023355583190917968, 0.024237119674682617, 0.023126815795898436, 0.02307695960998535, 0.023071008682250975, 0.02336131286621094, 0.0232704963684082, 0.022962495803833007, 0.022876224517822265, 0.022829599380493164, 0.022974464416503908, 0.022849184036254883, 0.022906944274902342, 0.023134496688842772, 0.022978271484375, 0.02299833679199219, 0.022827295303344725, 0.023076927185058594, 0.022921215057373046, 0.022801023483276367, 0.022859584808349608, 0.02296633529663086, 0.023062911987304688, 0.02298240089416504, 0.023240671157836915, 0.023172735214233398, 0.02314854431152344, 0.02314691162109375, 0.022909088134765623, 0.02296406364440918, 0.023068672180175782, 0.022943328857421875, 0.022913440704345703, 0.023017471313476562, 0.022919424057006837, 0.022869760513305665, 0.022956064224243164, 0.022888063430786133, 0.022835039138793947, 0.02280703926086426, 0.023000383377075197, 0.023419008255004883, 0.02412828826904297, 0.023764768600463868, 0.023138303756713868, 0.02342278480529785, 0.02357472038269043, 0.02335875129699707, 0.023486719131469727, 0.023810527801513673, 0.023483903884887695, 0.02349126434326172, 0.023361536026000978, 0.023469024658203125, 0.02337673568725586, 0.023381664276123048, 0.023461408615112304, 
0.0236879997253418, 0.02353923225402832, 0.02352924728393555, 0.023604127883911134, 0.023344127655029297, 0.02347417640686035, 0.023371776580810546, 0.02326527976989746, 0.023162399291992188, 0.023337440490722658, 0.023369951248168944, 0.023082208633422852, 0.024178720474243163, 0.02422563171386719, 0.023165632247924804, 0.023027711868286133, 0.02295187187194824, 0.023079168319702147, 0.022970176696777343, 0.022781152725219727, 0.022984672546386718, 0.022776639938354493, 0.022994207382202148, 0.022999231338500976, 0.022839839935302735, 0.023102527618408204, 0.023036415100097657, 0.02296793556213379, 0.02291935920715332, 0.02302012825012207, 0.02299715232849121, 0.022843263626098634, 0.022798336029052735, 0.02314854431152344, 0.02317900848388672, 0.02304595184326172, 0.023297792434692384, 0.023069375991821288, 0.023023616790771483, 0.023087295532226562, 0.022968128204345704, 0.023219392776489257, 0.02310553550720215, 0.022956159591674803, 0.023186111450195314, 0.023439008712768553, 0.0233636474609375, 0.02332700729370117, 0.02337721633911133, 0.023295711517333985, 0.023272415161132813, 0.023222272872924804, 0.023723199844360353, 0.023321407318115234, 0.023135711669921875, 0.023306783676147462, 0.023394304275512694, 0.023214080810546874, 0.023371103286743165, 0.0233908805847168, 0.023443456649780273, 0.02336332893371582, 0.023311840057373048, 0.023301952362060546, 0.023268447875976563, 0.02350172805786133, 0.023263456344604493, 0.023339359283447266, 0.023194463729858398, 0.023183359146118163, 0.023330879211425782, 0.023272895812988283, 0.02331715202331543, 0.02321766471862793, 0.02403158378601074, 0.023178495407104493, 0.023271360397338868, 0.023372320175170897, 0.023283552169799805, 0.02371014404296875, 0.023343103408813477, 0.02350476837158203, 0.02353324890136719, 0.023376319885253908, 0.02337945556640625, 0.023319040298461914, 0.023305728912353517, 0.02339072036743164, 0.02336934471130371, 0.023515039443969727, 0.02324323272705078, 0.023436576843261718, 0.023359487533569336, 0.023882463455200197, 0.02337753677368164, 0.023271808624267577, 0.02341417694091797, 0.023375743865966796, 0.02339913558959961, 0.023186975479125977, 0.023180992126464843, 0.02331510353088379, 0.023410272598266602, 0.023493152618408203, 0.023422719955444336, 0.023386623382568358, 0.023239519119262694, 0.023269311904907226, 0.023246912002563475, 0.0232825927734375, 0.02332057571411133, 0.023339008331298827, 0.023375423431396484, 0.023312063217163087, 0.02330700874328613, 0.023334911346435547, 0.023288032531738282, 0.02369513511657715, 0.023197280883789063, 0.02322649574279785, 0.023262752532958984, 0.023288576126098633, 0.023287424087524412, 0.023164384841918944, 0.02325779151916504, 0.02358924865722656, 0.02336966323852539, 0.023195552825927734, 0.023142623901367187, 0.02316828727722168, 0.02319740867614746]",tokens/s,42.841505303385574,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,2227.376128,2551.119872,0.0,2155.872256,2032.413184,s,1,8.578361328125,8.578361328125,0.0,8.578361328125,8.578361328125,8.578361328125,8.578361328125,[8.578361328125],,kWh,4.871220372082234e-05,5.366284855721087e-06,1.557001245600098e-05,6.96485010325444e-05,,MB,2285.416448,2827.943936,0.0,2418.016256,2280.154112,s,10,0.9276774215698241,0.09276774215698241,0.00017955024401048356,0.09279806518554687,0.09292477111816407,0.09296712341308594,0.09300100524902344,"[0.09300947570800781, 0.09272541046142578, 0.0928094711303711, 0.09280976104736328, 0.09272265625, 0.09277798461914062, 0.09291535949707032, 0.09228800201416015, 0.09278665924072266, 0.0928326416015625]",tokens/s,2759.5799363834303,kWh,2.754002014937018e-06,3.0371625865989896e-07,1.822247264924521e-06,4.879965538521438e-06,tokens/kWh,52459386.850007236,MB,2300.915712,2911.830016,0.0,2501.902336,2389.055488,s,10,18.30338610839844,1.8303386108398434,0.009095563549333042,1.8309453735351562,1.8442125366210937,1.8443202575683595,1.844406434326172,"[1.831056396484375, 1.831028076171875, 1.82028466796875, 1.8239434814453126, 1.8308626708984375, 1.838055908203125, 1.844427978515625, 1.822085693359375, 1.81745263671875, 1.8441885986328126]",tokens/s,34.41986068965278,kWh,5.320215330131275e-05,5.867637290272799e-06,2.6927331290275196e-05,8.599712188186075e-05,tokens/kWh,732582.6565050255,,s,630,18.300550205230714,0.029048492389255103,0.0004858272422063414,0.028922767639160157,0.02944813804626465,0.02959853448867798,0.030944135189056407,"[0.02943180847167969, 0.029294719696044923, 0.02919411277770996, 0.029276031494140625, 0.029212703704833986, 0.029059167861938476, 0.028839935302734376, 0.02882691192626953, 0.028779232025146484, 0.02880905532836914, 0.029033632278442384, 0.029175840377807617, 0.029332447052001952, 0.029549888610839844, 0.029122304916381837, 0.028951040267944338, 0.028757568359375, 0.028894079208374022, 0.028909568786621095, 0.03006025505065918, 0.02959916877746582, 0.029191007614135744, 0.0290119686126709, 0.028915712356567383, 0.029038591384887694, 0.029081567764282227, 0.029542207717895508, 0.02938630485534668, 0.029187776565551757, 0.028896223068237303, 0.028833728790283203, 0.028825664520263673, 0.02881852722167969, 0.029664159774780274, 0.02893414306640625, 0.028911615371704103, 0.02873958396911621, 0.028927135467529296, 0.028894048690795898, 0.02952729606628418, 0.028877567291259766, 0.02876006317138672, 0.028732608795166016, 0.02890220832824707, 0.028848127365112306, 0.028872703552246092, 0.028833152770996094, 0.028920448303222657, 0.029054975509643553, 0.028829439163208008, 0.028819711685180664, 0.0288973445892334, 0.028861919403076173, 0.02900009536743164, 0.029076896667480468, 0.029157440185546876, 0.029561439514160157, 0.029016063690185546, 0.028785951614379884, 0.028951263427734374, 0.029267520904541017, 0.029432256698608397, 0.028886432647705077, 0.029036224365234373, 0.030164960861206055, 0.028903360366821288, 0.028950368881225586, 0.02885696029663086, 0.028719295501708986, 0.028641151428222655, 0.028889055252075194, 0.02889129638671875, 0.02881331253051758, 0.028809215545654295, 0.028907520294189453, 0.028837440490722656, 0.0287523193359375, 0.029069311141967775, 0.028837343215942383, 0.02945484733581543, 0.031023136138916017, 0.029276159286499022, 0.029009920120239258, 0.02879283142089844, 0.029199359893798828, 0.02997478485107422, 0.029546911239624024, 0.029472511291503908, 0.029121120452880858, 0.028874752044677734, 0.028737152099609375, 
0.028921567916870117, 0.029080223083496094, 0.028868608474731446, 0.02910207939147949, 0.028899328231811523, 0.029523935317993164, 0.02883731269836426, 0.0288035831451416, 0.02881283187866211, 0.028895456314086913, 0.029047136306762696, 0.02931065559387207, 0.031068479537963867, 0.028968448638916015, 0.028807680130004884, 0.028825279235839843, 0.02878665542602539, 0.02894416046142578, 0.02879302406311035, 0.02885875129699707, 0.028817407608032225, 0.028862464904785157, 0.028915712356567383, 0.02955232048034668, 0.02905891227722168, 0.02889299201965332, 0.02879759979248047, 0.02895871925354004, 0.029109439849853515, 0.02884281539916992, 0.028940288543701172, 0.0289751033782959, 0.028804927825927733, 0.028771615982055663, 0.02875254440307617, 0.02896281623840332, 0.028829984664916992, 0.028855968475341796, 0.02885830307006836, 0.028848255157470703, 0.028841663360595703, 0.028846368789672852, 0.028886112213134765, 0.028799936294555663, 0.028869888305664063, 0.02905369567871094, 0.028991487503051756, 0.028866559982299804, 0.028839935302734376, 0.028729055404663088, 0.028987680435180664, 0.028716480255126953, 0.02877907180786133, 0.028594175338745118, 0.03075071907043457, 0.02903603172302246, 0.029235712051391603, 0.029016063690185546, 0.02875961685180664, 0.028838336944580076, 0.028931520462036134, 0.028800991058349608, 0.028840543746948243, 0.028703935623168947, 0.028672704696655272, 0.028695680618286132, 0.02870368003845215, 0.029064416885375977, 0.028760927200317383, 0.028868608474731446, 0.029007871627807616, 0.028868608474731446, 0.028733440399169922, 0.028794719696044923, 0.028857631683349608, 0.028856191635131836, 0.028688703536987305, 0.028768287658691407, 0.02869862365722656, 0.028672351837158203, 0.0287869758605957, 0.029167648315429687, 0.02930796813964844, 0.02910713577270508, 0.029079551696777343, 0.029050880432128907, 0.02892367935180664, 0.028672224044799806, 0.028819456100463867, 0.028765695571899414, 0.02884783935546875, 0.028725856781005858, 0.028778688430786133, 0.028727296829223634, 0.029040639877319335, 0.0287825927734375, 0.02870681571960449, 0.028940223693847657, 0.029313760757446287, 0.029163135528564452, 0.028950431823730468, 0.02893667221069336, 0.028984447479248047, 0.02883628845214844, 0.02894304084777832, 0.028673791885375978, 0.028875040054321288, 0.028698335647583006, 0.0289355525970459, 0.02905766487121582, 0.028846080780029298, 0.0294072322845459, 0.028639232635498047, 0.02874982452392578, 0.028773567199707032, 0.02876089668273926, 0.02877644729614258, 0.02874982452392578, 0.028831743240356447, 0.02880512046813965, 0.028854272842407228, 0.02919424057006836, 0.02954649543762207, 0.029287584304809572, 0.02902716827392578, 0.029222911834716796, 0.029340927124023437, 0.02883865547180176, 0.02870848083496094, 0.028748159408569337, 0.03018956756591797, 0.029814783096313476, 0.02876380729675293, 0.028768224716186522, 0.02866988754272461, 0.02881760025024414, 0.02887295913696289, 0.02876006317138672, 0.028720447540283203, 0.028992191314697265, 0.02874777603149414, 0.0288275203704834, 0.028780031204223632, 0.028924543380737303, 0.02879267120361328, 0.02886841583251953, 0.02874166488647461, 0.02866003227233887, 0.028643264770507812, 0.02887411117553711, 0.028751903533935547, 0.028829568862915038, 0.02882537651062012, 0.029029375076293946, 0.02882745552062988, 0.02883603286743164, 0.02883516883850098, 0.02903856086730957, 0.0289553279876709, 0.02900534439086914, 0.030042591094970702, 0.02873472023010254, 0.028759967803955077, 0.02880556869506836, 0.02932383918762207, 
0.029491199493408202, 0.02877644729614258, 0.028849760055541993, 0.028862464904785157, 0.03125494384765625, 0.02920003128051758, 0.02909833526611328, 0.028989280700683594, 0.028737119674682617, 0.028869184494018554, 0.028882944107055664, 0.028853759765625, 0.028860736846923828, 0.028928192138671874, 0.028736671447753905, 0.028717920303344725, 0.02860201644897461, 0.028817760467529298, 0.028688383102416993, 0.02901126480102539, 0.02911097526550293, 0.029014015197753908, 0.028825439453125, 0.02891107177734375, 0.028796640396118164, 0.028784799575805663, 0.029544384002685546, 0.036940128326416015, 0.029493471145629883, 0.028904863357543945, 0.028780895233154295, 0.028830175399780274, 0.028880992889404298, 0.028686336517333984, 0.02892185592651367, 0.028860416412353516, 0.02879897689819336, 0.028738752365112304, 0.02867487907409668, 0.02875801658630371, 0.028951839447021486, 0.02926665687561035, 0.028874015808105467, 0.0289835205078125, 0.02876374435424805, 0.02889411163330078, 0.028792255401611327, 0.028848512649536133, 0.028782079696655274, 0.028748416900634767, 0.02877241516113281, 0.028864320755004884, 0.02865171241760254, 0.02873958396911621, 0.02876825523376465, 0.029011520385742187, 0.02883014488220215, 0.029108224868774416, 0.028819456100463867, 0.02881878471374512, 0.02864387130737305, 0.028591680526733398, 0.02871353530883789, 0.028797279357910155, 0.028694175720214845, 0.028898624420166014, 0.02875257682800293, 0.028929407119750977, 0.028835840225219726, 0.028811040878295897, 0.028881664276123046, 0.02894857597351074, 0.028895231246948243, 0.02922844886779785, 0.029243200302124024, 0.028963647842407226, 0.029077472686767577, 0.028993120193481447, 0.028977567672729493, 0.02887049674987793, 0.029382816314697267, 0.028753055572509765, 0.028930912017822264, 0.029027999877929686, 0.029206207275390625, 0.029145759582519533, 0.029572608947753907, 0.02947942352294922, 0.029347232818603516, 0.029475296020507812, 0.02932134437561035, 0.029250751495361327, 0.029616960525512694, 0.02953625679016113, 0.02938047981262207, 0.02939686393737793, 0.02956723213195801, 0.029287424087524414, 0.02934272003173828, 0.029730335235595703, 0.029501920700073243, 0.02932310485839844, 0.029247648239135744, 0.029237056732177736, 0.02942790412902832, 0.02936832046508789, 0.02973695945739746, 0.02937446403503418, 0.029667327880859375, 0.02939904022216797, 0.02949839973449707, 0.029408224105834962, 0.029203872680664062, 0.029153888702392577, 0.029136831283569337, 0.029134016036987304, 0.029005727767944335, 0.028773344039916993, 0.028947807312011718, 0.028916128158569337, 0.029169919967651368, 0.029746400833129884, 0.029960031509399413, 0.029597759246826172, 0.029247488021850586, 0.029406368255615236, 0.029373279571533205, 0.02925676727294922, 0.029143104553222655, 0.028918655395507812, 0.0293703670501709, 0.028927072525024414, 0.028935007095336914, 0.029185760498046876, 0.029462879180908202, 0.029574432373046876, 0.03015545654296875, 0.029418624877929688, 0.029102655410766603, 0.02917616081237793, 0.029077503204345705, 0.029276159286499022, 0.02920022392272949, 0.028981407165527343, 0.02894374465942383, 0.029210592269897463, 0.02919491195678711, 0.02896895980834961, 0.02889727973937988, 0.028891136169433593, 0.028887039184570314, 0.029453472137451173, 0.0311297607421875, 0.02952262306213379, 0.029450239181518553, 0.029207647323608397, 0.029254560470581056, 0.02985536003112793, 0.029393056869506835, 0.029397216796875, 0.02918400001525879, 0.029409120559692383, 0.029444095611572265, 0.029116575241088866, 
0.028990848541259766, 0.029543039321899413, 0.02898739242553711, 0.029035968780517576, 0.029106592178344725, 0.029012128829956053, 0.028770303726196288, 0.028823360443115235, 0.02880940818786621, 0.028876800537109375, 0.028815359115600587, 0.02878873634338379, 0.031192735671997072, 0.030237024307250976, 0.029025312423706054, 0.02884048080444336, 0.029639104843139648, 0.029106176376342774, 0.029112319946289062, 0.029073408126831055, 0.02879078483581543, 0.02909721565246582, 0.029363616943359375, 0.029245759963989256, 0.02936832046508789, 0.028880895614624022, 0.029038591384887694, 0.028872703552246092, 0.02886841583251953, 0.02879302406311035, 0.028862464904785157, 0.028669952392578125, 0.02893619155883789, 0.028848127365112306, 0.028884992599487305, 0.029263231277465822, 0.029319807052612303, 0.029181215286254884, 0.029143775939941406, 0.028882047653198243, 0.028768224716186522, 0.028767135620117186, 0.028757823944091796, 0.028852415084838868, 0.028818687438964843, 0.02888755226135254, 0.02869264030456543, 0.028721248626708985, 0.02874163246154785, 0.02905606460571289, 0.02893305587768555, 0.029087135314941406, 0.02910063934326172, 0.029027360916137696, 0.02880406379699707, 0.02876416015625, 0.02879897689819336, 0.02904473686218262, 0.02897100830078125, 0.029220863342285155, 0.028853504180908204, 0.028697343826293947, 0.028698400497436525, 0.028702207565307617, 0.028766944885253908, 0.02874339294433594, 0.028920095443725587, 0.028851232528686523, 0.028895519256591798, 0.028656320571899416, 0.028822656631469726, 0.029061376571655275, 0.0291146240234375, 0.02886284828186035, 0.028704767227172853, 0.028655071258544922, 0.028712703704833985, 0.02876905632019043, 0.028753440856933595, 0.028824031829833983, 0.029343263626098633, 0.02919830322265625, 0.02906982421875, 0.028788608551025392, 0.02924435234069824, 0.029014015197753908, 0.02871903991699219, 0.028768320083618164, 0.028835840225219726, 0.028844032287597656, 0.028984447479248047, 0.02877529525756836, 0.028747392654418946, 0.0287010555267334, 0.028678144454956055, 0.028785791397094727, 0.02878963279724121, 0.029009920120239258, 0.02896691131591797, 0.028913280487060548, 0.0288155517578125, 0.028824960708618164, 0.028809471130371092, 0.02891993522644043, 0.0289366397857666, 0.028841663360595703, 0.028704608917236328, 0.02871548843383789, 0.028895008087158204, 0.02865558433532715, 0.02913920021057129, 0.029070335388183592, 0.028877824783325196, 0.028620800018310546, 0.028626943588256838, 0.02860598373413086, 0.028694271087646484, 0.02865385627746582, 0.028935840606689453, 0.028863264083862306, 0.029314367294311524, 0.029194719314575197, 0.02916988754272461, 0.029120512008666992, 0.0294021110534668, 0.029612672805786132, 0.028909759521484377, 0.02880441665649414, 0.028699520111083985, 0.02862678337097168, 0.02866320037841797, 0.028770624160766603, 0.028602815628051757, 0.028807167053222657, 0.02855936050415039, 0.028667903900146483, 0.028674047470092775, 0.028733440399169922, 0.029122560501098634, 0.02879475212097168, 0.028607999801635742, 0.029067903518676757, 0.02854707145690918, 0.02866899108886719, 0.028566463470458985, 0.02870841598510742, 0.028795263290405274, 0.02898905563354492, 0.02886899185180664, 0.028837888717651368, 0.0290119686126709, 0.02893619155883789, 0.028786687850952147, 0.02870889663696289, 0.02864944076538086, 0.02874982452392578, 0.028868608474731446, 0.028704767227172853, 0.029014015197753908, 0.02894438362121582, 0.029063167572021483, 0.02954582405090332, 0.029067935943603514, 0.029128095626831055, 
0.02946928024291992, 0.029190143585205077, 0.029327360153198243, 0.029224672317504884, 0.031176992416381836, 0.03018342399597168, 0.029560831069946288, 0.029593599319458007, 0.029409183502197265, 0.029389184951782228, 0.029260704040527344, 0.02939974403381348, 0.029171424865722655, 0.029401504516601562, 0.029439456939697267, 0.02931065559387207, 0.029184864044189452, 0.029104127883911132, 0.029243200302124024, 0.029137088775634767, 0.029261823654174804, 0.029328800201416014, 0.029439712524414064, 0.02920537567138672, 0.02915123176574707, 0.029046783447265623, 0.03036774444580078, 0.03038412857055664, 0.02966102409362793, 0.0293090877532959, 0.029360128402709962, 0.029265920639038087, 0.029871103286743163, 0.02896143913269043, 0.02935433578491211, 0.029003135681152342, 0.029141471862792968, 0.029026464462280275, 0.029079551696777343, 0.02917136001586914, 0.029065568923950194, 0.0291429443359375, 0.029447904586791994, 0.02928883171081543, 0.029286399841308593, 0.029212671279907225]",tokens/s,34.425194485132565,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1170.219008,1101.98784,0.0,706.740224,681.6384,s,1,7.8936787109375,7.8936787109375,0.0,7.8936787109375,7.8936787109375,7.8936787109375,7.8936787109375,[7.8936787109375],,kWh,3.011512195416799e-05,3.3143799531649245e-06,8.991673859995442e-06,4.242117576732835e-05,,MB,1514.057728,1406.07488,0.0,996.1472,949.238272,s,10,0.3091976623535156,0.03091976623535156,0.0008538072460815612,0.03063159942626953,0.031216042327880857,0.032319124603271486,0.03320159042358398,"[0.03048031997680664, 0.030715423583984373, 0.030547775268554688, 0.03097091293334961, 0.030498783111572267, 0.03342220687866211, 0.0304355525970459, 0.03088038444519043, 0.030446239471435547, 0.03080006408691406]",tokens/s,8279.493384633257,kWh,8.967287174462902e-07,9.889336138598275e-08,5.504401131533824e-07,1.5460621919856557e-06,tokens/kWh,165581954.80558982,MB,1547.411456,1414.463488,0.0,1004.535808,949.240832,s,10,15.011969116210938,1.501196911621094,0.006065398363650671,1.4994981079101564,1.5073589721679688,1.5110018737792967,1.5139161950683593,"[1.514644775390625, 1.494724853515625, 1.4974677734375, 1.5023614501953124, 1.5009422607421874, 1.4954993896484374, 1.498053955078125, 1.5062763671875, 1.4954488525390626, 1.5065494384765625]",tokens/s,41.96651319510666,kWh,4.3876149645470506e-05,4.839156124163211e-06,1.7077490655846606e-05,6.579279642548033e-05,tokens/kWh,957551.6382155367,,s,630,15.009932506561269,0.023825289692954416,0.0004177225653914419,0.02373446369171143,0.024083247756958005,0.024272825336456298,0.02621741447448731,"[0.023875583648681642, 0.023785472869873047, 0.023803903579711915, 0.02387353515625, 0.02369491195678711, 0.02679827117919922, 0.02461676788330078, 0.024101312637329102, 0.02366464042663574, 0.02370150375366211, 0.023621248245239257, 0.023894399642944337, 0.023719648361206054, 0.023734560012817384, 0.02372403144836426, 0.02370355224609375, 0.02371788787841797, 0.023792703628540038, 0.023747520446777345, 0.02388921546936035, 0.02401081657409668, 0.023719839096069336, 0.023870176315307617, 0.024176639556884767, 0.02389958381652832, 0.02389039993286133, 0.023762144088745118, 0.024040319442749022, 0.024091808319091797, 0.024021856307983397, 0.024206911087036133, 0.02432454490661621, 0.025222208023071287, 0.026256288528442383, 0.02446886444091797, 0.02406630325317383, 0.024072608947753905, 0.023932928085327147, 0.023913951873779298, 0.023689056396484377, 0.02409059143066406, 0.02378211212158203, 0.02365644836425781, 0.024152063369750978, 0.02379743957519531, 0.02373868751525879, 0.023871488571166992, 0.023799808502197265, 0.023963647842407225, 0.023812095642089845, 0.02371379280090332, 0.024164352416992187, 0.023771135330200196, 0.023837984085083006, 0.02476652717590332, 0.023673503875732423, 0.02371583938598633, 0.026605728149414063, 0.02386934471130371, 0.02370899200439453, 0.023580831527709963, 0.02368076705932617, 0.02359574317932129, 0.02410963249206543, 0.023945215225219727, 0.023807296752929686, 0.02367305564880371, 0.02364601516723633, 0.02360710334777832, 0.023509279251098632, 0.02356617546081543, 0.023507680892944336, 0.02363529586791992, 0.02369536018371582, 0.023642688751220702, 0.02373651123046875, 0.02371148872375488, 0.023713823318481445, 0.023730207443237304, 0.023830623626708985, 0.023889951705932617, 0.02374448013305664, 0.023680639266967774, 0.023701887130737304, 0.02392416000366211, 0.02358233642578125, 0.023670751571655272, 0.023843679428100586, 0.02373641586303711, 0.02378767967224121, 0.023639936447143555, 
0.02371788787841797, 0.023665727615356444, 0.023688127517700195, 0.023787519454956055, 0.02371788787841797, 0.023735616683959963, 0.023784128189086914, 0.02375481605529785, 0.023736255645751953, 0.023595008850097656, 0.023574527740478517, 0.023472127914428712, 0.023788768768310545, 0.023886623382568358, 0.023754911422729494, 0.024133472442626952, 0.023977983474731446, 0.023860639572143554, 0.023724639892578125, 0.02370956802368164, 0.02371187210083008, 0.023508352279663088, 0.02358131217956543, 0.023476224899291992, 0.023734272003173826, 0.02356608009338379, 0.023536127090454103, 0.023495807647705078, 0.02368076705932617, 0.023894912719726564, 0.02384435272216797, 0.02379417610168457, 0.023830528259277343, 0.02377235221862793, 0.02376688003540039, 0.02365635108947754, 0.023665279388427735, 0.0235807991027832, 0.02361510467529297, 0.02383910369873047, 0.02425593566894531, 0.02398476791381836, 0.023856895446777344, 0.023685312271118163, 0.02368716812133789, 0.023613439559936524, 0.02369692802429199, 0.02364028739929199, 0.023552255630493166, 0.023619583129882812, 0.023629823684692384, 0.023611391067504883, 0.02356172752380371, 0.023648191452026367, 0.023531904220581056, 0.023652128219604492, 0.024197536468505858, 0.02422921562194824, 0.024018911361694335, 0.023732927322387694, 0.023799327850341796, 0.023560575485229492, 0.02374460792541504, 0.024065088272094727, 0.024243135452270508, 0.023995616912841796, 0.023941919326782225, 0.023732032775878906, 0.023443647384643555, 0.023610559463500977, 0.02365318489074707, 0.02352284812927246, 0.02353955268859863, 0.02364236831665039, 0.02359334373474121, 0.023638015747070314, 0.023613439559936524, 0.02376848030090332, 0.023583328247070313, 0.023606784820556642, 0.023626239776611328, 0.023631872177124022, 0.023797760009765623, 0.02370150375366211, 0.02381955146789551, 0.023882463455200197, 0.02378704071044922, 0.02376697540283203, 0.023679519653320314, 0.02369935989379883, 0.023889312744140623, 0.024936864852905274, 0.024049951553344728, 0.023783424377441405, 0.023821920394897462, 0.023746463775634767, 0.023793216705322265, 0.02379667282104492, 0.024821247100830078, 0.027457984924316406, 0.023902271270751955, 0.023799808502197265, 0.023627456665039064, 0.023628032684326172, 0.023896127700805663, 0.025265504837036133, 0.02428585624694824, 0.02452889633178711, 0.02410700798034668, 0.023952831268310548, 0.023695615768432616, 0.024369472503662108, 0.023754751205444336, 0.023934335708618165, 0.023847551345825196, 0.02373347282409668, 0.023593759536743163, 0.02353152084350586, 0.023472127914428712, 0.02352262306213379, 0.02364076805114746, 0.023582176208496095, 0.023691808700561524, 0.02371788787841797, 0.023809823989868164, 0.02357414436340332, 0.023784032821655275, 0.023729951858520507, 0.023772512435913086, 0.02370796775817871, 0.023939039230346678, 0.024135488510131836, 0.023933792114257814, 0.023821855545043947, 0.023675296783447267, 0.023768672943115233, 0.02363974380493164, 0.02345238494873047, 0.02349260711669922, 0.023560192108154295, 0.02437068748474121, 0.023842399597167968, 0.024001440048217772, 0.02384614372253418, 0.023671552658081053, 0.023506752014160158, 0.023396543502807617, 0.02356831932067871, 0.02361897659301758, 0.02360736083984375, 0.023511648178100586, 0.023504896163940428, 0.023680063247680665, 0.023636608123779296, 0.02354412841796875, 0.023496511459350587, 0.023482559204101562, 0.02374774360656738, 0.023645023345947265, 0.02371174430847168, 0.023617536544799804, 0.0237061767578125, 0.02360495948791504, 0.023775583267211915, 
0.023648319244384767, 0.023637056350708008, 0.023574943542480468, 0.02380451202392578, 0.02366873550415039, 0.023692480087280275, 0.02373468780517578, 0.02368320083618164, 0.0237673282623291, 0.02370355224609375, 0.02371379280090332, 0.02371174430847168, 0.023654399871826173, 0.02360550308227539, 0.02379305648803711, 0.02369772720336914, 0.023756832122802735, 0.02464143943786621, 0.02414601516723633, 0.023867391586303712, 0.023895584106445312, 0.023914976119995116, 0.023820192337036132, 0.023742464065551756, 0.024264799118041993, 0.023909759521484376, 0.02374457550048828, 0.023788095474243164, 0.02388991928100586, 0.023875583648681642, 0.02386124801635742, 0.02387059211730957, 0.023771520614624023, 0.02385980796813965, 0.024012704849243165, 0.024350143432617186, 0.023960128784179687, 0.023877599716186523, 0.023922367095947264, 0.023869792938232423, 0.023767040252685546, 0.023750656127929686, 0.023937152862548827, 0.023671968460083008, 0.02363670349121094, 0.023838623046875, 0.023631967544555665, 0.023621631622314454, 0.02363910484313965, 0.023679935455322265, 0.023852607727050782, 0.024056255340576174, 0.024063392639160155, 0.023734880447387696, 0.023826431274414063, 0.023832096099853515, 0.023744991302490234, 0.023688703536987304, 0.02382614326477051, 0.02414668846130371, 0.02394870376586914, 0.024070432662963867, 0.023959871292114257, 0.023733823776245118, 0.023985664367675782, 0.02358742332458496, 0.0235807991027832, 0.0236824951171875, 0.02368979263305664, 0.023694847106933595, 0.023575040817260744, 0.023563423156738282, 0.02355695915222168, 0.023541759490966797, 0.02347398376464844, 0.023549184799194336, 0.023629983901977538, 0.02360393524169922, 0.023912511825561523, 0.023789567947387694, 0.02371379280090332, 0.023635616302490236, 0.02360086441040039, 0.023497184753417968, 0.02365635108947754, 0.02356393623352051, 0.02386774444580078, 0.0235545597076416, 0.023666431427001953, 0.023689184188842773, 0.02368671989440918, 0.02364259147644043, 0.023723552703857422, 0.02362771224975586, 0.02363155174255371, 0.023594911575317384, 0.023625759124755858, 0.023550880432128905, 0.023608831405639647, 0.02367145538330078, 0.02371567916870117, 0.0241213436126709, 0.023889888763427736, 0.02385103988647461, 0.02372198486328125, 0.02385251235961914, 0.02412326431274414, 0.02436521530151367, 0.024005119323730468, 0.023801759719848634, 0.02370992088317871, 0.023705472946166994, 0.023834623336791993, 0.02374812889099121, 0.023908832550048827, 0.02371788787841797, 0.02388787269592285, 0.023580575942993166, 0.023652448654174804, 0.023783424377441405, 0.023862720489501953, 0.023730112075805665, 0.023768831253051757, 0.023799999237060547, 0.023577152252197267, 0.023646495819091798, 0.023496192932128908, 0.02372140884399414, 0.023509248733520508, 0.023558528900146484, 0.02362713623046875, 0.023570655822753906, 0.02371436882019043, 0.023601375579833984, 0.02362710380554199, 0.023748096466064454, 0.02382291221618652, 0.023521663665771485, 0.02358502388000488, 0.023594240188598632, 0.023708160400390626, 0.023752992630004882, 0.02421228790283203, 0.02373110389709473, 0.023743520736694335, 0.023644159317016602, 0.023690208435058594, 0.02423811149597168, 0.02377724838256836, 0.023805952072143553, 0.02372403144836426, 0.023592575073242188, 0.023682687759399416, 0.023862016677856444, 0.023669952392578124, 0.02352351951599121, 0.02359359931945801, 0.023828479766845705, 0.023670783996582033, 0.02375587272644043, 0.023669599533081054, 0.02371993637084961, 0.02578598403930664, 0.02471776008605957, 0.02407596778869629, 
0.023828800201416016, 0.023654048919677734, 0.023551616668701172, 0.02357052803039551, 0.023716480255126952, 0.023762943267822266, 0.023769088745117187, 0.023618560791015625, 0.02356915283203125, 0.02364646339416504, 0.023846912384033202, 0.02370560073852539, 0.023495744705200196, 0.023661504745483397, 0.023586687088012696, 0.023665792465209962, 0.023950336456298828, 0.024102912902832032, 0.02387763214111328, 0.024143871307373048, 0.024216896057128907, 0.02416640090942383, 0.02406399917602539, 0.024680448532104493, 0.02612224006652832, 0.02432156753540039, 0.023899871826171874, 0.023996320724487305, 0.023740287780761718, 0.023683103561401367, 0.023958463668823243, 0.02479516792297363, 0.02391446495056152, 0.023867391586303712, 0.0238623046875, 0.023849567413330077, 0.02377356719970703, 0.023803808212280272, 0.023920095443725586, 0.023844575881958006, 0.02383692741394043, 0.023746944427490233, 0.023660192489624022, 0.02369945526123047, 0.024009344100952148, 0.023775232315063476, 0.0237076473236084, 0.023740127563476564, 0.023822303771972655, 0.02677382469177246, 0.02384214401245117, 0.023870111465454102, 0.02390550422668457, 0.02376313591003418, 0.0237544002532959, 0.02382716751098633, 0.023754175186157227, 0.023628576278686524, 0.02375094413757324, 0.02373347282409668, 0.02420172882080078, 0.023838144302368164, 0.023824544906616212, 0.023753120422363282, 0.023866912841796876, 0.02373263931274414, 0.023652416229248047, 0.023733503341674806, 0.023741119384765624, 0.023732288360595703, 0.02361280059814453, 0.023791711807250978, 0.023620128631591797, 0.023572479248046875, 0.023590911865234376, 0.02372403144836426, 0.023631872177124022, 0.023486080169677733, 0.02368115234375, 0.02366080093383789, 0.023734272003173826, 0.02365398406982422, 0.023633663177490234, 0.02373811149597168, 0.023608896255493166, 0.023683263778686525, 0.023658943176269532, 0.023667808532714843, 0.02371046447753906, 0.02355766487121582, 0.02360793685913086, 0.023488512039184572, 0.02342911911010742, 0.023513088226318358, 0.023513088226318358, 0.02366464042663574, 0.023607231140136718, 0.023674943923950195, 0.023823871612548828, 0.023865856170654298, 0.023870719909667968, 0.02393164825439453, 0.02387763214111328, 0.02370476722717285, 0.023581504821777344, 0.023746559143066406, 0.02399555206298828, 0.023735136032104493, 0.02363382339477539, 0.02373580741882324, 0.0237259521484375, 0.023775264739990233, 0.023720640182495117, 0.02408243179321289, 0.023812095642089845, 0.0237238712310791, 0.023650463104248048, 0.023770111083984375, 0.023768064498901367, 0.023586719512939454, 0.023607391357421875, 0.023551647186279296, 0.023621984481811523, 0.023586816787719726, 0.023609312057495117, 0.023635263442993163, 0.023550687789916994, 0.02386124801635742, 0.023635967254638672, 0.023842815399169923, 0.023810047149658203, 0.02366054344177246, 0.023713567733764648, 0.0236363525390625, 0.023584224700927733, 0.023682880401611327, 0.023648832321166994, 0.02364944076538086, 0.023679136276245117, 0.023865407943725585, 0.02371206474304199, 0.024023359298706054, 0.023961280822753905, 0.023895584106445312, 0.02495568084716797, 0.02387081527709961, 0.023888544082641603, 0.023862943649291993, 0.023724384307861328, 0.02370947265625, 0.02367740821838379, 0.023629024505615236, 0.026835487365722655, 0.02406921577453613, 0.023784608840942384, 0.023837823867797852, 0.023814783096313477, 0.023773183822631837, 0.023746559143066406, 0.023797216415405272, 0.023759391784667967, 0.02366873550415039, 0.023756799697875978, 0.023777055740356445, 
0.023660768508911134, 0.023652032852172853, 0.02359878349304199, 0.023625728607177734, 0.023861888885498048, 0.02362883186340332, 0.023626720428466797, 0.02368921661376953, 0.023768831253051757, 0.024657279968261718, 0.023714399337768553, 0.023855039596557617, 0.023996768951416017, 0.023803903579711915, 0.02373436737060547, 0.02386115264892578, 0.023644159317016602, 0.024195072174072265, 0.027305376052856444, 0.024035520553588867, 0.023902624130249024, 0.024221696853637696, 0.023748607635498048, 0.023855104446411132, 0.02446710395812988, 0.02427939224243164, 0.024213504791259766, 0.02412473678588867, 0.02405036735534668, 0.02369945526123047, 0.02354582405090332, 0.023688352584838868, 0.02351568031311035, 0.02363376045227051, 0.023545696258544923, 0.023597312927246095, 0.0236200008392334, 0.023777280807495117, 0.0235762882232666, 0.023718175888061525, 0.02389788818359375, 0.023765216827392577, 0.023604736328125, 0.02372230339050293, 0.023608928680419923, 0.023822944641113283]",tokens/s,41.97220738498381,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1841.913856,2760.835072,0.0,2365.587456,2314.318336,s,1,8.89811328125,8.89811328125,0.0,8.89811328125,8.89811328125,8.89811328125,8.89811328125,[8.89811328125],,kWh,5.511781417915623e-05,6.072691714418805e-06,1.7201402650005027e-05,7.839190854358007e-05,,MB,1845.071872,3092.185088,0.0,2682.257408,2607.60832,s,10,0.5073520698547364,0.05073520698547364,0.0002747971853071152,0.05066675186157227,0.05080905418395996,0.05117609539031982,0.051469728355407716,"[0.05154313659667969, 0.05068672180175781, 0.05066864013671875, 0.05062307357788086, 0.05066486358642578, 0.050727489471435544, 0.050714176177978514, 0.05058483123779297, 0.050576416015625, 0.050562721252441406]",tokens/s,5045.805767054369,kWh,1.5419928673027206e-06,1.700195465713278e-07,1.0264642714631476e-06,2.738476685337196e-06,tokens/kWh,93482628.99980763,MB,1849.36448,3092.185088,0.0,2682.257408,2607.61088,s,10,15.497951171874998,1.5497951171875,0.005800013507732758,1.5492863159179686,1.5571719116210936,1.558186114501953,1.5589974768066406,"[1.5518857421875, 1.5592003173828124, 1.544307861328125, 1.547433837890625, 1.55082470703125, 1.556946533203125, 1.555377685546875, 1.5431873779296874, 1.5477479248046875, 1.5410391845703124]",tokens/s,40.6505345779703,kWh,4.5299420457698424e-05,4.995927556806055e-06,2.1374414760337175e-05,7.166976277484166e-05,tokens/kWh,879031.7919416218,,s,630,15.495820976257319,0.024596541232154483,0.0003772357676909649,0.024506224632263186,0.024912055206298828,0.025060841846466064,0.02604591617584229,"[0.0248623046875, 0.024609184265136717, 0.024407039642333983, 0.024689664840698244, 0.02434867286682129, 0.024375040054321288, 0.024914176940917968, 0.024778495788574217, 0.024428287506103517, 0.02441472053527832, 0.0245166072845459, 0.024444927215576173, 0.02442848014831543, 0.02484230422973633, 0.024663488388061525, 
0.024622751235961915, 0.02451968002319336, 0.024503807067871093, 0.02456822395324707, 0.02443878364562988, 0.024337696075439452, 0.02458844757080078, 0.024701503753662108, 0.02496512031555176, 0.02471731185913086, 0.02478643226623535, 0.024644031524658203, 0.02481977653503418, 0.02462905693054199, 0.024594623565673827, 0.024576095581054686, 0.02448784065246582, 0.024380512237548828, 0.02450115203857422, 0.024452512741088867, 0.024406335830688478, 0.024409664154052733, 0.02437398338317871, 0.024434688568115235, 0.028899328231811523, 0.024751903533935547, 0.024668384552001953, 0.02451251220703125, 0.02448579216003418, 0.024395488739013673, 0.02459257507324219, 0.024477888107299804, 0.024669824600219728, 0.024559999465942384, 0.024571903228759767, 0.02443059158325195, 0.024436735153198243, 0.024231935501098634, 0.024323551177978516, 0.02438748741149902, 0.024414783477783204, 0.02443187141418457, 0.02470361518859863, 0.02480496025085449, 0.024885215759277345, 0.024476287841796875, 0.024577312469482422, 0.02479587173461914, 0.02486502456665039, 0.024622848510742187, 0.024599903106689452, 0.024760288238525392, 0.024523456573486327, 0.024485727310180665, 0.02444211196899414, 0.024507295608520507, 0.024726943969726564, 0.024621664047241212, 0.024714496612548827, 0.024437503814697267, 0.024371200561523438, 0.024731552124023438, 0.02471513557434082, 0.025053632736206054, 0.024944000244140625, 0.024697248458862304, 0.024559616088867187, 0.025007360458374022, 0.02490025520324707, 0.024709215164184572, 0.02504409599304199, 0.024980352401733397, 0.024867967605590822, 0.024814464569091796, 0.02476851272583008, 0.02458847999572754, 0.024297279357910158, 0.024542335510253907, 0.024488607406616212, 0.024428768157958983, 0.024365055084228517, 0.024430400848388673, 0.024643775939941406, 0.02446067237854004, 0.024492639541625977, 0.024342559814453126, 0.024559616088867187, 0.024738943099975586, 0.024449920654296874, 0.024852479934692383, 0.02474332809448242, 0.024518335342407226, 0.02460256004333496, 0.02459132766723633, 0.024758399963378905, 0.025031904220581054, 0.02447427177429199, 0.025964351654052736, 0.02489356803894043, 0.024557632446289064, 0.024811199188232422, 0.024678720474243163, 0.024759647369384765, 0.02476304054260254, 0.025686016082763673, 0.026223936080932618, 0.02506208038330078, 0.02485856056213379, 0.024579456329345703, 0.024752960205078126, 0.02548294448852539, 0.02539091110229492, 0.0249051513671875, 0.024630016326904296, 0.02451046371459961, 0.024610271453857423, 0.02437174415588379, 0.02460032081604004, 0.024336639404296874, 0.02441935920715332, 0.024615392684936524, 0.024670719146728515, 0.025216224670410157, 0.024416608810424803, 0.024514368057250976, 0.024584096908569338, 0.02456972885131836, 0.024421215057373047, 0.02434867286682129, 0.024487871170043946, 0.02453715133666992, 0.024756128311157227, 0.024465120315551758, 0.02439583969116211, 0.02436265563964844, 0.02441632080078125, 0.024662624359130858, 0.02457804870605469, 0.02467020797729492, 0.02438956832885742, 0.02441788864135742, 0.024449440002441408, 0.02442451286315918, 0.024383487701416014, 0.0243056640625, 0.024311807632446288, 0.0244135684967041, 0.02434726333618164, 0.02444041633605957, 0.0256944637298584, 0.024508544921875, 0.02446339225769043, 0.024384895324707032, 0.024528928756713867, 0.02459913635253906, 0.02449203109741211, 0.024600576400756836, 0.02433126449584961, 0.024240383148193358, 0.024400543212890625, 0.024379487991333007, 0.024255680084228515, 0.024367168426513673, 0.024302335739135743, 
0.024309984207153322, 0.024352607727050782, 0.024479135513305664, 0.024209056854248047, 0.02431056022644043, 0.02427503967285156, 0.02490163230895996, 0.024624927520751953, 0.02436841583251953, 0.024374048233032228, 0.024444704055786134, 0.024377567291259766, 0.024309120178222655, 0.024496768951416014, 0.024758272171020508, 0.024284351348876954, 0.02440457534790039, 0.02429110336303711, 0.024285184860229493, 0.02429587173461914, 0.02434662437438965, 0.02454732894897461, 0.02422547149658203, 0.024396095275878906, 0.024968896865844727, 0.024508575439453124, 0.0244553279876709, 0.024449024200439453, 0.02494054412841797, 0.024606719970703125, 0.024541183471679686, 0.024698879241943358, 0.02434048080444336, 0.02433024024963379, 0.02426041603088379, 0.024264896392822265, 0.02429257583618164, 0.02448633575439453, 0.02448419189453125, 0.024544479370117188, 0.02453993606567383, 0.024754240036010743, 0.024485824584960937, 0.0245166072845459, 0.02443878364562988, 0.024398847579956053, 0.02438041687011719, 0.02450227165222168, 0.02446272087097168, 0.02493283271789551, 0.024481952667236326, 0.024391679763793944, 0.02451046371459961, 0.024715263366699217, 0.025038400650024415, 0.0249136962890625, 0.02470159912109375, 0.024594432830810548, 0.024765472412109375, 0.024685535430908203, 0.02471049690246582, 0.02480793571472168, 0.024559776306152345, 0.024457216262817383, 0.02481132888793945, 0.02443283271789551, 0.02476032066345215, 0.024661056518554686, 0.024521152496337892, 0.024537599563598633, 0.025026559829711914, 0.024939872741699218, 0.025141664505004883, 0.024842432022094726, 0.024715744018554687, 0.02475993537902832, 0.024768960952758788, 0.02448793601989746, 0.02448591995239258, 0.024602592468261717, 0.024406015396118166, 0.024368671417236327, 0.02485500717163086, 0.024365055084228517, 0.024606719970703125, 0.024486080169677734, 0.024803136825561522, 0.024762367248535155, 0.024790752410888673, 0.024596063613891602, 0.024519359588623047, 0.024625152587890626, 0.024763904571533202, 0.02451315116882324, 0.024635263442993164, 0.024800832748413087, 0.02470751953125, 0.02442425537109375, 0.024422111511230468, 0.02440387153625488, 0.02439583969116211, 0.024243743896484374, 0.024509408950805663, 0.02733807945251465, 0.02493712043762207, 0.02458624076843262, 0.024606719970703125, 0.02461631965637207, 0.024609407424926757, 0.02441366386413574, 0.024523199081420897, 0.02451241683959961, 0.024466880798339842, 0.02443283271789551, 0.024398399353027345, 0.024432544708251954, 0.025038944244384766, 0.02448188781738281, 0.024572959899902345, 0.02450521659851074, 0.024426496505737305, 0.024453119277954103, 0.024411552429199217, 0.024512351989746092, 0.024466304779052733, 0.024670112609863282, 0.024544607162475585, 0.024484479904174804, 0.024678272247314455, 0.024569952011108398, 0.024485599517822264, 0.024457536697387695, 0.024385536193847656, 0.024600576400756836, 0.024567808151245117, 0.024754175186157225, 0.025006080627441408, 0.02467020797729492, 0.02446950340270996, 0.02477987289428711, 0.024415136337280274, 0.024633056640625, 0.024873247146606447, 0.024979455947875977, 0.024723295211791993, 0.024649663925170897, 0.024544767379760742, 0.024767200469970704, 0.024393375396728516, 0.024680767059326172, 0.024445087432861327, 0.024434560775756835, 0.0243240966796875, 0.02429132843017578, 0.024344287872314452, 0.02421379280090332, 0.027205631256103514, 0.024791040420532227, 0.025149440765380858, 0.02465996742248535, 0.024506080627441407, 0.024430303573608397, 0.02443280029296875, 0.02440233612060547, 
0.02433827209472656, 0.02458025550842285, 0.02454528045654297, 0.024666112899780275, 0.024786943435668944, 0.02470854377746582, 0.0245948486328125, 0.024354368209838866, 0.024438495635986327, 0.024498111724853517, 0.024394527435302734, 0.02573958396911621, 0.02469856071472168, 0.02442367935180664, 0.024363935470581053, 0.024383487701416014, 0.024444063186645507, 0.02453590393066406, 0.025157632827758788, 0.02607923126220703, 0.02541923141479492, 0.02497529602050781, 0.02501420783996582, 0.024840608596801757, 0.02474825668334961, 0.025013792037963868, 0.024745664596557616, 0.02462179183959961, 0.02492326354980469, 0.024863712310791014, 0.024647680282592774, 0.02444697570800781, 0.024401023864746095, 0.024658592224121093, 0.02447808074951172, 0.025485567092895508, 0.02634419250488281, 0.024798208236694336, 0.024801279067993166, 0.024967168807983397, 0.024687616348266602, 0.02451968002319336, 0.024261695861816406, 0.024372159957885744, 0.024515743255615233, 0.024521568298339843, 0.024924415588378906, 0.024599903106689452, 0.02458255958557129, 0.02447769546508789, 0.02491187286376953, 0.0248371524810791, 0.024840991973876955, 0.024832191467285155, 0.02458995246887207, 0.02445350456237793, 0.025453632354736327, 0.02501487922668457, 0.024606271743774413, 0.024453920364379884, 0.02430156707763672, 0.02441212844848633, 0.024268831253051758, 0.024645631790161132, 0.025068927764892578, 0.02460531234741211, 0.02456559944152832, 0.024729440689086914, 0.024662336349487304, 0.02450432014465332, 0.024702560424804686, 0.02437318420410156, 0.02441788864135742, 0.024626272201538086, 0.024438560485839842, 0.024385536193847656, 0.024385599136352538, 0.025059328079223633, 0.02451375961303711, 0.024322240829467774, 0.024334880828857423, 0.024328191757202147, 0.024276992797851563, 0.024360960006713867, 0.02439900779724121, 0.02494761657714844, 0.02440934371948242, 0.025362432479858397, 0.024392383575439453, 0.024395776748657227, 0.024755840301513673, 0.02723580741882324, 0.024611743927001953, 0.024549375534057616, 0.024452991485595703, 0.024477184295654295, 0.02446940803527832, 0.024565759658813476, 0.024991743087768553, 0.02473369598388672, 0.024498176574707032, 0.0243503360748291, 0.024268800735473633, 0.02457360076904297, 0.024492992401123046, 0.024231712341308595, 0.024258560180664062, 0.02429747200012207, 0.024414239883422853, 0.024358879089355467, 0.024333856582641603, 0.024291872024536133, 0.024387327194213868, 0.024274751663208007, 0.024260480880737303, 0.02444339179992676, 0.024459264755249024, 0.024381439208984376, 0.02449571228027344, 0.024328607559204102, 0.024420352935791017, 0.0243787841796875, 0.02436128044128418, 0.024396064758300782, 0.02451433563232422, 0.02450044822692871, 0.024391551971435547, 0.02441347122192383, 0.024474464416503906, 0.02459382438659668, 0.02448240089416504, 0.024629247665405272, 0.024848255157470703, 0.02489356803894043, 0.024592384338378907, 0.025460351943969728, 0.02457638359069824, 0.024759391784667968, 0.024941471099853514, 0.024469120025634766, 0.024506752014160155, 0.02434867286682129, 0.024518655776977538, 0.024772640228271484, 0.024583488464355468, 0.024332000732421876, 0.024445600509643554, 0.024405344009399414, 0.024647743225097656, 0.02434547233581543, 0.02431737518310547, 0.024272607803344726, 0.024560735702514647, 0.024280479431152344, 0.02431420707702637, 0.024438207626342773, 0.024343103408813477, 0.02454252815246582, 0.024891551971435548, 0.024621248245239258, 0.02428163146972656, 0.02530240058898926, 0.024339008331298827, 0.024559135437011718, 
0.02461516761779785, 0.024332000732421876, 0.02425449562072754, 0.02426019287109375, 0.025659360885620118, 0.025920480728149415, 0.024506368637084962, 0.024421823501586913, 0.024555391311645507, 0.024789695739746095, 0.025417728424072264, 0.02508121681213379, 0.0249800968170166, 0.02485798454284668, 0.024807552337646484, 0.024634111404418946, 0.024524543762207033, 0.024805376052856445, 0.02481100845336914, 0.02442470359802246, 0.02420966339111328, 0.024854528427124024, 0.02456985664367676, 0.024485824584960937, 0.024328256607055666, 0.02447974395751953, 0.024337663650512695, 0.024381696701049806, 0.024271520614624023, 0.024461151123046875, 0.024449024200439453, 0.024399967193603517, 0.024219263076782228, 0.024315488815307616, 0.02439593505859375, 0.024285728454589844, 0.024180543899536132, 0.024229888916015626, 0.02440563201904297, 0.024394304275512695, 0.02430499267578125, 0.02435545539855957, 0.02440950393676758, 0.02440012741088867, 0.024314239501953126, 0.024391616821289062, 0.024418367385864257, 0.024385536193847656, 0.024219648361206055, 0.024333696365356445, 0.024636032104492188, 0.025140575408935547, 0.024619935989379883, 0.024289024353027343, 0.02484422492980957, 0.024647743225097656, 0.02445654487609863, 0.02432579231262207, 0.024638111114501954, 0.024891807556152345, 0.024613088607788085, 0.024320159912109375, 0.02431590461730957, 0.024488096237182618, 0.024440671920776365, 0.024332351684570312, 0.02438956832885742, 0.024253759384155273, 0.024382144927978515, 0.024399871826171874, 0.02441324806213379, 0.02441049575805664, 0.024294111251831056, 0.024530048370361327, 0.024386272430419922, 0.024352767944335937, 0.02430156707763672, 0.02470025634765625, 0.024470176696777344, 0.024412160873413087, 0.024491071701049805, 0.024593343734741212, 0.024592384338378907, 0.024364639282226562, 0.02433244705200195, 0.024406272888183592, 0.025145376205444336, 0.02435868835449219, 0.024332063674926758, 0.02443894386291504, 0.025069215774536132, 0.024689247131347656, 0.02447932815551758, 0.024877056121826172, 0.02452521514892578, 0.02433433532714844, 0.024395519256591797, 0.02445747184753418, 0.024809503555297853, 0.024511520385742187, 0.024476768493652344, 0.024480672836303712, 0.024449119567871092, 0.02434079933166504, 0.024408607482910155, 0.024425792694091796, 0.024373952865600585, 0.024252416610717774, 0.024285280227661132, 0.024333248138427733, 0.024412384033203127, 0.02423788833618164, 0.024262912750244142, 0.02432614326477051, 0.024582080841064453, 0.024725696563720704, 0.02456585693359375, 0.024387456893920897, 0.02453590393066406, 0.024274656295776367, 0.024423776626586916, 0.02440675163269043, 0.02445254325866699]",tokens/s,40.6561227678924,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,811.737088,554.631168,0.0,159.383552,143.673856,s,1,7.2645595703125,7.2645595703125,0.0,7.2645595703125,7.2645595703125,7.2645595703125,7.2645595703125,[7.2645595703125],,kWh,1.059855921249664e-05,1.1614199558078427e-06,3.5919473179990558e-06,1.535192648630354e-05,,MB,1332.49024,609.15712,0.0,199.22944,186.684928,s,26,0.1986882557868958,0.007641855991803682,0.00010765275712770247,0.007627088069915772,0.007689568042755127,0.007762360215187072,0.008034847855567932,"[0.007783520221710205, 0.007632544040679931, 0.007615615844726562, 0.007631135940551758, 0.007592576026916504, 0.007626304149627686, 0.007538559913635254, 0.008118623733520508, 0.007698880195617676, 0.007650271892547607, 0.007627871990203858, 0.007642591953277588, 0.007630976200103759, 0.007672863960266113, 0.007640255928039551, 0.00756166410446167, 0.0076802558898925784, 0.007628064155578613, 0.007584799766540527, 0.0075781760215759275, 0.007624320030212402, 0.0075511040687561036, 0.0075608639717102055, 0.0076089282035827635, 0.0076212158203125, 0.007586271762847901]",tokens/s,33499.71528835067,kWh,2.2710448227302365e-07,2.5039945156926532e-08,9.852234427392751e-08,3.506667717038777e-07,tokens/kWh,730037804.1412503,MB,1371.762688,611.254272,0.0,201.326592,186.687488,s,26,10.00890411376953,0.38495785052959736,0.0016175626169597206,0.3846318359375,0.38742640686035157,0.38829644012451175,0.3885595932006836,"[0.3870013427734375, 0.38459976196289064, 0.3859169616699219, 0.384282470703125, 0.3852095642089844, 0.3845240783691406, 0.38484475708007815, 0.3884447631835938, 0.3885978698730469, 0.38477554321289065, 0.3828612976074219, 0.3863381042480469, 0.38619061279296873, 0.38514959716796876, 0.38294338989257815, 0.38269284057617187, 0.38385476684570313, 0.3836585388183594, 0.38410577392578127, 0.38320263671875, 0.38466390991210936, 0.38455780029296877, 0.38785147094726563, 0.3855151672363281, 0.38340081787109376, 0.3837202758789062]",tokens/s,163.65428036687425,kWh,1.1244997103624623e-05,1.240140582539681e-06,3.929803527956919e-06,1.641494121412122e-05,tokens/kWh,3837966.8363236794,,s,1638,9.997071547031386,0.006103218282680955,0.00012193710248929396,0.0060904319286346435,0.006164614391326905,0.006209982371330262,0.006590835213661193,"[0.006099040031433105, 0.0060965118408203125, 0.006154816150665283, 0.00604095983505249, 0.00616425609588623, 0.006230591773986816, 0.006178815841674804, 0.006094624042510986, 0.00619865608215332, 0.006209407806396484, 0.006185696125030517, 0.006164735794067383, 0.006086656093597412, 0.006123519897460937, 0.006047743797302246, 0.006168896198272705, 0.006061759948730469, 0.006155551910400391, 0.0061036162376403805, 0.006166368007659912, 0.006070591926574707, 0.006203392028808594, 0.006240255832672119, 0.0061658558845520016, 0.006145855903625488, 0.006069087982177734, 0.0061931519508361815, 0.006078559875488282, 0.006131616115570069, 0.006065248012542724, 0.006124512195587158, 0.006045536041259765, 0.006127711772918701, 0.006042719841003418, 0.006149312019348144, 0.006063839912414551, 0.006166143894195557, 0.0060538239479064945, 0.006422463893890381, 0.00618336009979248, 0.006080031871795654, 0.006107711791992187, 0.0060553278923034665, 0.006209919929504395, 0.006084799766540527, 0.00611737585067749, 0.006307648181915283, 0.006162079811096191, 0.006007328033447266, 0.006127232074737548, 0.00604963207244873, 0.0062349758148193355, 0.0060310401916503905, 0.006153952121734619, 0.006572319984436035, 0.006065855979919434, 0.006162176132202149, 
0.0060544638633728024, 0.006111008167266846, 0.006068448066711426, 0.006127295970916748, 0.00605625581741333, 0.006141952037811279, 0.005943903923034668, 0.006113120079040527, 0.006031551837921143, 0.006188704013824463, 0.006022848129272461, 0.006118048191070557, 0.006088031768798828, 0.0060850238800048825, 0.006090976238250732, 0.006091872215270996, 0.006142367839813232, 0.0061420159339904785, 0.006123104095458984, 0.006067039966583252, 0.006170623779296875, 0.006061215877532959, 0.006161248207092285, 0.006045567989349365, 0.006142079830169678, 0.006053088188171386, 0.0061519680023193355, 0.006061024188995361, 0.00616483211517334, 0.006089759826660156, 0.006165311813354492, 0.006079999923706054, 0.00609657621383667, 0.006113952159881592, 0.006086175918579102, 0.006113759994506836, 0.006055647850036621, 0.006100543975830078, 0.006048799991607666, 0.006123295783996582, 0.0060529599189758304, 0.006128448009490967, 0.006039008140563965, 0.006160927772521973, 0.006036928176879883, 0.006169151782989502, 0.006067776203155518, 0.006164927959442138, 0.006039391994476318, 0.006125152111053467, 0.006033792018890381, 0.006117824077606201, 0.006059199810028076, 0.00607916784286499, 0.006053440093994141, 0.006058303833007812, 0.006107135772705078, 0.006074368000030517, 0.006103040218353272, 0.006059072017669678, 0.0061092801094055176, 0.006125664234161377, 0.0061543679237365725, 0.006027904033660889, 0.006265183925628662, 0.006013855934143066, 0.006243072032928467, 0.006037504196166992, 0.006123519897460937, 0.005929120063781738, 0.006117216110229492, 0.00605625581741333, 0.006286079883575439, 0.006222752094268799, 0.006074399948120117, 0.006158239841461181, 0.006090911865234375, 0.006168511867523194, 0.006053887844085694, 0.006148128032684326, 0.006051328182220459, 0.0061279358863830565, 0.006058303833007812, 0.006149983882904052, 0.006044672012329101, 0.006156544208526612, 0.006052608013153076, 0.006150144100189209, 0.006032512187957763, 0.006142848014831543, 0.006072480201721192, 0.0061008319854736325, 0.006104671955108642, 0.006070240020751953, 0.006175168037414551, 0.006105440139770508, 0.0061660480499267575, 0.00606774377822876, 0.006145887851715088, 0.006070720195770264, 0.006177087783813476, 0.00611084794998169, 0.006447904109954834, 0.006059616088867187, 0.006187007904052734, 0.00612556791305542, 0.0060661759376525876, 0.006135807991027832, 0.006135807991027832, 0.006184447765350342, 0.006101503849029541, 0.006133471965789795, 0.006080800056457519, 0.006170559883117676, 0.0060739521980285646, 0.006160192012786865, 0.006085279941558838, 0.006155456066131592, 0.006041408061981201, 0.006130559921264649, 0.006051360130310058, 0.006144832134246827, 0.0061001920700073245, 0.0060657281875610355, 0.006113791942596435, 0.006106624126434326, 0.006095871925354004, 0.006055935859680176, 0.006144000053405762, 0.006049536228179932, 0.006369311809539795, 0.006070655822753906, 0.00601087999343872, 0.0060065598487854, 0.006118720054626465, 0.006013855934143066, 0.006125408172607422, 0.006049952030181885, 0.006119423866271972, 0.00602342414855957, 0.006133600234985352, 0.006073760032653809, 0.006137792110443115, 0.006163008213043213, 0.006064256191253662, 0.006081535816192627, 0.006060256004333496, 0.006124351978302002, 0.006045631885528565, 0.006096864223480224, 0.006012864112854004, 0.00609881591796875, 0.006031455993652343, 0.006112480163574218, 0.00601580810546875, 0.006131360054016113, 0.006009535789489746, 0.006142720222473145, 0.0059909758567810055, 0.006125887870788574, 0.006096928119659424, 
0.006260960102081299, 0.0061212477684020996, 0.006158336162567139, 0.006149983882904052, 0.006059296131134033, 0.006097856044769287, 0.006253600120544434, 0.006115935802459717, 0.006032927989959717, 0.00612175989151001, 0.006031968116760254, 0.006104000091552734, 0.006431712150573731, 0.006182911872863769, 0.006037504196166992, 0.006121568202972412, 0.0060128321647644044, 0.006158048152923584, 0.006088128089904785, 0.006089439868927002, 0.006155935764312745, 0.006089119911193847, 0.006090816020965576, 0.006033152103424072, 0.006095104217529297, 0.00602726411819458, 0.006095967769622803, 0.006015103816986084, 0.006091839790344238, 0.006029024124145508, 0.006118879795074463, 0.0060126399993896485, 0.006124351978302002, 0.00601907205581665, 0.006031424045562744, 0.006003392219543457, 0.0061171197891235355, 0.00611846399307251, 0.006131872177124024, 0.0060178241729736326, 0.006145120143890381, 0.006039552211761475, 0.006124512195587158, 0.006114463806152343, 0.006365983963012695, 0.006104479789733887, 0.006070655822753906, 0.006082272052764893, 0.0060033597946166995, 0.006098015785217285, 0.006040319919586181, 0.006091104030609131, 0.0060804481506347655, 0.006098368167877197, 0.006043935775756836, 0.006131519794464111, 0.006074368000030517, 0.006135456085205078, 0.006025760173797607, 0.0061337599754333495, 0.006004479885101319, 0.006139232158660889, 0.006056128025054931, 0.006219871997833252, 0.00607913589477539, 0.0060704960823059085, 0.006123263835906983, 0.006074399948120117, 0.006135359764099121, 0.006064511775970459, 0.0061047358512878415, 0.00605017614364624, 0.00611952018737793, 0.006207071781158448, 0.006131328105926513, 0.006058688163757324, 0.006111519813537598, 0.006270112037658692, 0.006169407844543457, 0.006038559913635254, 0.006224959850311279, 0.006083712100982666, 0.006101312160491944, 0.00614131212234497, 0.006118239879608155, 0.006144000053405762, 0.006082560062408447, 0.00612550401687622, 0.006076191902160645, 0.0061413440704345705, 0.006163328170776367, 0.006145567893981934, 0.006041567802429199, 0.006121119976043701, 0.006046559810638428, 0.006189280033111573, 0.006110527992248535, 0.006060031890869141, 0.006350560188293457, 0.0061197118759155275, 0.006146207809448242, 0.006072127819061279, 0.006120512008666992, 0.00603542423248291, 0.006112480163574218, 0.0060516161918640135, 0.0061188478469848635, 0.006012991905212402, 0.006099455833435059, 0.006027103900909424, 0.006144159793853759, 0.006051839828491211, 0.006131519794464111, 0.006027455806732178, 0.006151423931121826, 0.006060768127441407, 0.006102784156799317, 0.006082848072052002, 0.006067615985870361, 0.006074143886566162, 0.006036416053771972, 0.006087711811065674, 0.0060342721939086914, 0.006113279819488526, 0.006131711959838867, 0.006146143913269043, 0.006092864036560059, 0.006137824058532715, 0.00604966402053833, 0.006147903919219971, 0.006020639896392822, 0.006158783912658692, 0.006107232093811035, 0.006141503810882568, 0.006013279914855957, 0.006125792026519776, 0.00601907205581665, 0.006115327835083008, 0.0060663681030273435, 0.006076223850250244, 0.00606822395324707, 0.006178944110870361, 0.0062128958702087405, 0.006089503765106201, 0.006111040115356446, 0.006057695865631103, 0.006091360092163086, 0.006149087905883789, 0.006152927875518799, 0.006070367813110352, 0.006147583961486816, 0.0060215358734130855, 0.006089727878570556, 0.006012959957122803, 0.006162975788116455, 0.005999008178710938, 0.006176799774169922, 0.006060031890869141, 0.006074431896209717, 0.006100543975830078, 0.006034048080444336, 
0.006078752040863037, 0.006112927913665771, 0.006105408191680908, 0.006057983875274659, 0.006082560062408447, 0.006042975902557373, 0.006084928035736084, 0.006023519992828369, 0.006082880020141602, 0.006014656066894531, 0.006078464031219482, 0.0061211838722229, 0.0064067840576171875, 0.006059679985046387, 0.006111231803894043, 0.0060234880447387696, 0.006188735961914062, 0.006094783782958984, 0.006196959972381592, 0.006188640117645264, 0.006115871906280518, 0.006154463768005371, 0.006104351997375489, 0.006118112087249756, 0.006060031890869141, 0.006123295783996582, 0.006041791915893555, 0.006108672142028809, 0.006025760173797607, 0.006176191806793213, 0.006078239917755127, 0.006210336208343506, 0.006049791812896729, 0.006129759788513184, 0.006180672168731689, 0.006107583999633789, 0.006137504100799561, 0.006121664047241211, 0.006114463806152343, 0.006049536228179932, 0.0061019201278686526, 0.006048799991607666, 0.006108479976654053, 0.006084256172180176, 0.006109151840209961, 0.006038847923278808, 0.0062039680480957034, 0.006009247779846192, 0.006120927810668945, 0.006030784130096435, 0.0061242241859436036, 0.006057983875274659, 0.006152224063873291, 0.006097023963928223, 0.006111231803894043, 0.006051104068756103, 0.0061058239936828615, 0.006099232196807861, 0.006090047836303711, 0.00609116792678833, 0.00605731201171875, 0.00610371208190918, 0.0060152320861816405, 0.006131487846374512, 0.006014944076538086, 0.0061562881469726565, 0.006107135772705078, 0.006137728214263916, 0.0060555520057678225, 0.0061055998802185055, 0.006088031768798828, 0.0060991358757019045, 0.006261216163635254, 0.006074368000030517, 0.006199295997619629, 0.0060412797927856445, 0.006152512073516846, 0.006027520179748535, 0.006198048114776611, 0.006112224102020264, 0.006113471984863281, 0.006002528190612793, 0.006160064220428467, 0.006080800056457519, 0.006161695957183838, 0.0060193600654602055, 0.006138495922088623, 0.005998303890228271, 0.006107232093811035, 0.006081535816192627, 0.006091392040252686, 0.006060416221618652, 0.006033408164978027, 0.006084832191467285, 0.006160416126251221, 0.006088448047637939, 0.00607587194442749, 0.006093344211578369, 0.006080512046813965, 0.006203487873077392, 0.006123136043548584, 0.0065550398826599125, 0.007563968181610107, 0.00660643196105957, 0.00670684814453125, 0.006532192230224609, 0.006109024047851562, 0.006158080101013184, 0.006080704212188721, 0.006160607814788818, 0.006051680088043213, 0.00615228796005249, 0.0060496959686279295, 0.006141952037811279, 0.00606822395324707, 0.006123519897460937, 0.006139743804931641, 0.006079872131347656, 0.006118303775787354, 0.006067264080047608, 0.006134592056274414, 0.0061151041984558105, 0.006164703845977783, 0.006030879974365235, 0.006162752151489257, 0.00599894380569458, 0.006110015869140625, 0.006032224178314209, 0.00611084794998169, 0.005988736152648926, 0.006122848033905029, 0.00601145601272583, 0.0061175041198730466, 0.006004703998565674, 0.006093952178955078, 0.0060488319396972655, 0.0060700798034667965, 0.006082528114318848, 0.006061312198638916, 0.006098944187164307, 0.006272928237915039, 0.0061121277809143065, 0.006096255779266357, 0.006127679824829101, 0.006027135848999023, 0.00610745620727539, 0.006196703910827637, 0.007695168018341064, 0.007882944107055664, 0.006240447998046875, 0.0060495038032531735, 0.0061718721389770505, 0.006066048145294189, 0.006126495838165283, 0.006031360149383545, 0.006109183788299561, 0.006028351783752442, 0.006114431858062744, 0.006059743881225586, 0.0061133761405944825, 0.006065343856811524, 
0.006230432033538819, 0.0062137279510498045, 0.006058527946472168, 0.006128767967224121, 0.006041759967803955, 0.00610537576675415, 0.006039775848388672, 0.00611081600189209, 0.0060832958221435545, 0.006143680095672607, 0.00603545618057251, 0.00658022403717041, 0.006032671928405762, 0.006138591766357422, 0.0060347838401794435, 0.006092639923095703, 0.00608355188369751, 0.006070112228393555, 0.0062911038398742675, 0.006109151840209961, 0.006142591953277588, 0.0060919361114501955, 0.006184544086456299, 0.006085792064666748, 0.006186272144317627, 0.006040256023406982, 0.0061561598777771, 0.0059155840873718266, 0.006141952037811279, 0.0060878081321716305, 0.006160639762878418, 0.006061791896820068, 0.006073247909545898, 0.006196735858917236, 0.006037600040435791, 0.006093215942382812, 0.0060351681709289555, 0.006103328227996826, 0.006064127922058105, 0.006172416210174561, 0.006048319816589355, 0.006147776126861572, 0.006097184181213379, 0.0061140480041503905, 0.00604259204864502, 0.006126688003540039, 0.006011807918548584, 0.006120512008666992, 0.0060503678321838375, 0.006291200160980224, 0.006578815937042236, 0.006028575897216797, 0.006071008205413818, 0.006045695781707764, 0.006140223979949951, 0.006049215793609619, 0.006115647792816162, 0.006043583869934082, 0.006118688106536865, 0.00605456018447876, 0.006116799831390381, 0.006067071914672852, 0.006159264087677002, 0.006011648178100586, 0.006129759788513184, 0.006016160011291504, 0.006139967918395996, 0.006031455993652343, 0.006128575801849365, 0.006036448001861572, 0.006107711791992187, 0.006310527801513672, 0.006153632164001465, 0.0061298561096191405, 0.006074272155761719, 0.006183008193969727, 0.006049791812896729, 0.00612556791305542, 0.006092735767364502, 0.006110879898071289, 0.0060665922164916996, 0.006106719970703125, 0.005994336128234863, 0.006091328144073487, 0.006004608154296875, 0.006113408088684082, 0.0060119681358337404, 0.006121535778045654, 0.006095808029174804, 0.006119359970092773, 0.00595689582824707, 0.0060953922271728515, 0.006076608180999756, 0.006051839828491211, 0.006066431999206543, 0.006032800197601319, 0.006066656112670899, 0.006033184051513672, 0.006078271865844726, 0.006048064231872558, 0.0060845761299133305, 0.006029407978057861, 0.0061049599647521975, 0.005994751930236817, 0.0060713281631469725, 0.006021696090698242, 0.00607366418838501, 0.005995391845703125, 0.006078688144683838, 0.006022336006164551, 0.006097055912017822, 0.00601251220703125, 0.006080800056457519, 0.0060424962043762204, 0.006129312038421631, 0.0060414400100708004, 0.006107327938079834, 0.006012608051300048, 0.006118783950805664, 0.006009791851043701, 0.006129216194152832, 0.006037792205810547, 0.006121984004974365, 0.006202400207519531, 0.006088352203369141, 0.006161375999450684, 0.006127615928649902, 0.006119616031646728, 0.006061888217926025, 0.0060943360328674315, 0.0060349760055541995, 0.0060917439460754395, 0.006025216102600098, 0.006094848155975342, 0.006010240077972412, 0.006097631931304932, 0.005996448040008545, 0.006099199771881104, 0.006008575916290284, 0.006114975929260254, 0.006133408069610596, 0.0061242241859436036, 0.006075488090515137, 0.006119743824005127, 0.006034239768981934, 0.006089759826660156, 0.006053664207458496, 0.006067168235778808, 0.006039103984832764, 0.006034048080444336, 0.006091616153717041, 0.006040544033050537, 0.006154240131378174, 0.006378304004669189, 0.006119200229644775, 0.006504127979278565, 0.006744607925415039, 0.006352960109710693, 0.0060538239479064945, 0.006125823974609375, 0.006051199913024902, 
0.006189663887023925, 0.00611516809463501, 0.006154047966003418, 0.006042975902557373, 0.006128352165222168, 0.006035679817199707, 0.006122623920440674, 0.006044159889221192, 0.006129983901977539, 0.006073984146118164, 0.00612995195388794, 0.006062399864196777, 0.006057663917541504, 0.006109344005584717, 0.006045536041259765, 0.0060661759376525876, 0.006044960021972657, 0.006165215969085694, 0.006052095890045166, 0.006094624042510986, 0.006016992092132569, 0.006206495761871338, 0.006026271820068359, 0.006102975845336914, 0.005992447853088379, 0.006106847763061523, 0.006617472171783447, 0.006460832118988037, 0.006097631931304932, 0.00603113603591919, 0.006086656093597412, 0.006016895771026611, 0.00612550401687622, 0.0061296639442443845, 0.006123104095458984, 0.006029920101165771, 0.006141952037811279, 0.006012928009033203, 0.006145823955535889, 0.006024576187133789, 0.006136672019958496, 0.006120448112487793, 0.006112063884735107, 0.006057695865631103, 0.006124000072479248, 0.00601308822631836, 0.006122432231903076, 0.006044479846954345, 0.006080863952636718, 0.006085696220397949, 0.006058688163757324, 0.006076416015625, 0.006051839828491211, 0.006100543975830078, 0.006027616024017334, 0.005961760044097901, 0.0060299839973449705, 0.006102911949157715, 0.006037407875061035, 0.006073599815368653, 0.006081503868103027, 0.006115327835083008, 0.0060183038711547855, 0.006093344211578369, 0.005998847961425781, 0.006080031871795654, 0.0060375361442565915, 0.006096799850463867, 0.005994495868682862, 0.006083072185516358, 0.006192575931549072, 0.0061363840103149415, 0.00606825590133667, 0.006119359970092773, 0.006064159870147705, 0.006086656093597412, 0.006088064193725586, 0.00609772777557373, 0.006080319881439209, 0.006025216102600098, 0.006096672058105468, 0.006078879833221435, 0.006091839790344238, 0.006046495914459229, 0.006059967994689942, 0.005987616062164307, 0.006079520225524902, 0.006041088104248047, 0.006076255798339844, 0.0060189437866210935, 0.006098847866058349, 0.006029024124145508, 0.006087135791778564, 0.006004288196563721, 0.0060936641693115235, 0.005977568149566651, 0.006119967937469483, 0.00601423978805542, 0.006310624122619629, 0.00707583999633789, 0.006176767826080322, 0.006633471965789795, 0.006054912090301514, 0.006121920108795166, 0.006855231761932373, 0.006351103782653808, 0.006156032085418701, 0.006082399845123291, 0.006154399871826172, 0.006103040218353272, 0.006161791801452637, 0.00602784013748169, 0.006149951934814453, 0.0060561919212341305, 0.0061337599754333495, 0.006108895778656006, 0.00611356782913208, 0.0061394238471984865, 0.0060293121337890625, 0.006131743907928467, 0.0060490560531616215, 0.0061283202171325685, 0.006041600227355957, 0.0061155838966369626, 0.006034304141998291, 0.006162911891937256, 0.006039968013763428, 0.006115424156188965, 0.006021024227142334, 0.006127615928649902, 0.00604918384552002, 0.006104991912841797, 0.006006495952606201, 0.006144991874694824, 0.005993567943572998, 0.006117919921875, 0.00605017614364624, 0.006094111919403076, 0.006077343940734863, 0.006127840042114258, 0.006090335845947266, 0.006019008159637451, 0.006109248161315918, 0.006028319835662842, 0.006063072204589843, 0.00603545618057251, 0.006094848155975342, 0.006051839828491211, 0.006104447841644287, 0.0060013761520385745, 0.006127007961273193, 0.006008768081665039, 0.006096704006195068, 0.006044415950775146, 0.006135807991027832, 0.0059985918998718265, 0.006106400012969971, 0.0060538239479064945, 0.006113887786865235, 0.006019264221191406, 0.006100992202758789, 
0.005973311901092529, 0.006082240104675293, 0.006009856224060059, 0.006169792175292969, 0.006030015945434571, 0.006091040134429931, 0.006047584056854248, 0.0060702719688415525, 0.006360127925872803, 0.006351808071136475, 0.006922239780426025, 0.0065474557876586915, 0.006060256004333496, 0.00611030387878418, 0.0060722241401672365, 0.006128064155578613, 0.00603545618057251, 0.006175072193145752, 0.006090943813323975, 0.006181888103485108, 0.005944928169250488, 0.006095263957977295, 0.0060026879310607914, 0.006111104011535645, 0.006040768146514892, 0.006095808029174804, 0.006027328014373779, 0.006108352184295654, 0.006025983810424805, 0.006120639801025391, 0.006057919979095459, 0.006089600086212158, 0.006094848155975342, 0.006051392078399658, 0.00609935998916626, 0.006049983978271485, 0.006086495876312256, 0.006043647766113281, 0.006083807945251465, 0.006048255920410156, 0.006144288063049316, 0.006043647766113281, 0.00610313606262207, 0.006030655860900879, 0.006099455833435059, 0.006022687911987305, 0.006086656093597412, 0.006023744106292725, 0.00610700798034668, 0.005988831996917725, 0.006139552116394043, 0.006024640083312988, 0.0061075201034545895, 0.006013120174407959, 0.006108352184295654, 0.006031424045562744, 0.0061178879737854, 0.006014336109161377, 0.006113408088684082, 0.006002528190612793, 0.00614902400970459, 0.006053184032440186, 0.006046400070190429, 0.0060759038925170894, 0.006111775875091553, 0.006077919960021973, 0.00602569580078125, 0.006055679798126221, 0.006019519805908203, 0.0060752639770507814, 0.0061756157875061035, 0.00609657621383667, 0.006052256107330322, 0.006184959888458252, 0.00602947187423706, 0.006162464141845703, 0.0060332159996032714, 0.006166592121124268, 0.006027200222015381, 0.006116479873657227, 0.00600710391998291, 0.00608460807800293, 0.0060104641914367676, 0.006013120174407959, 0.006080800056457519, 0.006125279903411865, 0.006102047920227051, 0.006079040050506592, 0.006085375785827637, 0.006065824031829834, 0.0061147198677062985, 0.006041215896606445, 0.006087647914886475, 0.006037792205810547, 0.006096479892730713, 0.006029439926147461, 0.006115071773529053, 0.006030911922454834, 0.006136767864227295, 0.0060395197868347164, 0.006117152214050293, 0.006036960124969483, 0.006103775978088379, 0.006067647933959961, 0.006105472087860107, 0.006037504196166992, 0.006123839855194092, 0.005994175910949707, 0.006107135772705078, 0.006025055885314942, 0.006115327835083008, 0.006077983856201172, 0.00608028793334961, 0.006082784175872802, 0.006048543930053711, 0.006063839912414551, 0.006054175853729248, 0.006102880001068115, 0.006011168003082276, 0.006069983959197998, 0.006053887844085694, 0.006078464031219482, 0.006031360149383545, 0.006168575763702393, 0.00601907205581665, 0.006086656093597412, 0.006006368160247803, 0.0060965118408203125, 0.0059911999702453615, 0.006098400115966797, 0.005984928131103516, 0.006137343883514404, 0.006005119800567627, 0.006078815937042236, 0.006004384040832519, 0.0061042881011962894, 0.005983007907867432, 0.006080639839172363, 0.005982079982757568, 0.006089824199676514, 0.006047711849212646, 0.006165599822998047, 0.006025055885314942, 0.006098944187164307, 0.006030752182006836, 0.00609984016418457, 0.005958655834197998, 0.006043903827667237, 0.006097119808197021, 0.006044191837310791, 0.006149888038635254, 0.006027520179748535, 0.006115520000457763, 0.006038784027099609, 0.0061138558387756345, 0.006062111854553223, 0.006108352184295654, 0.0060731201171875, 0.006123424053192139, 0.006006591796875, 0.006138463973999023, 
0.006179520130157471, 0.006116352081298828, 0.006008831977844238, 0.006098144054412842, 0.006034207820892334, 0.006120639801025391, 0.006045919895172119, 0.006152512073516846, 0.006084991931915283, 0.006039455890655517, 0.006078464031219482, 0.006042751789093017, 0.006078559875488282, 0.00602396821975708, 0.00618393611907959, 0.00602623987197876, 0.00614515209197998, 0.0060182719230651854, 0.006061791896820068, 0.006037439823150635, 0.006094848155975342, 0.00605292797088623, 0.006115712165832519, 0.006005152225494385, 0.006123680114746094, 0.006024479866027832, 0.00652566385269165, 0.006076416015625, 0.006078464031219482, 0.006395455837249756, 0.006068640232086182, 0.006094880104064941, 0.006053760051727295, 0.006109087944030762, 0.00605398416519165, 0.006072447776794433, 0.006041376113891601, 0.006084095954895019, 0.006025504112243652, 0.0061262397766113285, 0.005998496055603027, 0.00616864013671875, 0.006018879890441894, 0.00611356782913208, 0.006024928092956543, 0.00610700798034668, 0.005992576122283936, 0.006095935821533203, 0.005888000011444092, 0.006104703903198242, 0.00600054407119751, 0.006103519916534424, 0.006017119884490967, 0.00609225606918335, 0.006158048152923584, 0.006060544013977051, 0.006098336219787598, 0.006168384075164795, 0.006118400096893311, 0.006033408164978027, 0.006168575763702393, 0.006031072139739991, 0.0060910720825195315, 0.006027040004730225, 0.006101183891296387, 0.006027040004730225, 0.006099167823791504, 0.0060018239021301266, 0.006101408004760742, 0.00601043176651001, 0.0062473278045654295, 0.00606982421875, 0.006124991893768311, 0.0060234880447387696, 0.006132351875305176, 0.006042816162109375, 0.006097760200500488, 0.006049471855163574, 0.006047200202941894, 0.00607747220993042, 0.0060548801422119145, 0.006148767948150635, 0.006041791915893555, 0.006110976219177246, 0.006125823974609375, 0.006094848155975342, 0.006018784046173096, 0.006072383880615234, 0.006000832080841064, 0.006080480098724366, 0.0060059518814086915, 0.006084928035736084, 0.0060769920349121095, 0.006141695976257324, 0.006017280101776123, 0.0061562881469726565, 0.006019264221191406, 0.006164576053619385, 0.006026976108551026, 0.006584383964538574, 0.006116320133209228, 0.006111839771270752, 0.0060993280410766605, 0.006045695781707764, 0.006076159954071045, 0.0060145602226257325, 0.006077151775360107, 0.006045631885528565, 0.006072319984436035, 0.006017024040222168, 0.006090752124786377, 0.00593123197555542, 0.0060845441818237304, 0.006006847858428955, 0.00611737585067749, 0.005980160236358643, 0.006187007904052734, 0.006137504100799561, 0.006121664047241211, 0.006044991970062256, 0.00613647985458374, 0.006004928112030029, 0.006103263854980469, 0.006133535861968994, 0.006075456142425537, 0.006099904060363769, 0.006059391975402832, 0.006105728149414062, 0.0060555200576782224, 0.006101408004760742, 0.006056032180786133, 0.006111135959625244, 0.006168575763702393, 0.006116479873657227, 0.006100895881652832, 0.006146848201751709, 0.006003136157989502, 0.00609830379486084, 0.0059985599517822265, 0.006097311973571777, 0.006232063770294189, 0.006129312038421631, 0.00600713586807251, 0.006137119770050049, 0.006101727962493897, 0.006150144100189209, 0.0060764479637146, 0.00602668809890747, 0.006091584205627441, 0.0060497279167175295, 0.00607209587097168, 0.006033408164978027, 0.006073791980743409, 0.006064703941345215, 0.0060661759376525876, 0.006021120071411133, 0.0060841598510742185, 0.006013376235961914, 0.006594624042510986, 0.006074592113494873, 0.00613369607925415, 0.00599564790725708, 
0.006148640155792236, 0.005988480091094971, 0.006107295989990234, 0.006110176086425782, 0.006097536087036132, 0.0061521601676940915, 0.006063712120056152, 0.0061356801986694335, 0.006022304058074951, 0.006078207969665527, 0.006031199932098389, 0.006096960067749024, 0.005920767784118652, 0.006104608058929443, 0.006017183780670166, 0.006125984191894532, 0.005994912147521973, 0.0060783357620239255, 0.006049856185913086, 0.006156223773956299, 0.006415808200836181, 0.006105887889862061, 0.0060128321647644044, 0.006111231803894043, 0.006059552192687988, 0.006114848136901855, 0.006121791839599609, 0.0061008319854736325, 0.006149919986724853, 0.006056032180786133, 0.006118656158447266, 0.006083839893341064, 0.006095615863800049, 0.006047391891479492, 0.006068128108978272, 0.006119296073913574, 0.0061073598861694335, 0.005985407829284668, 0.006107071876525879, 0.0060115838050842285, 0.006093056201934815, 0.00603545618057251, 0.006104864120483398, 0.00601094388961792, 0.00610265588760376, 0.005986847877502441, 0.006113279819488526, 0.006006015777587891, 0.006081088066101074, 0.0059845118522644045, 0.006088640213012695, 0.006013279914855957, 0.006176415920257568, 0.006033535957336426, 0.0060721921920776365, 0.0060928001403808595, 0.006038943767547608, 0.00606217622756958, 0.006050303936004638, 0.00611737585067749, 0.0060160961151123045, 0.006056064128875732, 0.006023583889007568, 0.006082335948944092, 0.006008480072021484, 0.0061363840103149415, 0.0060193600654602055, 0.006063936233520507, 0.0060850238800048825, 0.0061910400390625, 0.006025152206420898, 0.006147103786468506, 0.006017951965332031, 0.006107071876525879, 0.006049088001251221, 0.005980224132537842, 0.006062079906463623, 0.006081791877746582, 0.006020127773284912, 0.006163424015045166, 0.006039648056030273, 0.006075039863586425, 0.0060657281875610355, 0.006139935970306397, 0.006093215942382812, 0.006053887844085694, 0.006105088233947754, 0.006044896125793457, 0.0060813121795654295, 0.00602342414855957, 0.006100736141204834, 0.006062079906463623, 0.006098944187164307, 0.006066463947296143, 0.00646073579788208, 0.006035871982574463, 0.006139776229858398, 0.006037631988525391, 0.006137856006622314, 0.006024320125579834, 0.006132607936859131, 0.0060702719688415525, 0.006090144157409668, 0.0060854401588439945, 0.006368095874786377, 0.006142496109008789, 0.006074848175048828, 0.00613369607925415, 0.006078464031219482, 0.006160384178161621, 0.006004735946655273, 0.00613100814819336, 0.006019711971282959, 0.006127168178558349, 0.006023263931274414, 0.00611737585067749, 0.0060215358734130855, 0.006174592018127442, 0.00605401611328125, 0.006164480209350586, 0.006073887825012207, 0.006092735767364502, 0.006156832218170166, 0.006086656093597412, 0.00615180778503418, 0.006069952011108398, 0.0061344318389892575, 0.0060759038925170894, 0.0061456642150878905, 0.006052063941955566, 0.006126048088073731, 0.006125792026519776, 0.006155935764312745, 0.006029664039611816, 0.006123519897460937, 0.006035295963287354, 0.006119103908538819, 0.006033599853515625, 0.006162975788116455, 0.006112576007843018, 0.006101952075958252, 0.006110239982604981, 0.006050879955291748, 0.006059679985046387, 0.006052063941955566, 0.006082304000854492, 0.006062111854553223, 0.006096896171569824, 0.006043680191040039, 0.006084512233734131, 0.0060349760055541995, 0.0061066880226135255, 0.00604259204864502, 0.00608892822265625, 0.00601475191116333, 0.006148096084594727, 0.006026303768157959, 0.006179647922515869, 0.006037792205810547, 0.00613318395614624, 0.006080383777618408, 
0.006130208015441894, 0.006071296215057373, 0.006104063987731933, 0.006086368083953858, 0.0060524802207946775, 0.006118624210357666, 0.006047935962677002, 0.006119135856628418, 0.006076576232910157, 0.0061270718574523925, 0.006050271987915039, 0.00609119987487793, 0.006049088001251221, 0.006125792026519776, 0.00603334379196167, 0.006131648063659668, 0.006103648185729981, 0.006127615928649902, 0.006013216018676758, 0.006116415977478027, 0.006001632213592529, 0.006108736038208008, 0.00667855978012085, 0.006311488151550293, 0.006097439765930176, 0.006041088104248047, 0.006105120182037353, 0.006038303852081299, 0.006119103908538819, 0.006055903911590576, 0.006110559940338134, 0.0060440640449523925, 0.0061504321098327635, 0.006021471977233886, 0.0061231679916381836, 0.006021408081054688, 0.006184671878814697, 0.006075647830963135, 0.006089375972747803, 0.006014688014984131, 0.006020736217498779, 0.006488416194915771, 0.006039360046386719, 0.006076288223266601, 0.006027743816375732, 0.0061047677993774415, 0.006067647933959961, 0.006099232196807861, 0.006040160179138184, 0.006558879852294922, 0.0060342721939086914, 0.006133600234985352, 0.006061503887176514, 0.0061561279296875, 0.006050687789916992, 0.006086656093597412, 0.006092512130737305, 0.006037792205810547, 0.0061171197891235355, 0.006037759780883789, 0.006086656093597412, 0.006047808170318604, 0.006115263938903809, 0.006170623779296875, 0.006129280090332031, 0.00605452823638916, 0.00613267183303833, 0.00609164810180664, 0.006318016052246094, 0.006119423866271972, 0.006426623821258545, 0.006148096084594727, 0.006838272094726563, 0.007007552146911621, 0.006165440082550049, 0.006115071773529053, 0.0061972479820251464, 0.006086656093597412, 0.006210912227630616, 0.006158368110656738, 0.0061569280624389645, 0.006038911819458008, 0.006229695796966553, 0.006038047790527344, 0.006156703948974609, 0.005992447853088379, 0.006135807991027832, 0.006017024040222168, 0.006116799831390381, 0.006009407997131348, 0.006107423782348633, 0.006047455787658691, 0.006109407901763916, 0.006102208137512207, 0.006099487781524658, 0.006108543872833252, 0.006193664073944092, 0.00622815990447998, 0.006105088233947754, 0.006107135772705078, 0.006057151794433594, 0.0062568001747131344, 0.0060340800285339355, 0.006021120071411133, 0.006011903762817383, 0.007173376083374023, 0.006127359867095947, 0.006037504196166992, 0.006119423866271972, 0.006117663860321045, 0.006205152034759522, 0.006049312114715576, 0.006117856025695801, 0.006013984203338623, 0.006106080055236817, 0.006028351783752442, 0.006089663982391357, 0.00603872013092041, 0.006137887954711914, 0.006065087795257568, 0.0066722240447998045, 0.006217728137969971, 0.006082560062408447, 0.006161695957183838, 0.006058623790740967, 0.006146143913269043, 0.0060210561752319335, 0.006141248226165771, 0.0061138558387756345, 0.006144192218780518, 0.00603334379196167, 0.006131072044372559, 0.006087488174438477, 0.0061049599647521975, 0.006085760116577148, 0.0060588159561157226, 0.006111392021179199, 0.00608019208908081, 0.006350080013275147, 0.006062687873840332, 0.006127200126647949, 0.006054751873016358, 0.006128960132598877, 0.006023808002471924, 0.006107135772705078, 0.0060124797821044925, 0.006106783866882324, 0.006004992008209228, 0.006121471881866455, 0.005995039939880371, 0.006115488052368164, 0.006012415885925293, 0.006107295989990234, 0.006008863925933838, 0.00611568021774292, 0.006035071849822998, 0.006090240001678467, 0.00610700798034668, 0.006068511962890625, 0.006087007999420166, 0.006019264221191406, 
0.0060928001403808595, 0.00603545618057251, 0.006069695949554444, 0.006040063858032227, 0.00606601619720459, 0.005933184146881104, 0.006119423866271972, 0.006051839828491211, 0.006092127799987793, 0.00599948787689209, 0.006075744152069092, 0.006013376235961914, 0.006072480201721192, 0.006005631923675537, 0.006136799812316894, 0.0060128321647644044, 0.00608787202835083, 0.006022240161895752, 0.006121471881866455, 0.005998400211334229, 0.006103104114532471, 0.006001984119415283, 0.006133376121520996, 0.0060382719039917, 0.006148128032684326, 0.006074592113494873, 0.0061040959358215335, 0.006079455852508545, 0.006061151981353759, 0.006097663879394531, 0.006060192108154297, 0.006096704006195068, 0.006105279922485352, 0.006113599777221679, 0.006051519870758057, 0.006128640174865723, 0.006062431812286377, 0.00610595178604126, 0.006110688209533691, 0.006142591953277588, 0.006022880077362061, 0.006551136016845703, 0.006021312236785889, 0.006093023777008056, 0.006067359924316406, 0.0060711359977722165, 0.006079936027526855, 0.006047679901123047, 0.006080671787261963, 0.006055935859680176, 0.006162911891937256, 0.006080512046813965, 0.006083936214447022, 0.006023839950561523, 0.006110335826873779, 0.0060424962043762204, 0.0060661759376525876, 0.006019008159637451, 0.006150207996368408, 0.005992447853088379, 0.006115327835083008, 0.006002143859863281, 0.00610316801071167, 0.006015168190002441, 0.006136256217956543, 0.006014848232269287, 0.0061131839752197265, 0.006017055988311768, 0.005997344017028809, 0.0060234560966491695, 0.006090688228607178, 0.005996416091918946, 0.0061415362358093264, 0.006029344081878662, 0.00613372802734375, 0.00612992000579834, 0.0060356159210205075, 0.006096159934997559, 0.006052576065063477, 0.006111328125, 0.006036992073059082, 0.006093215942382812, 0.006047264099121093, 0.006140384197235107, 0.006023168087005615, 0.006067327976226807, 0.006038080215454102, 0.006105760097503662, 0.006034207820892334, 0.006098879814147949, 0.006016223907470703, 0.006147136211395263, 0.006036128044128418, 0.006211584091186524, 0.0060430078506469725, 0.006195839881896973, 0.0061356801986694335, 0.0061669120788574215, 0.0061212158203125, 0.006118912220001221, 0.006123775959014892, 0.006088736057281494, 0.006150591850280762, 0.006049824237823486, 0.006102784156799317, 0.006076032161712646, 0.006104479789733887, 0.006013919830322265, 0.006126944065093994, 0.006024064064025879, 0.006124991893768311, 0.006021312236785889, 0.006147903919219971, 0.006039008140563965, 0.006132607936859131, 0.006016223907470703, 0.006140927791595459, 0.006090528011322022, 0.006122719764709472, 0.0060854082107543945, 0.006052127838134766, 0.006100480079650879, 0.006049119949340821, 0.006093696117401123, 0.006042943954467773, 0.006103519916534424, 0.006037727832794189, 0.006092959880828857, 0.006022751808166504, 0.00608460807800293, 0.006098847866058349]",tokens/s,163.84798211096114,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1849.581568,2552.102912,0.0,2149.5808,2147.522048,s,1,8.932517578125,8.932517578125,0.0,8.932517578125,8.932517578125,8.932517578125,8.932517578125,[8.932517578125],,kWh,4.587654665417631e-05,5.049788951118639e-06,1.4799456283999535e-05,6.572579188929449e-05,,MB,1950.629888,2621.308928,0.0,2204.106752,2190.263808,s,10,0.6575093994140624,0.06575093994140625,0.0003201275195384166,0.06584153366088868,0.06603841476440431,0.06615483894348145,0.06624797828674317,"[0.0662712631225586, 0.06594774627685547, 0.06601254272460938, 0.06579401397705079, 0.06531948852539063, 0.06588230133056641, 0.06527225494384765, 0.06531542205810546, 0.06589360046386719, 0.06580076599121094]",tokens/s,3893.4804616958118,kWh,1.975344695073149e-06,2.177883469633441e-07,1.3117484217703086e-06,3.5048814638068013e-06,tokens/kWh,73040986.59072694,MB,1972.543488,2623.40608,0.0,2206.203904,2190.266368,s,10,16.42819201660156,1.642819201660156,0.012774036982459025,1.6418552856445312,1.658219775390625,1.6635571533203126,1.6678270556640626,"[1.650435791015625, 1.66889453125, 1.64455517578125, 1.628999267578125, 1.626092041015625, 1.6447880859375, 1.65703369140625, 1.6391553955078124, 1.6291549072265625, 1.6390831298828126]",tokens/s,38.3487117367116,kWh,4.794069749492657e-05,5.287607962529969e-06,2.603182900863022e-05,7.926013446608675e-05,tokens/kWh,794851.036077361,,s,630,16.42602668571472,0.026073058231293206,0.0004316849353683703,0.026001808166503906,0.026532703399658204,0.026792141151428223,0.027710996437072755,"[0.02674278450012207, 0.026243072509765625, 0.026229984283447267, 0.026191680908203126, 0.026148927688598632, 0.026071487426757814, 0.026029823303222656, 0.025975519180297852, 0.02605183982849121, 0.026056863784790038, 0.026423904418945314, 0.026034175872802736, 0.026417152404785156, 0.026068992614746093, 0.026068288803100585, 0.026128288269042968, 0.02644806480407715, 0.026116704940795897, 0.026167295455932618, 0.02598297691345215, 0.026103424072265624, 0.026159488677978515, 0.02614886474609375, 0.02750054359436035, 0.027043840408325196, 0.026005504608154296, 0.02613248062133789, 0.026464128494262697, 0.026341184616088868, 0.026461599349975586, 0.026277952194213867, 0.02624787139892578, 0.02624118423461914, 0.02617344093322754, 0.02613248062133789, 0.026080671310424804, 0.02629487991333008, 0.026124063491821288, 0.026134719848632814, 0.02607417678833008, 0.026042816162109374, 0.026047008514404297, 0.026015199661254883, 0.025932319641113283, 0.025968639373779297, 0.026042367935180662, 0.02591862487792969, 0.02594492721557617, 0.02605244827270508, 0.026012832641601563, 0.025999807357788087, 0.026218175888061523, 0.0265467529296875, 0.02611235237121582, 0.026020959854125978, 0.026112672805786132, 0.02598111915588379, 0.026148895263671874, 0.02611568069458008, 0.026097440719604494, 0.02651807975769043, 0.026111263275146485, 0.02651420783996582, 0.027114944458007814, 0.026743616104125977, 0.02748361587524414, 0.026464223861694336, 0.02629075241088867, 0.02622390365600586, 0.02608742332458496, 0.02602249526977539, 0.02625446319580078, 0.026102783203125, 0.026177440643310547, 0.026123743057250976, 0.02619046401977539, 0.026317983627319335, 0.026294208526611327, 0.026569599151611327, 0.027146272659301758, 0.026967424392700196, 0.02705846405029297, 0.02729404830932617, 0.026927104949951174, 0.02676736068725586, 0.02673151969909668, 0.026704799652099608, 0.02655241584777832, 0.02633113670349121, 0.02654739189147949, 0.026569536209106445, 0.026178720474243165, 
0.026415103912353514, 0.026275840759277344, 0.02636595153808594, 0.026350431442260742, 0.026432672500610353, 0.026339519500732423, 0.026330783843994142, 0.026178495407104492, 0.02657267189025879, 0.026149152755737304, 0.026544031143188478, 0.026468351364135743, 0.02635366439819336, 0.026462207794189452, 0.026206207275390626, 0.028513952255249022, 0.02770979118347168, 0.026472448348999023, 0.026572799682617186, 0.026535295486450197, 0.026245439529418945, 0.026191808700561522, 0.02602182388305664, 0.026061248779296876, 0.02607923126220703, 0.02608076858520508, 0.02599776077270508, 0.025847103118896483, 0.025808832168579102, 0.02587116813659668, 0.026232831954956053, 0.026664960861206056, 0.026553728103637694, 0.02747862434387207, 0.027448928833007813, 0.026734752655029295, 0.028401920318603516, 0.026857376098632812, 0.026654592514038088, 0.02630259132385254, 0.026436800003051757, 0.02592860794067383, 0.026050079345703126, 0.026054752349853515, 0.02606937599182129, 0.026338687896728517, 0.026474687576293947, 0.02627833557128906, 0.02611814308166504, 0.026046464920043946, 0.025948160171508788, 0.02600943946838379, 0.025905311584472655, 0.026214271545410155, 0.026009727478027343, 0.026048511505126954, 0.02610963249206543, 0.026059072494506837, 0.025989120483398437, 0.02594963264465332, 0.025836095809936524, 0.025847103118896483, 0.025796768188476562, 0.025698848724365234, 0.025836799621582033, 0.02686390495300293, 0.025980735778808595, 0.026048704147338866, 0.02600214385986328, 0.025886463165283202, 0.025786367416381836, 0.026045663833618164, 0.02579360008239746, 0.02586595153808594, 0.025964544296264647, 0.025758783340454103, 0.025903871536254883, 0.02597452735900879, 0.026275327682495117, 0.025911424636840822, 0.02587321662902832, 0.025886720657348632, 0.025800703048706054, 0.025792671203613282, 0.02620195198059082, 0.026036224365234374, 0.026115936279296877, 0.025980991363525392, 0.02593596839904785, 0.025835519790649415, 0.025847808837890625, 0.026204160690307617, 0.025846975326538086, 0.025874719619750977, 0.02577257537841797, 0.025700063705444337, 0.02609708786010742, 0.026495391845703126, 0.025786079406738282, 0.025703296661376954, 0.02565052795410156, 0.025690143585205077, 0.025627264022827147, 0.025966175079345705, 0.025622943878173828, 0.02599519920349121, 0.026164863586425783, 0.025685632705688476, 0.02562495994567871, 0.025698272705078126, 0.028697055816650392, 0.0259237117767334, 0.025998559951782227, 0.02590787124633789, 0.02694963264465332, 0.02622812843322754, 0.02601430320739746, 0.025928768157958984, 0.025729984283447267, 0.025728799819946288, 0.025843936920166014, 0.026247167587280275, 0.025907199859619142, 0.02614476776123047, 0.025695903778076172, 0.02573347282409668, 0.025673120498657227, 0.025658143997192382, 0.025749311447143555, 0.02611958312988281, 0.02577836799621582, 0.025694623947143554, 0.025817087173461914, 0.025640544891357423, 0.02566595268249512, 0.025693920135498045, 0.02578611183166504, 0.025641504287719726, 0.025651199340820312, 0.025661407470703126, 0.025656864166259764, 0.02561484718322754, 0.025665536880493164, 0.025573375701904297, 0.025620479583740235, 0.025503936767578124, 0.02561414337158203, 0.025577280044555666, 0.025599775314331056, 0.02563033676147461, 0.02569500732421875, 0.025794496536254884, 0.025634143829345705, 0.025905887603759767, 0.025863967895507812, 0.02596393585205078, 0.02596451187133789, 0.02587731170654297, 0.025656448364257813, 0.02575859260559082, 0.02646224021911621, 0.025960447311401368, 0.025972543716430666, 0.02593401527404785, 
0.025934879302978515, 0.025850847244262697, 0.02582476806640625, 0.025895423889160156, 0.025839616775512695, 0.025811967849731447, 0.025827455520629882, 0.025852800369262696, 0.02595430374145508, 0.02584886360168457, 0.025780351638793945, 0.025783136367797853, 0.02588467216491699, 0.025813087463378907, 0.02593577575683594, 0.025825279235839844, 0.0260581111907959, 0.025971328735351563, 0.025818975448608398, 0.02575129508972168, 0.025733535766601562, 0.025742496490478516, 0.025719648361206056, 0.02571468734741211, 0.025708608627319336, 0.025644191741943358, 0.025676576614379883, 0.025634815216064453, 0.025808895111083984, 0.02596028709411621, 0.025749664306640625, 0.02589286422729492, 0.025728096008300783, 0.025967391967773437, 0.025979007720947266, 0.025782272338867186, 0.025870336532592773, 0.025694208145141603, 0.025665536880493164, 0.025742464065551758, 0.025654144287109375, 0.025689727783203126, 0.025671615600585937, 0.025714975357055664, 0.025741472244262695, 0.025770143508911134, 0.025709791183471678, 0.025759552001953127, 0.025721664428710937, 0.02575564765930176, 0.025914560317993163, 0.02568275260925293, 0.02573721694946289, 0.0259051513671875, 0.025757696151733397, 0.025738624572753905, 0.025606496810913086, 0.025733407974243165, 0.02562777519226074, 0.02609152030944824, 0.025971712112426756, 0.02571980857849121, 0.025644512176513673, 0.02577667236328125, 0.02571673583984375, 0.02572287940979004, 0.025814207077026367, 0.02598566436767578, 0.025699871063232422, 0.026581024169921873, 0.02584435272216797, 0.025837663650512696, 0.026208160400390625, 0.02574336051940918, 0.02561542320251465, 0.02565558433532715, 0.02620185661315918, 0.02572550392150879, 0.02566793632507324, 0.025683967590332032, 0.02574950408935547, 0.02589388847351074, 0.0259368953704834, 0.025915391921997072, 0.026077024459838866, 0.026031360626220704, 0.026112768173217775, 0.02604662322998047, 0.026111295700073242, 0.026432191848754883, 0.026464256286621093, 0.026136512756347655, 0.02609347152709961, 0.02617945671081543, 0.026304704666137695, 0.026210399627685548, 0.026225984573364256, 0.0264136962890625, 0.026269760131835938, 0.02632089614868164, 0.026351615905761717, 0.026402816772460938, 0.02627743911743164, 0.02606675148010254, 0.026062816619873048, 0.026269535064697265, 0.02627872085571289, 0.026437631607055666, 0.026326688766479492, 0.02635811233520508, 0.026328832626342773, 0.027212032318115233, 0.02608723258972168, 0.026372287750244142, 0.026477823257446288, 0.026599775314331053, 0.026247583389282226, 0.02614271926879883, 0.026125471115112306, 0.026061567306518554, 0.026053855895996094, 0.026173568725585936, 0.026969343185424804, 0.026237695693969727, 0.02643484878540039, 0.026378976821899415, 0.02633283233642578, 0.02660121536254883, 0.026480607986450196, 0.02633907127380371, 0.02636684799194336, 0.026259456634521484, 0.026212287902832032, 0.026122592926025392, 0.0262458553314209, 0.02657587242126465, 0.027598592758178712, 0.026549760818481444, 0.02655523109436035, 0.02640892791748047, 0.02631999969482422, 0.026379072189331054, 0.02657244873046875, 0.02689878463745117, 0.026556255340576172, 0.02646441650390625, 0.02654982376098633, 0.02644326400756836, 0.02644633674621582, 0.02632156753540039, 0.026826528549194335, 0.026263551712036134, 0.02620732879638672, 0.026338207244873048, 0.02607673645019531, 0.026060319900512694, 0.02604921531677246, 0.02610812759399414, 0.026023935317993165, 0.026015743255615235, 0.026063072204589845, 0.026064064025878905, 0.026145376205444337, 0.02609152030944824, 
0.026413055419921876, 0.026092544555664062, 0.026265951156616212, 0.026153152465820312, 0.026238527297973633, 0.02651024055480957, 0.02604595184326172, 0.026110464096069336, 0.026096736907958985, 0.026214879989624025, 0.026209760665893554, 0.026118560791015624, 0.02611052894592285, 0.026140928268432617, 0.02597657585144043, 0.02609971237182617, 0.02584681510925293, 0.025903520584106447, 0.02614944076538086, 0.02608064079284668, 0.026083295822143554, 0.02653241539001465, 0.02600147247314453, 0.025831424713134765, 0.025769056320190428, 0.02577894401550293, 0.025792831420898436, 0.02581283187866211, 0.02597887992858887, 0.026019840240478515, 0.025870367050170897, 0.0258287353515625, 0.025895519256591795, 0.025882623672485353, 0.025987071990966795, 0.026021888732910156, 0.025774080276489256, 0.02572697639465332, 0.02582649612426758, 0.02570719909667969, 0.02566771125793457, 0.025835519790649415, 0.025850143432617188, 0.026322656631469727, 0.026521600723266602, 0.02644528007507324, 0.026446239471435547, 0.026563968658447266, 0.026501888275146483, 0.026177440643310547, 0.026361951828002928, 0.02610380744934082, 0.025900447845458984, 0.026325216293334962, 0.026056671142578126, 0.027052448272705077, 0.027234304428100587, 0.027763839721679687, 0.02610883140563965, 0.025966144561767577, 0.026018144607543946, 0.0260402889251709, 0.02583523178100586, 0.02578825569152832, 0.026083871841430663, 0.025935871124267578, 0.02573481559753418, 0.025762304306030274, 0.025855487823486328, 0.025770336151123046, 0.02586134338378906, 0.025598751068115235, 0.025632095336914063, 0.025764511108398436, 0.026118175506591797, 0.02574496078491211, 0.027072927474975587, 0.025595903396606445, 0.02568592071533203, 0.025593791961669922, 0.02555628776550293, 0.025567583084106445, 0.025510400772094727, 0.025597951889038087, 0.02588435173034668, 0.025711999893188477, 0.02556118392944336, 0.025694719314575197, 0.025637216567993164, 0.02573619270324707, 0.025746431350708008, 0.026054399490356445, 0.025785856246948242, 0.025872671127319335, 0.025688543319702148, 0.025668832778930666, 0.025602848052978515, 0.02572902488708496, 0.025542240142822265, 0.0255568962097168, 0.025600255966186522, 0.025565439224243165, 0.025587711334228515, 0.025661439895629884, 0.025825056076049804, 0.02576582336425781, 0.025907487869262696, 0.025849376678466797, 0.02579199981689453, 0.026223583221435545, 0.02574473571777344, 0.025638975143432618, 0.02575424003601074, 0.025663455963134765, 0.025659391403198242, 0.025610240936279297, 0.025752960205078126, 0.025694847106933594, 0.025612287521362305, 0.025654592514038087, 0.025776704788208007, 0.025604223251342772, 0.0257445125579834, 0.02578521537780762, 0.025780223846435548, 0.025694208145141603, 0.025566303253173828, 0.025598400115966796, 0.028389856338500975, 0.027711488723754882, 0.02635968017578125, 0.02623910331726074, 0.025794559478759766, 0.025843711853027345, 0.02572854423522949, 0.02560867118835449, 0.02570649528503418, 0.025697471618652344, 0.026640800476074217, 0.026690303802490236, 0.026061887741088866, 0.02580956840515137, 0.025911231994628907, 0.02584752082824707, 0.02581648063659668, 0.025799680709838867, 0.025710464477539063, 0.026094432830810546, 0.025967872619628907, 0.025885568618774415, 0.025767936706542968, 0.025815040588378906, 0.026812416076660156, 0.02842624092102051, 0.025748928070068358, 0.027050559997558593, 0.02569366455078125, 0.025621023178100586, 0.02559312057495117, 0.025741600036621095, 0.0255548152923584, 0.02571731185913086, 0.025523551940917967, 0.025585887908935546, 
0.02651513671875, 0.02577401542663574, 0.025602880477905272, 0.02591961669921875, 0.025714080810546876, 0.025683584213256835, 0.0258855037689209, 0.02573030471801758, 0.0257728328704834, 0.025579519271850586, 0.025582656860351563, 0.025616384506225585, 0.025537055969238283, 0.0264237117767334, 0.02591744041442871, 0.02581865692138672, 0.025732736587524414, 0.02582143974304199, 0.025542688369750977, 0.025655872344970704, 0.025993215560913087, 0.025916831970214844, 0.025815359115600588, 0.02578256034851074, 0.02574336051940918, 0.026047487258911133, 0.026170303344726562, 0.026323007583618163, 0.02643574333190918, 0.026536895751953126, 0.026411935806274413, 0.026388479232788087, 0.026293472290039064, 0.02646505546569824, 0.026458112716674805, 0.026269695281982423, 0.026330432891845702, 0.026198720932006835, 0.02645180892944336, 0.02620636749267578, 0.025862144470214843, 0.026269695281982423, 0.026178911209106447, 0.026059423446655273, 0.026004671096801758, 0.025846591949462892]",tokens/s,38.35376698540824,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 128.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 255337 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 16.07 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4763.762688,6194.855936,0.0,5792.333824,5786.358272,s,1,11.557708984375,11.557708984375,0.0,11.557708984375,11.557708984375,11.557708984375,11.557708984375,[11.557708984375],,kWh,0.00012397253522500478,1.3667743280193871e-05,3.925780918399152e-05,0.00017689808768919018,,MB,1793.384448,6396.182528,0.0,5978.980352,5957.943808,s,10,2.3005235443115235,0.23005235443115235,0.00035828436726894464,0.2301343994140625,0.23039855041503907,0.23049457092285155,0.23057138732910157,"[0.22934649658203124, 0.23007139587402345, 0.22965939331054688, 0.22992620849609374, 0.23059059143066407, 0.2303185272216797, 0.23037721252441407, 0.23019740295410157, 0.22976786804199217, 0.23026844787597656]",tokens/s,1112.7901761014707,kWh,6.744941210795453e-06,7.436481644725298e-07,4.4611209931363915e-06,1.1949710368404374e-05,tokens/kWh,21423113.3732644,MB,1801.261056,6408.76544,0.0,5991.563264,5959.126016,s,10,24.643014648437504,2.4643014648437505,0.014121482163275259,2.4597720947265627,2.4792564453124997,2.48619365234375,2.49174341796875,"[2.47771484375, 2.493130859375, 2.46991357421875, 
2.462212646484375, 2.45733154296875, 2.451925048828125, 2.456629638671875, 2.47704638671875, 2.44479296875, 2.452317138671875]",tokens/s,25.56505399147443,kWh,7.131078090420475e-05,7.86574090149259e-06,4.7312781537063585e-05,0.0001264893033427609,tokens/kWh,498065.8311421204,,s,630,24.640621738433815,0.03911209799751403,0.0005528795307636468,0.03898497772216797,0.03963233680725098,0.039965106964111326,0.04107597820281983,"[0.04013987350463867, 0.039458976745605466, 0.03951891326904297, 0.03917027282714844, 0.03963478469848633, 0.039272449493408204, 0.039185630798339845, 0.03926505661010742, 0.03928054428100586, 0.041134078979492186, 0.0398535041809082, 0.039313888549804686, 0.03914361572265625, 0.03909008026123047, 0.03908208084106445, 0.03909952163696289, 0.03941465759277344, 0.03904217529296875, 0.03911945724487305, 0.038887489318847654, 0.038770912170410156, 0.038694911956787106, 0.038795265197753906, 0.03897139358520508, 0.03894681549072266, 0.038877185821533204, 0.038860191345214845, 0.03881148910522461, 0.03874687957763672, 0.03882124710083008, 0.03937139129638672, 0.03909817504882813, 0.039092449188232424, 0.038983104705810546, 0.03897923278808594, 0.03912726211547852, 0.038857345581054685, 0.038811134338378905, 0.03877536010742187, 0.038925537109375, 0.03883488082885742, 0.03899808120727539, 0.03902259063720703, 0.03917619323730469, 0.03933184051513672, 0.03931955337524414, 0.03927449417114258, 0.039329792022705076, 0.039427425384521486, 0.03926492691040039, 0.03936870574951172, 0.03959331130981445, 0.03959190368652344, 0.03945289611816406, 0.0394686393737793, 0.039392127990722656, 0.04437401580810547, 0.03982950210571289, 0.03960335922241211, 0.039526657104492186, 0.039834209442138675, 0.03941580963134766, 0.03959603118896484, 0.04061676788330078, 0.04001398468017578, 0.03970150375366211, 0.03954774475097656, 0.03944857788085938, 0.03973263931274414, 0.04001395034790039, 0.03990544128417969, 0.03992966461181641, 0.039887359619140625, 0.03970457458496094, 0.03963206481933594, 0.03974636840820313, 0.03996489715576172, 0.03960166549682617, 0.03927872085571289, 0.03954508972167969, 0.03970207977294922, 0.03940940856933594, 0.039295360565185546, 0.039207134246826175, 0.03923875045776367, 0.03926927947998047, 0.03955916976928711, 0.039567359924316405, 0.03968806457519531, 0.03961260986328125, 0.03974342346191406, 0.040136703491210936, 0.0397760009765625, 0.039717121124267576, 0.03957555389404297, 0.039572959899902345, 0.03946140670776367, 0.040033279418945314, 0.03925411224365234, 0.03927094268798828, 0.03915200042724609, 0.039749889373779296, 0.039436031341552734, 0.039218528747558594, 0.039346752166748045, 0.040093505859375, 0.040618270874023435, 0.03948659133911133, 0.03914227294921875, 0.03914137649536133, 0.039360511779785154, 0.03903692626953125, 0.03929702377319336, 0.039106559753417966, 0.03952457427978515, 0.03906732940673828, 0.03910665512084961, 0.039594207763671875, 0.040957313537597656, 0.039340446472167966, 0.03941980743408203, 0.03929510498046875, 0.039298206329345706, 0.0394002571105957, 0.039023841857910156, 0.03931331253051758, 0.0401162223815918, 0.03921920013427734, 0.03914342498779297, 0.03900601577758789, 0.038836414337158204, 0.038776832580566405, 0.03888556671142578, 0.03867219161987305, 0.0389222412109375, 0.038694911956787106, 0.038637569427490234, 0.03998025512695313, 0.041124446868896485, 0.03931769561767578, 0.038983646392822265, 0.039096351623535155, 0.03956121444702149, 0.038932479858398435, 0.03879689788818359, 0.03876291275024414, 0.03887865447998047, 
0.03936108779907226, 0.03907708740234375, 0.03912579345703125, 0.039331550598144534, 0.03914166259765625, 0.03943833541870117, 0.03926015853881836, 0.03963603210449219, 0.039164863586425784, 0.03911395263671875, 0.039717662811279295, 0.039128543853759766, 0.039424545288085935, 0.03970563125610352, 0.039369697570800784, 0.039288257598876955, 0.03972768020629883, 0.03914547348022461, 0.03967795181274414, 0.0395366096496582, 0.03927862548828125, 0.039609664916992186, 0.03915980911254883, 0.03906607818603516, 0.03896950531005859, 0.03945417785644531, 0.039241439819335935, 0.03893337631225586, 0.039198719024658206, 0.03912089538574219, 0.03899801635742187, 0.039056705474853515, 0.039514080047607425, 0.039252033233642576, 0.03912099075317383, 0.03900579071044922, 0.038859745025634766, 0.038849857330322264, 0.03887532806396484, 0.038850208282470704, 0.038832992553710935, 0.03871561431884766, 0.04034726333618164, 0.040220703125, 0.039174560546875, 0.03955945587158203, 0.0391894416809082, 0.03900252914428711, 0.03887142562866211, 0.03927824020385742, 0.03921955108642578, 0.03875219345092774, 0.03898374557495117, 0.03897958374023437, 0.038786815643310546, 0.03932969665527344, 0.039158111572265626, 0.038973438262939454, 0.040235008239746094, 0.03919251251220703, 0.03919187164306641, 0.038902206420898436, 0.03897379302978515, 0.03902870559692383, 0.03870828628540039, 0.03880598449707031, 0.038885505676269534, 0.03869731140136719, 0.038809505462646485, 0.03890985488891602, 0.03875990295410156, 0.039434974670410156, 0.03910246276855469, 0.038916095733642575, 0.039370750427246096, 0.03925801467895508, 0.03899107360839844, 0.03906224060058594, 0.03956137466430664, 0.039214496612548826, 0.03892601776123047, 0.03877964782714844, 0.03873183822631836, 0.03880559921264649, 0.038932479858398435, 0.038716896057128906, 0.038672447204589844, 0.038986209869384766, 0.03883391952514648, 0.03927628707885742, 0.038878814697265625, 0.03876956939697265, 0.03905257415771484, 0.038857345581054685, 0.03879740905761719, 0.03863961410522461, 0.03903299331665039, 0.03936979293823242, 0.03880220794677734, 0.04050124740600586, 0.03917004776000976, 0.039005470275878903, 0.038889598846435544, 0.038857120513916016, 0.03885894393920898, 0.039663646697998045, 0.039002113342285157, 0.03895046234130859, 0.03903327941894531, 0.03888127899169922, 0.038866943359375, 0.03884646224975586, 0.038916095733642575, 0.039026081085205076, 0.038957664489746094, 0.0387380485534668, 0.03912691116333008, 0.03942300796508789, 0.03917513656616211, 0.03901830291748047, 0.03903916931152344, 0.03881372833251953, 0.03874403381347656, 0.03881683349609375, 0.038907936096191406, 0.03907062530517578, 0.03910246276855469, 0.038894847869873045, 0.03885561752319336, 0.03880668640136719, 0.03890380859375, 0.03894953536987305, 0.039684097290039064, 0.03899596786499023, 0.03889766311645508, 0.038991870880126955, 0.038973087310791014, 0.03906595230102539, 0.03897139358520508, 0.03913040161132812, 0.039207359313964844, 0.03996527862548828, 0.04050092697143555, 0.03944374465942383, 0.03920150375366211, 0.03904716873168945, 0.03890300750732422, 0.03866255950927734, 0.038867328643798826, 0.03863142395019531, 0.03877027130126953, 0.03923529434204102, 0.039027393341064455, 0.0390041618347168, 0.0388089599609375, 0.038760929107666015, 0.0388458251953125, 0.03887593460083008, 0.03907977676391602, 0.038810783386230466, 0.039019519805908204, 0.038776832580566405, 0.03874816131591797, 0.03878092956542969, 0.03873996734619141, 0.03878092956542969, 0.03878297424316406, 
0.03858227157592774, 0.0401544303894043, 0.03887603378295899, 0.03899372863769531, 0.038651103973388674, 0.03868137741088867, 0.03893657684326172, 0.03884767913818359, 0.03890857696533203, 0.03892563247680664, 0.03875219345092774, 0.039061729431152346, 0.038840351104736326, 0.038717121124267576, 0.038757343292236325, 0.03860275268554687, 0.03891164779663086, 0.03881001663208008, 0.038684574127197266, 0.03868633651733398, 0.03865788650512696, 0.038951488494873045, 0.03956531143188476, 0.039214366912841796, 0.039312095642089845, 0.039018497467041016, 0.03897753524780274, 0.03883996963500977, 0.03872963333129883, 0.038702846527099606, 0.03888719940185547, 0.03877686309814453, 0.03869375991821289, 0.03886284637451172, 0.03871052932739258, 0.03879193496704102, 0.038950912475585936, 0.038649311065673826, 0.03875279998779297, 0.038828033447265625, 0.0387665901184082, 0.03878297424316406, 0.03901030349731445, 0.03874816131591797, 0.03888127899169922, 0.03873177719116211, 0.03883961486816406, 0.03875091171264648, 0.03919209671020508, 0.03907017517089844, 0.03902182388305664, 0.0388595199584961, 0.03907788848876953, 0.03877791976928711, 0.03874092864990234, 0.038809600830078124, 0.0387583999633789, 0.03888332748413086, 0.038911136627197265, 0.038733856201171875, 0.03910943984985352, 0.03883139038085937, 0.0406514892578125, 0.03907571029663086, 0.039957569122314456, 0.039031742095947265, 0.038939872741699216, 0.03897139358520508, 0.039324447631835936, 0.0391596794128418, 0.03891212844848633, 0.03878297424316406, 0.03885990524291992, 0.03873580932617188, 0.03879417419433594, 0.039051265716552735, 0.0389304313659668, 0.03892428970336914, 0.0385816650390625, 0.03872633743286133, 0.03877190399169922, 0.03873865509033203, 0.038854400634765626, 0.038670558929443356, 0.0387031364440918, 0.03882393646240234, 0.03918844985961914, 0.03876051330566406, 0.03877471923828125, 0.038709278106689456, 0.039041023254394534, 0.03894681549072266, 0.03865599822998047, 0.03876249694824219, 0.03916595077514649, 0.03905737686157226, 0.03864579010009766, 0.038694911956787106, 0.038749408721923825, 0.038855457305908205, 0.03866995239257812, 0.03886118316650391, 0.03872931289672851, 0.038767009735107424, 0.03871916961669922, 0.03891836929321289, 0.038615135192871096, 0.03908169555664062, 0.03883468627929688, 0.03896092987060547, 0.03878649520874024, 0.03890643310546875, 0.03890995025634766, 0.038889312744140626, 0.04046976089477539, 0.04369910430908203, 0.038991870880126955, 0.039067520141601565, 0.03877081680297852, 0.03902988815307617, 0.03924636840820313, 0.03904694366455078, 0.038812225341796874, 0.03888070297241211, 0.03894739151000977, 0.038763553619384765, 0.03876681518554687, 0.039903392791748045, 0.03926425552368164, 0.03912879943847656, 0.0385497932434082, 0.03869033432006836, 0.03886537551879883, 0.0387845458984375, 0.038935009002685546, 0.03892633438110352, 0.03883123016357422, 0.038898399353027344, 0.03901190567016601, 0.03912140655517578, 0.039323745727539064, 0.03926736068725586, 0.03906790542602539, 0.03897116851806641, 0.03943929672241211, 0.03926220703125, 0.03924787139892578, 0.03942166519165039, 0.03913321685791016, 0.03917440032958985, 0.03921100616455078, 0.039515872955322266, 0.03937267303466797, 0.039426559448242186, 0.03946636962890625, 0.039275039672851564, 0.039388320922851563, 0.04379324722290039, 0.03951203155517578, 0.042186782836914065, 0.039779552459716795, 0.03935078430175781, 0.03939561462402344, 0.03948339080810547, 0.039929855346679685, 0.040302593231201174, 0.03975084686279297, 
0.03922614288330078, 0.03935395050048828, 0.03904556655883789, 0.039051265716552735, 0.039150753021240235, 0.03908678436279297, 0.0390843505859375, 0.03896819305419922, 0.038882270812988284, 0.038836223602294925, 0.03892166519165039, 0.03889731216430664, 0.04064131164550781, 0.0389879035949707, 0.03886227035522461, 0.039019073486328125, 0.03916185760498047, 0.03901030349731445, 0.03879462432861328, 0.03885120010375977, 0.039065601348876954, 0.03887513732910156, 0.038680416107177734, 0.03986796951293945, 0.03915398406982422, 0.038933696746826174, 0.038781856536865236, 0.03891939163208008, 0.038812480926513675, 0.03876976013183594, 0.03875932693481445, 0.038725345611572266, 0.03880579376220703, 0.03908182525634766, 0.03894230270385742, 0.038987934112548826, 0.038907585144042967, 0.03867692947387695, 0.038617374420166016, 0.038621185302734375, 0.038705024719238285, 0.041242752075195316, 0.03871744155883789, 0.038719135284423827, 0.03911100769042969, 0.03885593414306641, 0.038865280151367185, 0.03870348739624024, 0.038555168151855466, 0.03855817413330078, 0.038913280487060546, 0.03842550277709961, 0.0385063362121582, 0.038520736694335936, 0.03846342468261719, 0.038551712036132814, 0.03856588745117188, 0.0385865592956543, 0.03852624130249024, 0.03858076858520508, 0.038882305145263675, 0.03867340850830078, 0.03871680068969727, 0.03861568069458008, 0.03873791885375977, 0.03867443084716797, 0.03895849609375, 0.03860310363769531, 0.038691070556640624, 0.03869257736206055, 0.039085758209228515, 0.03907180786132813, 0.03898729705810547, 0.038795360565185545, 0.038746593475341796, 0.038664257049560544, 0.038784767150878904, 0.038609535217285156, 0.03879731369018555, 0.038624385833740234, 0.03860569763183594, 0.03862073516845703, 0.03862982559204101, 0.03853311920166016, 0.03879670333862305, 0.03888729476928711, 0.04002627182006836, 0.03896102523803711, 0.03916819381713867, 0.03907583999633789, 0.0394021110534668, 0.03957561492919922, 0.03906355285644531, 0.03904512023925781, 0.038981632232666014, 0.03937484741210937, 0.039370590209960935, 0.038987934112548826, 0.039174144744873046, 0.039057407379150394, 0.03892127990722656, 0.038860897064208984, 0.038701919555664065, 0.038637569427490234, 0.03870249557495117, 0.03861769485473633, 0.03880550384521484, 0.03908819198608399, 0.04008534240722656, 0.0389117431640625, 0.03868502426147461, 0.039202625274658204, 0.039534111022949216, 0.03865871810913086, 0.038610206604003904, 0.03856252670288086, 0.03887308883666992, 0.039153919219970704, 0.038604190826416016, 0.0385437126159668, 0.03865599822998047, 0.03865190505981445, 0.03866550445556641, 0.038516799926757814, 0.03872367858886719, 0.038628990173339844, 0.03850271987915039, 0.039170688629150394, 0.04014694213867188, 0.03886214447021484, 0.03858854293823242, 0.03876428985595703, 0.03869475173950195, 0.03873686218261719, 0.03886220932006836, 0.03870579147338867, 0.038563713073730466, 0.03859817504882813, 0.038451168060302736, 0.03845785522460937, 0.03860396957397461, 0.03893280029296875, 0.03879385757446289, 0.03917136001586914, 0.0386383056640625, 0.03892793655395508, 0.03867692947387695, 0.03919462585449219, 0.03917574310302734]",tokens/s,25.56753667531617,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,814.20288,538.836992,0.0,136.31488,130.303488,s,1,7.63646923828125,7.63646923828125,0.0,7.63646923828125,7.63646923828125,7.63646923828125,7.63646923828125,[7.63646923828125],,kWh,1.3435531637492204e-05,1.4748280618082226e-06,3.4375027500033184e-06,1.8347862449303745e-05,,MB,1285.30432,616.431616,0.0,199.22944,174.868992,s,19,0.22155593395233153,0.011660838629070079,0.00015955394109839742,0.011625215530395508,0.011856383895874024,0.011876995277404784,0.011931392707824708,"[0.01184118366241455, 0.011625215530395508, 0.011628992080688476, 0.011605279922485352, 0.011869440078735352, 0.011753439903259277, 0.011944992065429687, 0.0115894718170166, 0.011819135665893555, 0.011613504409790039, 0.011830623626708985, 0.01151968002319336, 0.011720959663391112, 0.011528287887573242, 0.011853119850158692, 0.011445664405822753, 0.011470239639282226, 0.011429183959960938, 0.011467519760131837]",tokens/s,21953.8240896157,kWh,3.355739868725958e-07,3.700785848307786e-08,1.686687173103406e-07,5.412505626660143e-07,tokens/kWh,472978723.08720005,MB,1318.629376,629.014528,0.0,211.812352,174.871552,s,19,10.2297783203125,0.5384093852796054,0.008846364402158519,0.5383507690429687,0.5456586669921875,0.5498354980468749,0.5601568457031251,"[0.5484019775390625, 0.5382801513671875, 0.5361609497070312, 0.5383507690429687, 0.5449728393554687, 0.540945556640625, 0.5627371826171875, 0.5342178344726562, 0.5384573364257812, 0.5369503784179688, 0.544735107421875, 0.5366571044921875, 0.5440146484375, 0.540578125, 0.5384151000976563, 0.5245339965820313, 0.5232603149414062, 0.52432861328125, 0.5337803344726563]",tokens/s,117.01133323907979,kWh,1.5454832351066345e-05,1.7043991639598486e-06,5.5412773711106555e-06,2.270050888613685e-05,tokens/kWh,2775268.180814835,,s,1197,10.21945331096647,0.00853755497992188,0.0003395791310526462,0.008482144355773926,0.00879378547668457,0.008940006065368654,0.009569785537719719,"[0.008351743698120117, 0.008641504287719726, 0.008648703575134278, 0.008827936172485352, 0.00862889575958252, 0.008481087684631348, 0.008476672172546386, 0.008499199867248536, 0.008435711860656739, 0.008413536071777344, 0.008429408073425293, 0.008380224227905273, 0.008914943695068359, 0.008500896453857421, 0.009820511817932129, 0.00885529613494873, 0.00854860782623291, 0.008605695724487305, 0.008591360092163085, 0.008677375793457032, 0.008705632209777832, 0.00869007968902588, 0.008728575706481934, 0.008664928436279296, 0.008783871650695801, 0.008832287788391114, 0.00890777587890625, 0.009018752098083496, 0.008946175575256348, 0.009016736030578613, 0.00887065601348877, 0.009072480201721191, 0.008998847961425782, 0.009011039733886719, 0.008909088134765625, 0.008973567962646485, 0.00884115219116211, 0.008772352218627929, 0.008953856468200684, 0.008941184043884278, 0.00892147159576416, 0.008849696159362793, 0.008871647834777832, 
0.00879635238647461, 0.008789823532104493, 0.008787167549133302, 0.008758048057556152, 0.008841216087341308, 0.008750335693359375, 0.00880620765686035, 0.00858124828338623, 0.008464256286621093, 0.00865516757965088, 0.008473407745361329, 0.00835523223876953, 0.008329312324523925, 0.00834182357788086, 0.008378432273864747, 0.008400192260742187, 0.00829641628265381, 0.008384896278381347, 0.008337696075439454, 0.008362048149108887, 0.008446080207824707, 0.008667327880859376, 0.008597184181213378, 0.008496352195739746, 0.008522527694702148, 0.0086179838180542, 0.008649920463562012, 0.008834943771362305, 0.008747584342956543, 0.008742464065551759, 0.008710592269897462, 0.00865875244140625, 0.008683072090148927, 0.0087010555267334, 0.008703392028808593, 0.008784288406372071, 0.00868563175201416, 0.008486911773681641, 0.008390656471252441, 0.008337663650512696, 0.008400768280029296, 0.008349408149719238, 0.00829155158996582, 0.008307776451110839, 0.008353728294372559, 0.008380352020263672, 0.008374272346496582, 0.00832921600341797, 0.008308735847473145, 0.008306591987609864, 0.008280159950256348, 0.00838150405883789, 0.008360896110534668, 0.008339455604553223, 0.008298175811767578, 0.008319295883178712, 0.008278016090393067, 0.00870406436920166, 0.008525568008422852, 0.0086430082321167, 0.008736512184143067, 0.008543744087219238, 0.008460800170898437, 0.00844927978515625, 0.008415424346923828, 0.008424160003662109, 0.008367967605590821, 0.00834175968170166, 0.008359040260314941, 0.008387200355529785, 0.00849897575378418, 0.008597663879394531, 0.008456031799316406, 0.008642751693725586, 0.00880835247039795, 0.009143551826477051, 0.008717120170593262, 0.008746175765991212, 0.008604543685913086, 0.00947920036315918, 0.008662079811096192, 0.00848044776916504, 0.008448224067687988, 0.008291616439819335, 0.008635007858276367, 0.008501248359680176, 0.008377728462219238, 0.008710847854614259, 0.00848038387298584, 0.008368448257446289, 0.008415231704711914, 0.008345600128173827, 0.00841113567352295, 0.008427519798278809, 0.00858518409729004, 0.00862825584411621, 0.008603008270263671, 0.008568608283996582, 0.008453280448913573, 0.008550080299377441, 0.008373503684997558, 0.008406047821044921, 0.008364928245544434, 0.008331680297851562, 0.008452544212341309, 0.008405152320861817, 0.008373727798461914, 0.008417216300964356, 0.008415679931640625, 0.00841318416595459, 0.008347583770751952, 0.008476287841796874, 0.008346367835998535, 0.008427231788635255, 0.008549471855163575, 0.008676095962524414, 0.008985983848571777, 0.008786848068237305, 0.00868131160736084, 0.008623456001281738, 0.008630592346191407, 0.008541983604431152, 0.008610655784606933, 0.00843337631225586, 0.008566368103027343, 0.008446368217468261, 0.008410752296447754, 0.008432160377502442, 0.008559519767761231, 0.008600704193115235, 0.008654656410217286, 0.008638496398925782, 0.008666496276855468, 0.008575584411621094, 0.008642560005187988, 0.008527872085571288, 0.00845849609375, 0.008479840278625488, 0.008503007888793945, 0.008455103874206542, 0.008417280197143554, 0.008396127700805663, 0.00845686435699463, 0.008435680389404296, 0.008409119606018067, 0.008501248359680176, 0.008240063667297364, 0.008352095603942872, 0.008385824203491212, 0.008382847785949707, 0.008384351730346679, 0.00842563247680664, 0.008617440223693847, 0.00879635238647461, 0.00869820785522461, 0.00863871955871582, 0.008716032028198243, 0.008591360092163085, 0.008591327667236328, 0.008591391563415528, 0.008601216316223145, 0.008641247749328613, 0.008637696266174317, 
0.008483231544494629, 0.009158687591552734, 0.008638431549072265, 0.008691712379455567, 0.008651007652282714, 0.008705087661743164, 0.008499903678894043, 0.00840617561340332, 0.00856924819946289, 0.008452544212341309, 0.008400896072387695, 0.008272128105163574, 0.00828166389465332, 0.008316320419311523, 0.008292575836181641, 0.008284031867980957, 0.008397855758666993, 0.008395456314086914, 0.009116640090942383, 0.008802304267883301, 0.008366304397583008, 0.008353023529052735, 0.00866102409362793, 0.008354111671447754, 0.008418751716613769, 0.008431455612182618, 0.008450976371765137, 0.009041791915893554, 0.008624128341674805, 0.008648832321166993, 0.008531519889831543, 0.008536767959594727, 0.00871769618988037, 0.0084966402053833, 0.008393600463867187, 0.008393792152404786, 0.008381183624267579, 0.008431903839111328, 0.008397855758666993, 0.008446880340576172, 0.008483967781066894, 0.008474559783935546, 0.008516511917114257, 0.008639871597290039, 0.008727295875549317, 0.008738688468933106, 0.008482848167419434, 0.00869974422454834, 0.00862399959564209, 0.008589568138122559, 0.008570879936218261, 0.008605152130126954, 0.008461024284362793, 0.00844985580444336, 0.008421376228332519, 0.008427359580993653, 0.008480704307556153, 0.008524224281311036, 0.008513312339782714, 0.008587136268615722, 0.008668831825256348, 0.008944095611572266, 0.008939711570739747, 0.008703392028808593, 0.008619647979736328, 0.008659744262695312, 0.008693504333496093, 0.008595071792602538, 0.008565376281738282, 0.00851910400390625, 0.00852336025238037, 0.008559328079223633, 0.008523903846740722, 0.00851676845550537, 0.008561951637268066, 0.008660703659057618, 0.008560480117797852, 0.008482144355773926, 0.008612640380859375, 0.008558239936828613, 0.008504799842834473, 0.00853279972076416, 0.008635904312133789, 0.00873904037475586, 0.009004799842834473, 0.008876640319824219, 0.00897555160522461, 0.008870719909667969, 0.00899891185760498, 0.008728447914123536, 0.0085731201171875, 0.00851091194152832, 0.008562848091125489, 0.009769311904907227, 0.008640512466430664, 0.008937472343444825, 0.008802111625671388, 0.008812735557556153, 0.008586655616760254, 0.008526016235351562, 0.008563103675842286, 0.00851353645324707, 0.008498175621032715, 0.008440768241882324, 0.008452032089233398, 0.00846454429626465, 0.008844672203063965, 0.008437472343444824, 0.008713088035583495, 0.008151167869567872, 0.008287487983703613, 0.008450431823730469, 0.008382399559020997, 0.00835206413269043, 0.008361984252929687, 0.008355839729309082, 0.00835923194885254, 0.008383168220520019, 0.008357888221740722, 0.008550399780273438, 0.008464320182800293, 0.008536128044128418, 0.008738816261291504, 0.008773056030273438, 0.008698431968688965, 0.00868678379058838, 0.008705056190490723, 0.008681247711181641, 0.0087010555267334, 0.008614656448364257, 0.008666848182678223, 0.008901023864746093, 0.009037823677062988, 0.009186464309692383, 0.008671775817871093, 0.008593952178955078, 0.008610591888427735, 0.008557567596435547, 0.008451680183410644, 0.008490592002868653, 0.00868563175201416, 0.008477536201477051, 0.00844486427307129, 0.00843785572052002, 0.008620927810668946, 0.008509696006774902, 0.008603679656982421, 0.008477631568908692, 0.008436256408691407, 0.00840339183807373, 0.008384320259094238, 0.008409088134765624, 0.008420384407043457, 0.008416223526000976, 0.008392959594726563, 0.008605152130126954, 0.008400863647460937, 0.008345919609069824, 0.008390751838684082, 0.008461759567260743, 0.008586943626403809, 0.008760095596313477, 0.008643967628479004, 
0.008665727615356446, 0.008572192192077636, 0.00860643196105957, 0.008799936294555664, 0.008769856452941895, 0.008822784423828126, 0.008976575851440429, 0.009019264221191406, 0.00901318359375, 0.008644736289978028, 0.008639167785644532, 0.008615200042724609, 0.008618720054626465, 0.00870751953125, 0.008599712371826171, 0.0086278076171875, 0.008663392066955567, 0.008591615676879882, 0.008604000091552735, 0.008766816139221192, 0.00884995174407959, 0.008806591987609863, 0.008776639938354492, 0.008714367866516113, 0.008685407638549804, 0.008956671714782715, 0.008586624145507812, 0.008565376281738282, 0.008581119537353516, 0.008622048377990722, 0.008579423904418945, 0.010622655868530274, 0.012289536476135255, 0.01336575984954834, 0.008740863800048827, 0.00868556785583496, 0.00854537582397461, 0.008590432167053222, 0.008400704383850098, 0.008343263626098633, 0.00834166431427002, 0.008413311958312988, 0.008293472290039062, 0.008303520202636718, 0.008482815742492676, 0.00864668846130371, 0.00866921615600586, 0.008877216339111328, 0.00867136001586914, 0.008714559555053712, 0.008849760055541993, 0.008678815841674804, 0.008652576446533203, 0.008652671813964844, 0.008657055854797363, 0.00902239990234375, 0.009902112007141113, 0.009014911651611328, 0.012383808135986328, 0.01116425609588623, 0.00935324764251709, 0.008588895797729493, 0.0085316162109375, 0.008532832145690918, 0.008621120452880859, 0.008756064414978027, 0.00859062385559082, 0.00854911994934082, 0.008591584205627441, 0.008400863647460937, 0.008396575927734375, 0.00852079963684082, 0.00812224006652832, 0.008376031875610352, 0.008375712394714355, 0.00839577579498291, 0.008488960266113281, 0.00841305637359619, 0.008382880210876464, 0.00838047981262207, 0.008570528030395508, 0.00848185634613037, 0.008546943664550782, 0.00864902400970459, 0.008893888473510742, 0.008497216224670411, 0.00850175952911377, 0.008501248359680176, 0.008466015815734864, 0.008605183601379395, 0.008649632453918457, 0.0086179838180542, 0.008620160102844238, 0.008406815528869628, 0.008429439544677734, 0.008296671867370606, 0.008285504341125488, 0.008358592033386231, 0.008352928161621094, 0.008325663566589356, 0.008427455902099609, 0.00848844814300537, 0.00855123233795166, 0.008513600349426269, 0.008418463706970214, 0.008348511695861817, 0.008300512313842774, 0.008271967887878418, 0.008311967849731445, 0.008290944099426269, 0.008492287635803222, 0.008401887893676758, 0.008301759719848633, 0.008309503555297852, 0.0083221435546875, 0.00831328010559082, 0.008322591781616212, 0.008348608016967773, 0.008376319885253907, 0.0083755521774292, 0.008581472396850585, 0.008868255615234376, 0.008976575851440429, 0.008697471618652343, 0.008568127632141114, 0.00869215965270996, 0.008714688301086427, 0.008470463752746581, 0.00852336025238037, 0.008446240425109863, 0.008458016395568847, 0.008501664161682129, 0.00850153636932373, 0.008511199951171874, 0.00864412784576416, 0.008300095558166505, 0.008442303657531739, 0.008564736366271973, 0.009029631614685058, 0.01102883243560791, 0.009320096015930176, 0.008751104354858399, 0.008496255874633788, 0.008485631942749023, 0.00837235164642334, 0.008361984252929687, 0.008370431900024414, 0.008316096305847167, 0.00839737606048584, 0.008405216217041015, 0.008308768272399902, 0.008349568367004394, 0.008288127899169922, 0.008383808135986328, 0.008286496162414551, 0.008249407768249511, 0.00825500774383545, 0.008395872116088866, 0.008482815742492676, 0.008550111770629882, 0.008619296073913573, 0.008532928466796876, 0.008437760353088379, 
0.008374048233032226, 0.008335359573364258, 0.008400639533996583, 0.008503135681152343, 0.008507807731628418, 0.008316543579101563, 0.008429951667785644, 0.008340895652770996, 0.008354656219482422, 0.008375519752502442, 0.008563232421875, 0.008560704231262208, 0.008508735656738282, 0.008490688323974609, 0.00860870361328125, 0.008720576286315918, 0.00878163242340088, 0.008785920143127441, 0.008712191581726075, 0.00859545612335205, 0.008662272453308105, 0.00847539234161377, 0.008427519798278809, 0.008488639831542969, 0.008378399848937988, 0.008378656387329101, 0.008332927703857422, 0.008331071853637696, 0.008507967948913574, 0.008478240013122558, 0.00855724811553955, 0.008846879959106446, 0.008601792335510254, 0.008703807830810546, 0.008618495941162109, 0.009083552360534668, 0.0092359037399292, 0.00981164836883545, 0.008668031692504883, 0.008509183883666992, 0.008697248458862305, 0.008336000442504883, 0.008361791610717774, 0.008507328033447266, 0.008383135795593261, 0.0083373441696167, 0.008368032455444336, 0.008398816108703614, 0.008343040466308594, 0.008336159706115722, 0.008358816146850585, 0.008397631645202637, 0.008370176315307617, 0.008347040176391601, 0.008398655891418458, 0.008673855781555176, 0.008658880233764649, 0.00877344036102295, 0.008824447631835937, 0.008717280387878418, 0.008646528244018554, 0.008607647895812988, 0.008679360389709472, 0.008719712257385255, 0.00895468807220459, 0.008697855949401855, 0.00851375961303711, 0.008560416221618652, 0.008447999954223634, 0.008363903999328614, 0.008342816352844238, 0.008321887969970703, 0.00830361557006836, 0.008399456024169923, 0.008388640403747559, 0.00831116771697998, 0.008326335906982422, 0.008426303863525391, 0.008381695747375488, 0.008386303901672363, 0.008329248428344727, 0.008311776161193848, 0.008302687644958496, 0.008289631843566894, 0.008303168296813965, 0.00825705623626709, 0.008259712219238281, 0.00827836799621582, 0.008230912208557128, 0.00837769603729248, 0.008436384201049805, 0.008654848098754882, 0.00850928020477295, 0.008468640327453614, 0.00837337589263916, 0.009122015953063965, 0.008479328155517578, 0.008634271621704102, 0.008623807907104493, 0.00861622428894043, 0.008744799613952636, 0.008894335746765137, 0.00892950439453125, 0.008734175682067871, 0.008542112350463867, 0.0084136962890625, 0.008406944274902343, 0.008382464408874512, 0.008376447677612305, 0.008356032371520996, 0.00835142421722412, 0.008353631973266601, 0.008345215797424317, 0.008475168228149414, 0.008441856384277344, 0.008365280151367187, 0.008321215629577636, 0.008409855842590332, 0.008931424140930176, 0.009561471939086914, 0.00905020809173584, 0.008839455604553223, 0.008654848098754882, 0.008597503662109375, 0.008648799896240235, 0.008629599571228027, 0.008667712211608886, 0.008628543853759765, 0.008668671607971192, 0.008700096130371093, 0.008584223747253417, 0.00854092788696289, 0.008573151588439942, 0.008646944046020507, 0.008741632461547851, 0.008673664093017577, 0.008672127723693848, 0.008672991752624511, 0.008508671760559082, 0.008606464385986327, 0.008439807891845704, 0.008512767791748047, 0.008659711837768555, 0.00882688045501709, 0.008817983627319335, 0.008838175773620605, 0.008787615776062012, 0.009903743743896484, 0.008712448120117188, 0.008560768127441406, 0.008572928428649903, 0.008914943695068359, 0.008529919624328614, 0.008962047576904298, 0.00844495964050293, 0.008422368049621582, 0.008670304298400879, 0.008518560409545899, 0.008377663612365723, 0.00848147201538086, 0.00838771152496338, 0.008177663803100586, 0.008347040176391601, 
0.008327775955200196, 0.008345600128173827, 0.008441632270812988, 0.008574527740478516, 0.008743807792663575, 0.008752927780151368, 0.008566783905029298, 0.008531968116760253, 0.008385984420776368, 0.008538944244384766, 0.008429311752319335, 0.008352992057800292, 0.008382816314697266, 0.008368576049804687, 0.008425472259521484, 0.008295488357543946, 0.008364768028259277, 0.00848630428314209, 0.008636832237243652, 0.008702367782592773, 0.008658368110656739, 0.008757472038269043, 0.008595840454101562, 0.008673215866088867, 0.008728351593017578, 0.008653056144714356, 0.008560640335083008, 0.008485183715820313, 0.008449728012084962, 0.008415200233459473, 0.008511520385742188, 0.008332384109497071, 0.008327232360839844, 0.008320927619934082, 0.0083056001663208, 0.008326560020446778, 0.008595871925354003, 0.008528063774108887, 0.008348768234252929, 0.00836678409576416, 0.008548576354980469, 0.00882585620880127, 0.008896639823913574, 0.008759615898132324, 0.008567359924316406, 0.008521727561950684, 0.008479999542236329, 0.00847696018218994, 0.00844816017150879, 0.008481216430664063, 0.008447775840759278, 0.008374367713928223, 0.00838163185119629, 0.008437888145446777, 0.008602304458618164, 0.008465951919555664, 0.008583935737609863, 0.008654656410217286, 0.008767680168151855, 0.008708064079284667, 0.008560383796691894, 0.008354847908020019, 0.008645536422729493, 0.008722304344177246, 0.008636672019958496, 0.008844511985778809, 0.008872447967529297, 0.008793439865112305, 0.008647551536560058, 0.008652735710144042, 0.008515423774719238, 0.00860364818572998, 0.008437439918518067, 0.008440671920776367, 0.008502079963684081, 0.008419584274291993, 0.008587776184082031, 0.008530048370361329, 0.008646656036376953, 0.008714240074157715, 0.008784223556518554, 0.008667872428894044, 0.0085349760055542, 0.008478719711303711, 0.008452095985412598, 0.008529919624328614, 0.008400416374206543, 0.008487392425537109, 0.00863644790649414, 0.008466400146484376, 0.00841055965423584, 0.00838918399810791, 0.008544256210327148, 0.008648768424987793, 0.008654720306396484, 0.008794303894042968, 0.008707967758178711, 0.008582464218139648, 0.008601984024047852, 0.008683839797973632, 0.008546303749084473, 0.008509440422058106, 0.008472895622253418, 0.00854297637939453, 0.008577343940734863, 0.008624608039855958, 0.008833279609680176, 0.009164511680603027, 0.008829119682312012, 0.008810272216796875, 0.008816864013671875, 0.008874015808105469, 0.008656864166259765, 0.008668831825256348, 0.008638879776000977, 0.008531071662902833, 0.009024319648742675, 0.008638463973999023, 0.008617856025695802, 0.008867039680480956, 0.008643744468688964, 0.008455167770385743, 0.008653568267822266, 0.008474623680114746, 0.008098431587219238, 0.008298463821411133, 0.00830668830871582, 0.008309856414794922, 0.008399423599243163, 0.008347999572753907, 0.008345600128173827, 0.008492256164550781, 0.008698847770690918, 0.00886355209350586, 0.00868115234375, 0.008646976470947266, 0.008630528450012206, 0.008574848175048827, 0.008585087776184082, 0.008654848098754882, 0.008654848098754882, 0.008691712379455567, 0.008560928344726562, 0.008598527908325194, 0.008626912117004394, 0.008549440383911132, 0.008504256248474121, 0.008412863731384277, 0.00869222354888916, 0.008677184104919433, 0.008601696014404296, 0.0086746244430542, 0.008757823944091797, 0.008615967750549316, 0.0086179838180542, 0.008846367835998535, 0.008909791946411132, 0.008736767768859864, 0.008725791931152345, 0.008573792457580566, 0.008543135643005371, 0.008545248031616211, 
0.008626175880432128, 0.008585503578186035, 0.008568032264709473, 0.00869222354888916, 0.00858233642578125, 0.008557375907897949, 0.00868556785583496, 0.008740863800048827, 0.0087326717376709, 0.008633919715881348, 0.008663647651672364, 0.008539999961853027, 0.008493184089660644, 0.008433216094970703, 0.00846617603302002, 0.008415583610534669, 0.008468000411987305, 0.008410112380981445, 0.008465760231018067, 0.008423775672912597, 0.008477791786193848, 0.008567168235778808, 0.008507935523986817, 0.008540160179138183, 0.008665184020996093, 0.008694111824035644, 0.008742848396301269, 0.008662752151489258, 0.00860979175567627, 0.00861184024810791, 0.00859545612335205, 0.008512800216674804, 0.00854047966003418, 0.008523327827453613, 0.00849392032623291, 0.008458208084106446, 0.008530976295471192, 0.00837939167022705, 0.008472000122070313, 0.008637311935424805, 0.008562368392944336, 0.008741151809692383, 0.00873852825164795, 0.009278911590576172, 0.008534144401550293, 0.008485312461853028, 0.008449888229370118, 0.008443648338317872, 0.008360639572143554, 0.008400128364562989, 0.00877184009552002, 0.008422944068908691, 0.008372927665710449, 0.00863167953491211, 0.00834771156311035, 0.008473024368286133, 0.008996992111206055, 0.00856230354309082, 0.008746975898742676, 0.00911302375793457, 0.008578335762023926, 0.008557855606079101, 0.008548768043518066, 0.00860086441040039, 0.008647263526916504, 0.008393983840942382, 0.008323391914367676, 0.008300415992736817, 0.00831497573852539, 0.008346207618713379, 0.00835193634033203, 0.008447808265686035, 0.008552448272705078, 0.009043295860290528, 0.00868553638458252, 0.008794816017150878, 0.008605695724487305, 0.008481023788452148, 0.008419072151184082, 0.008409119606018067, 0.008296256065368652, 0.0083306884765625, 0.00830128002166748, 0.008437760353088379, 0.00840726375579834, 0.00825551986694336, 0.008234432220458984, 0.008361887931823731, 0.00797273588180542, 0.00822492790222168, 0.008237024307250977, 0.008292351722717285, 0.008272159576416015, 0.00824454402923584, 0.008333951950073241, 0.008247072219848633, 0.008294624328613281, 0.008252767562866211, 0.008343423843383789, 0.008304863929748536, 0.008280415534973145, 0.00828006362915039, 0.00829849624633789, 0.008300352096557616, 0.00827616024017334, 0.00828166389465332, 0.008241600036621093, 0.008421119689941406, 0.008278271675109864, 0.008234272003173828, 0.0083854398727417, 0.008356703758239745, 0.008253919601440429, 0.008253952026367188, 0.008226816177368163, 0.008239104270935058, 0.008257696151733399, 0.00823855972290039, 0.008229248046875, 0.008227904319763184, 0.0084301118850708, 0.008359456062316894, 0.00937657642364502, 0.008341440200805664, 0.008296575546264649, 0.008333375930786133, 0.008324095726013184, 0.008307647705078126, 0.008306015968322754, 0.008357760429382324, 0.008350496292114257, 0.00829849624633789, 0.008331263542175293, 0.008309087753295898, 0.008314528465270997, 0.008314720153808594, 0.0082957124710083, 0.008348575592041016, 0.008294367790222168, 0.008322367668151856, 0.00827462387084961, 0.008253439903259278, 0.008289728164672851, 0.008288479804992676, 0.00830089569091797, 0.008278112411499023, 0.008507295608520508, 0.008384415626525879, 0.00847702407836914, 0.008334815979003906, 0.008465951919555664, 0.008056991577148438, 0.008361887931823731, 0.008281920433044434, 0.008292511940002441, 0.008276127815246581, 0.008495103836059571, 0.008293567657470703, 0.008275808334350586, 0.008254079818725587, 0.008269791603088378, 0.008284223556518555, 0.008327679634094238, 
0.008272000312805176, 0.008316191673278808, 0.008265407562255859, 0.008479104042053223, 0.008385120391845703, 0.008273056030273437, 0.0083438720703125, 0.008240480422973633, 0.008207584381103515, 0.00823094367980957, 0.008230208396911621, 0.008229248046875, 0.008218751907348634, 0.008619487762451172, 0.00827228832244873, 0.008271807670593262, 0.00828217601776123, 0.008232640266418458, 0.008405311584472656, 0.008242591857910157, 0.008259391784667969, 0.008257663726806641, 0.008766112327575683, 0.00825331211090088, 0.00825494384765625, 0.008282431602478027, 0.008278176307678223, 0.008278271675109864, 0.008228256225585937, 0.008243712425231933, 0.008361215591430665, 0.008264479637145996, 0.008259103775024415, 0.008278271675109864, 0.008279616355895997, 0.008346272468566894, 0.008445952415466309, 0.008285375595092773, 0.008253952026367188, 0.008243328094482421, 0.008237600326538086, 0.00826159954071045, 0.008363712310791015, 0.008264863967895508, 0.008252256393432618, 0.008279775619506836, 0.008243231773376465, 0.008445823669433594, 0.008266112327575684, 0.008260640144348144, 0.008300767898559571, 0.007989088058471679, 0.008210623741149902, 0.008333663940429687, 0.008262911796569824, 0.008290719985961915, 0.008277503967285157, 0.008242015838623046, 0.008295968055725097, 0.008323200225830078, 0.008295583724975585, 0.008260448455810547, 0.008382271766662598, 0.008323455810546875, 0.008282079696655273, 0.008441535949707032, 0.008338687896728515, 0.008309663772583008, 0.008281408309936523, 0.00826358413696289, 0.00829520034790039, 0.00828006362915039, 0.008265088081359864, 0.00829417610168457, 0.008278271675109864, 0.008258144378662109, 0.008302816390991211, 0.00838633632659912, 0.008286208152770995, 0.008681471824645997, 0.008357888221740722, 0.00834716796875, 0.00832969570159912, 0.008341695785522461, 0.008313887596130371, 0.008257344245910644, 0.008237631797790527, 0.008228544235229493, 0.008230624198913574, 0.008430591583251953, 0.00832431983947754, 0.008251775741577148, 0.008472991943359374, 0.00832636833190918, 0.008391391754150391, 0.008443936347961426, 0.008258912086486816, 0.00824988842010498, 0.008482975959777832, 0.008275967597961426, 0.00828991985321045, 0.008482751846313476, 0.008269280433654784, 0.008317791938781738, 0.008277119636535645, 0.008407520294189454, 0.008270591735839843, 0.008308511734008789, 0.008341407775878907, 0.00831702423095703, 0.008294400215148925, 0.008376319885253907, 0.00825046443939209, 0.008358816146850585, 0.007969791889190675, 0.008220255851745606, 0.008381088256835938, 0.008193632125854493, 0.008214559555053711, 0.008243328094482421, 0.008443455696105957, 0.008558719635009766, 0.011220640182495117, 0.008382847785949707, 0.008724767684936524, 0.008376319885253907, 0.00826313591003418, 0.008774432182312011, 0.008315936088562011, 0.008222975730895997, 0.008503487586975098, 0.008242815971374511, 0.00824118423461914, 0.00826972770690918, 0.008250304222106934, 0.008293536186218262, 0.00824345588684082, 0.0085797758102417, 0.008678560256958007, 0.009235039710998535, 0.009101119995117188, 0.008465696334838867, 0.008379008293151856, 0.00832710361480713, 0.008388095855712891, 0.008372735977172852, 0.008338944435119629, 0.008371264457702636, 0.008447744369506837, 0.008317119598388672, 0.00826460838317871, 0.009308352470397949, 0.008323776245117187, 0.008259136199951172, 0.008243647575378418, 0.008263680458068847, 0.008268896102905274, 0.008276191711425782, 0.008300800323486329, 0.008410880088806152, 0.008331968307495117, 0.008289728164672851, 0.008304896354675293, 
0.008339648246765136, 0.008521856307983399, 0.008553631782531738, 0.008553631782531738, 0.008380096435546875, 0.00836201572418213, 0.008357855796813965, 0.008330752372741699, 0.0083023681640625, 0.00833407974243164, 0.008840895652770997, 0.00859705638885498, 0.008614527702331542, 0.008710399627685547]",tokens/s,117.12955317438553,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3900, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", 
line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,930.025472,641.59744,0.0,239.075328,225.530368,s,1,8.3208642578125,8.3208642578125,0.0,8.3208642578125,8.3208642578125,8.3208642578125,8.3208642578125,[8.3208642578125],,kWh,2.2302540895839228e-05,2.4528961789635882e-06,6.915283309996689e-06,3.16707203847995e-05,,MB,1320.726528,733.872128,0.0,316.669952,285.824512,s,10,0.20532976150512697,0.020532976150512697,0.0004285717108414186,0.020285167694091798,0.02109200038909912,0.021108992290496828,0.02112258581161499,"[0.02107948875427246, 0.020902368545532228, 0.02009059143066406, 0.02014787292480469, 0.020173343658447265, 0.020286687850952147, 0.020151552200317384, 0.020283647537231445, 0.021125984191894532, 0.021088224411010742]",tokens/s,12467.749347364232,kWh,6.220872521765983e-07,6.860488641940567e-08,3.681956274157693e-07,1.0588877660117731e-06,tokens/kWh,241763110.51756328,MB,1354.051584,775.815168,0.0,358.612992,297.747968,s,10,9.887811218261719,0.9887811218261717,0.02167799515446516,0.9833120422363282,1.0199381958007812,1.0272988708496094,1.0331874108886718,"[0.992834228515625, 0.9797584228515624, 0.9868656616210938, 0.9697926025390625, 0.9673882446289063, 0.9713040771484375, 0.9686026611328125, 0.9983032836914062, 1.0346595458984376, 1.018302490234375]",tokens/s,63.71480867640941,kWh,2.8908675988240202e-05,3.1881045338908655e-06,1.0581174243184032e-05,4.267795476531509e-05,tokens/kWh,1476171.956843651,,s,630,9.881662103652952,0.015685177942306276,0.0004858996088117791,0.015508959770202637,0.0162648099899292,0.01635374069213867,0.01699263513565064,"[0.015949695587158204, 0.015929183959960937, 0.015565312385559082, 0.015512639999389649, 0.016214303970336914, 0.016141984939575197, 0.01612406349182129, 0.015712767601013183, 0.015632384300231932, 0.01544547176361084, 0.015458847999572754, 0.015476832389831543, 0.015740832328796386, 0.015837183952331545, 0.01597868824005127, 0.015900480270385743, 0.0160501766204834, 0.015964159965515135, 0.01600070381164551, 0.01586207962036133, 0.015646047592163086, 0.015578080177307128, 0.015472224235534668, 0.015480928421020508, 0.015342944145202637, 0.015466303825378417, 0.015371135711669922, 0.015420543670654296, 0.015513824462890626, 0.01571878433227539, 0.01632598304748535, 0.016048896789550782, 0.015956128120422363, 0.015958016395568847, 0.01584332847595215, 0.01587772846221924, 0.015960160255432128, 0.01582521629333496, 0.015634431838989257, 0.015476896286010742, 0.01537939167022705, 0.015334591865539551, 0.01541500759124756, 0.015445280075073241, 0.015397600173950195, 
0.01573583984375, 0.01627849578857422, 0.016133951187133788, 0.01599897575378418, 0.01588028812408447, 0.015704159736633302, 0.01587398433685303, 0.015662783622741698, 0.0156430082321167, 0.015563039779663087, 0.01567263984680176, 0.015817119598388673, 0.01565065574645996, 0.0156079683303833, 0.015525888442993165, 0.015831328392028807, 0.0161297607421875, 0.01604230308532715, 0.01567734432220459, 0.015456352233886719, 0.015337504386901856, 0.015281472206115723, 0.015401056289672851, 0.015403936386108399, 0.015381471633911133, 0.015410143852233887, 0.015272064208984375, 0.015330975532531738, 0.01573859214782715, 0.01599721622467041, 0.01588217639923096, 0.015714303970336914, 0.01574502372741699, 0.015670783996582033, 0.015507967948913574, 0.015378047943115234, 0.015288607597351074, 0.01532528018951416, 0.015206496238708496, 0.01532316780090332, 0.015339391708374023, 0.01560985565185547, 0.015839327812194826, 0.015720288276672365, 0.015495231628417969, 0.015325183868408204, 0.015242783546447754, 0.015310463905334472, 0.015288703918457032, 0.015422240257263183, 0.015337311744689941, 0.0157260799407959, 0.01605241584777832, 0.015830944061279297, 0.015692031860351563, 0.01569372844696045, 0.015802528381347655, 0.01585923194885254, 0.01579599952697754, 0.016063360214233397, 0.016072351455688475, 0.01576908779144287, 0.015467103958129882, 0.015544320106506348, 0.01542080020904541, 0.015520383834838867, 0.015521504402160645, 0.01584598445892334, 0.015982303619384765, 0.015843584060668946, 0.015382464408874513, 0.015206175804138184, 0.015220959663391114, 0.01529526424407959, 0.015274239540100097, 0.015581567764282227, 0.01583071994781494, 0.015504063606262207, 0.015340831756591797, 0.015282912254333496, 0.01514128017425537, 0.015233920097351075, 0.01578812789916992, 0.015725695610046386, 0.015417920112609864, 0.01527513599395752, 0.01540822410583496, 0.016152544021606444, 0.015738847732543946, 0.015753055572509767, 0.01590060806274414, 0.015847328186035157, 0.01572665596008301, 0.015451423645019531, 0.015477567672729493, 0.015392352104187011, 0.015473055839538574, 0.015499263763427735, 0.015927552223205566, 0.015892224311828615, 0.01568278408050537, 0.015634943962097168, 0.015677727699279786, 0.015550463676452637, 0.01567129611968994, 0.01563212776184082, 0.015974656105041504, 0.015945376396179198, 0.015656800270080568, 0.015597087860107422, 0.015575200080871583, 0.015730624198913575, 0.015968576431274414, 0.01618374443054199, 0.016111743927001952, 0.016024831771850587, 0.01603971290588379, 0.016156959533691406, 0.01601366424560547, 0.015803872108459474, 0.015938431739807128, 0.01597644805908203, 0.01616864013671875, 0.01603206443786621, 0.015817888259887697, 0.015786848068237304, 0.01577894401550293, 0.015696767807006837, 0.015575039863586425, 0.015529760360717773, 0.015507935523986816, 0.015417152404785157, 0.015439776420593262, 0.015269439697265625, 0.015221088409423829, 0.01522492790222168, 0.015332799911499023, 0.01528656005859375, 0.015279775619506835, 0.015202976226806641, 0.015263744354248047, 0.01524131202697754, 0.015317119598388673, 0.015318464279174805, 0.01522655963897705, 0.015205056190490722, 0.01521884822845459, 0.015218815803527831, 0.015195839881896973, 0.01589776039123535, 0.015909024238586426, 0.015360095977783202, 0.015249152183532715, 0.015317888259887695, 0.015316864013671876, 0.01528451156616211, 0.015238080024719238, 0.015146240234375, 0.015236767768859863, 0.015226880073547363, 0.015339520454406739, 0.015172767639160156, 0.015291232109069824, 0.015232768058776856, 
0.015249664306640626, 0.015280159950256347, 0.015225055694580079, 0.015152704238891602, 0.015214271545410156, 0.015245823860168458, 0.015239520072937012, 0.015244192123413085, 0.015245759963989258, 0.015180095672607422, 0.015196160316467285, 0.015239423751831054, 0.01535974407196045, 0.015304320335388184, 0.015405280113220215, 0.015251392364501954, 0.015240608215332031, 0.01524732780456543, 0.015270751953125, 0.015623807907104492, 0.015324640274047851, 0.015706656455993652, 0.015360383987426758, 0.015273088455200196, 0.015301600456237793, 0.015316896438598633, 0.016030752182006835, 0.015495231628417969, 0.01599180793762207, 0.018083744049072266, 0.015622143745422363, 0.01538428783416748, 0.015332703590393066, 0.015360671997070313, 0.015268128395080566, 0.015361280441284179, 0.015485695838928223, 0.01529036808013916, 0.015402496337890625, 0.015417856216430664, 0.015231231689453124, 0.015262656211853028, 0.015256383895874023, 0.015170592308044434, 0.015291359901428223, 0.015191840171813966, 0.01517142391204834, 0.01535366439819336, 0.015226816177368164, 0.015768192291259767, 0.015575039863586425, 0.015262016296386719, 0.015312159538269043, 0.015284640312194824, 0.015532032012939453, 0.01546668815612793, 0.01538809585571289, 0.015274399757385254, 0.015318400382995605, 0.015231328010559082, 0.015228320121765136, 0.015467488288879394, 0.01541312026977539, 0.015340607643127441, 0.015367103576660155, 0.015281184196472168, 0.015322367668151855, 0.015417183876037598, 0.015439743995666505, 0.015368191719055176, 0.0153372163772583, 0.015287775993347168, 0.015147487640380859, 0.015284064292907715, 0.01524783992767334, 0.015372287750244141, 0.015341567993164062, 0.01526371192932129, 0.01540732765197754, 0.015411328315734864, 0.01527571201324463, 0.015545663833618164, 0.01541801643371582, 0.015269920349121094, 0.015197216033935547, 0.015266783714294433, 0.015415295600891114, 0.01537446403503418, 0.015298432350158691, 0.01604374313354492, 0.015552800178527832, 0.015339136123657227, 0.015339903831481934, 0.015240608215332031, 0.015314944267272949, 0.015300640106201172, 0.015352383613586425, 0.015238719940185547, 0.015316736221313477, 0.015379072189331055, 0.015261823654174805, 0.015218624114990234, 0.015388671875, 0.015263615608215332, 0.015251680374145508, 0.015393823623657226, 0.015273887634277344, 0.015509951591491699, 0.015585280418395997, 0.015249695777893067, 0.015236319541931153, 0.015227999687194824, 0.015265664100646973, 0.015650143623352052, 0.015460639953613282, 0.015405247688293457, 0.015484031677246093, 0.015410016059875489, 0.015321248054504395, 0.01554214382171631, 0.015727840423583984, 0.015700063705444335, 0.015733344078063965, 0.015657024383544924, 0.01557215976715088, 0.015518303871154784, 0.015454463958740235, 0.015370304107666016, 0.015289759635925293, 0.015280672073364258, 0.015339520454406739, 0.015361568450927735, 0.01541478443145752, 0.015414239883422852, 0.01534768009185791, 0.015386528015136718, 0.015372223854064941, 0.015274175643920898, 0.015300000190734863, 0.015349535942077637, 0.015260448455810547, 0.015211647987365722, 0.015260576248168945, 0.015316255569458008, 0.015275872230529786, 0.015286751747131348, 0.016615840911865236, 0.015372287750244141, 0.015892191886901854, 0.015289952278137207, 0.015436767578125, 0.015325984001159669, 0.015341823577880859, 0.016048095703125, 0.015333632469177245, 0.015335904121398927, 0.015481951713562012, 0.015335647583007813, 0.015356320381164551, 0.015301088333129882, 0.015191871643066407, 0.015224703788757324, 0.015322879791259765, 
0.015176223754882813, 0.01529635238647461, 0.015138815879821778, 0.015291744232177735, 0.015219327926635742, 0.015339808464050293, 0.015140255928039551, 0.01520639991760254, 0.015264351844787598, 0.015239359855651856, 0.015222399711608887, 0.015225055694580079, 0.01520803165435791, 0.015241408348083496, 0.015275839805603028, 0.015340991973876953, 0.015365056037902832, 0.015366175651550292, 0.01533779239654541, 0.015298015594482422, 0.015309087753295899, 0.015263775825500488, 0.015169407844543457, 0.015170751571655273, 0.01519820785522461, 0.01521337604522705, 0.01574502372741699, 0.015474687576293946, 0.015376031875610352, 0.015354207992553711, 0.015338784217834472, 0.015504096031188965, 0.015308159828186035, 0.01552790355682373, 0.015378591537475585, 0.015357983589172364, 0.015319519996643067, 0.015258848190307617, 0.015108736038208007, 0.015292832374572754, 0.015363424301147462, 0.015336000442504882, 0.015273823738098144, 0.015183967590332031, 0.01534761619567871, 0.015231200218200684, 0.015451935768127442, 0.01593139171600342, 0.015466496467590332, 0.015331680297851563, 0.015302304267883302, 0.015339296340942382, 0.015380288124084473, 0.015368320465087891, 0.01618515205383301, 0.015571423530578613, 0.015593215942382813, 0.01544166374206543, 0.015630847930908204, 0.015534079551696778, 0.015466239929199219, 0.01536025619506836, 0.015359007835388183, 0.015393280029296874, 0.015415679931640626, 0.015388832092285157, 0.015284159660339355, 0.01532960033416748, 0.015398464202880859, 0.015390912055969238, 0.015633919715881347, 0.015599871635437012, 0.01551200008392334, 0.015542176246643067, 0.015457792282104492, 0.015397215843200683, 0.015400768280029297, 0.01533142375946045, 0.01569215965270996, 0.01548902416229248, 0.015404095649719238, 0.015412320137023925, 0.015339232444763184, 0.015374431610107422, 0.015404704093933106, 0.015370847702026368, 0.01533027172088623, 0.015452896118164063, 0.015259743690490723, 0.015302656173706054, 0.015351391792297364, 0.015345248222351074, 0.016536544799804688, 0.017024864196777345, 0.016726015090942382, 0.017241376876831055, 0.01662575912475586, 0.0160600643157959, 0.015696479797363282, 0.015319359779357911, 0.015376447677612304, 0.01533679962158203, 0.015407103538513184, 0.015471487998962403, 0.015672160148620606, 0.015510368347167968, 0.015269856452941894, 0.015208191871643066, 0.01536963176727295, 0.01563270378112793, 0.01625974464416504, 0.016045440673828126, 0.016181631088256834, 0.016002815246582033, 0.015974271774291993, 0.01597702407836914, 0.016052288055419923, 0.01701273536682129, 0.01640652847290039, 0.01639628791809082, 0.016237791061401368, 0.016130399703979493, 0.016060640335083008, 0.016238304138183595, 0.016384639739990235, 0.016473983764648436, 0.016270559310913087, 0.016314367294311523, 0.016239391326904298, 0.01606559944152832, 0.016217376708984373, 0.016354719161987306, 0.01632784080505371, 0.01632419204711914, 0.01626460838317871, 0.016171648025512696, 0.016250240325927735, 0.016209728240966798, 0.016099615097045897, 0.016417280197143554, 0.01624239921569824, 0.01613590431213379, 0.0162106876373291, 0.01624678421020508, 0.016261119842529297, 0.01627449607849121, 0.016096031188964844, 0.016224191665649413, 0.016104799270629883, 0.016128255844116212, 0.016224063873291016, 0.016210208892822264, 0.016161312103271486, 0.016210016250610353, 0.016502527236938475, 0.016256351470947266, 0.016229183197021484, 0.01622220802307129, 0.016215391159057617, 0.01635103988647461, 0.016276351928710936, 0.0162774715423584, 0.016246400833129882, 
0.01632614326477051, 0.01635807991027832, 0.016164384841918945, 0.016267711639404298, 0.01623472023010254, 0.01620582389831543, 0.016359519958496094, 0.01642857551574707, 0.016886144638061523, 0.020797119140625, 0.016943424224853516, 0.016508928298950197, 0.016352544784545897, 0.016386783599853516, 0.01718681526184082, 0.016306495666503905, 0.01636083221435547, 0.0162573127746582, 0.016197664260864258, 0.016215551376342775, 0.01632419204711914, 0.016212543487548827, 0.016877920150756835, 0.01845359992980957, 0.016303104400634767, 0.016269216537475584, 0.016234495162963866, 0.016189567565917967, 0.016274368286132813, 0.016146976470947264, 0.01623263931274414, 0.016294111251831056, 0.016190176010131837, 0.016309728622436525, 0.01627712059020996, 0.01608732795715332, 0.01621443176269531, 0.016341087341308593, 0.016223743438720704, 0.016005535125732422, 0.016375455856323242, 0.016263776779174805, 0.016195327758789062, 0.016280832290649413, 0.016327423095703127, 0.01611555290222168, 0.016220512390136718, 0.016174911499023437, 0.01603993606567383, 0.01590505599975586, 0.015884287834167482, 0.015949536323547363, 0.016109535217285156, 0.01604528045654297, 0.015813440322875977, 0.015958335876464842, 0.016080575942993162, 0.016060575485229493, 0.016095071792602538, 0.015913120269775392, 0.01584220790863037, 0.015850048065185546, 0.015958399772644044, 0.015922592163085936, 0.015917375564575197, 0.015910719871520995, 0.016040416717529298, 0.016312320709228514, 0.016103424072265626, 0.016457887649536134, 0.016278400421142578, 0.016384992599487305, 0.016058399200439454, 0.016205728530883787, 0.016924320220947267, 0.016332319259643555, 0.01670028877258301, 0.016326656341552736, 0.01621811294555664, 0.0163450870513916, 0.016266624450683595, 0.016245088577270507, 0.016042272567749025, 0.01618124771118164, 0.016148351669311525, 0.01601958465576172, 0.01598591995239258, 0.01592963218688965, 0.01612835121154785, 0.0161527042388916, 0.01613520050048828, 0.016104223251342774, 0.016181440353393556, 0.01618534469604492, 0.016287647247314452]",tokens/s,63.75445683040588,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,6409.621504,8461.877248,0.0,8059.355136,8042.68544,s,1,14.704271484375,14.704271484375,0.0,14.704271484375,14.704271484375,14.704271484375,14.704271484375,[14.704271484375],,kWh,0.00021405123063334487,2.360408454615112e-05,6.570255256199697e-05,0.000303357867741493,,MB,2238.820352,8577.220608,0.0,8160.018432,8140.420096,s,10,3.1416740722656247,0.31416740722656245,0.0003543989676867605,0.3142519836425781,0.3144785278320313,0.31452857055664063,0.3145686047363281,"[0.31338479614257814, 0.31384317016601565, 0.31410845947265625, 0.3139375915527344, 0.3140594482421875, 0.31457861328125, 0.3144570922851562, 0.3144419860839844, 0.3144674072265625, 
0.3143955078125]",tokens/s,814.8521906200954,kWh,9.253681677735888e-06,1.0205067928193293e-06,6.138563938625115e-06,1.6412752409180334e-05,tokens/kWh,15597627.601864545,MB,2245.533696,8608.677888,0.0,8191.475712,8140.422656,s,10,47.80365917968749,4.78036591796875,0.01367328844570056,4.783456298828125,4.79804609375,4.79920712890625,4.80013595703125,"[4.76052783203125, 4.7623701171875, 4.76755322265625, 4.78576953125, 4.79197607421875, 4.78442041015625, 4.7703935546875, 4.7824921875, 4.8003681640625, 4.7977880859375]",tokens/s,13.178907447898812,kWh,0.00013921097283351325,1.535553920260906e-05,8.232753461197544e-05,0.00023689404664809776,tokens/kWh,265941.67684418627,,s,630,47.800407501220725,0.07587366270035031,0.0008743016903564545,0.07579884719848633,0.0766981704711914,0.07716386070251464,0.07933067901611329,"[0.07541452789306641, 0.07498137664794922, 0.07507266998291015, 0.07418147277832031, 0.07427378845214844, 0.07528144073486329, 0.07473353576660156, 0.07438540649414062, 0.07446851348876952, 0.07621462249755859, 0.074837890625, 0.07413005065917969, 0.0746987533569336, 0.07432396697998046, 0.07440383911132813, 0.07566950225830078, 0.07496908569335937, 0.0760995864868164, 0.07542508697509766, 0.07511920166015625, 0.07511459350585938, 0.07524492645263672, 0.07531724548339844, 0.075669921875, 0.07557552337646484, 0.07566483306884765, 0.075176513671875, 0.07524352264404296, 0.07656038665771485, 0.07537459564208984, 0.07573299407958985, 0.07584278106689453, 0.07630108642578125, 0.07573680114746094, 0.07537693023681641, 0.07503257751464844, 0.07494159698486329, 0.07515964508056641, 0.07554975891113282, 0.07768646240234375, 0.07579030609130859, 0.07655967712402344, 0.07599132537841796, 0.07561670684814453, 0.07606585693359375, 0.07645894622802735, 0.0751902084350586, 0.07531321716308594, 0.07551181030273438, 0.07816957092285157, 0.07652201843261719, 0.07611548614501953, 0.07603862762451172, 0.07549318695068359, 0.07564307403564453, 0.07616051483154297, 0.07618358612060547, 0.07614921569824219, 0.07610777282714844, 0.0758554916381836, 0.0759525146484375, 0.07594598388671875, 0.07639244842529297, 0.07744572448730469, 0.07705478668212891, 0.07587318420410157, 0.07614259338378906, 0.07617097473144531, 0.07642550659179688, 0.07641686248779297, 0.07603215789794922, 0.0756937255859375, 0.07598524475097657, 0.07638835144042969, 0.07643241882324218, 0.07583424377441406, 0.07609763336181641, 0.07634528350830078, 0.07584159851074218, 0.07519846343994141, 0.07540249633789063, 0.079198974609375, 0.07562770843505859, 0.07611170959472656, 0.0757565155029297, 0.07553801727294922, 0.07558185577392579, 0.0757488021850586, 0.07563283538818359, 0.07553472137451171, 0.07522918701171875, 0.07442841339111328, 0.07495884704589843, 0.0749751968383789, 0.07548457336425782, 0.0746665267944336, 0.07441417694091797, 0.07458201599121093, 0.07438098907470703, 0.07356822204589844, 0.07468195343017578, 0.0742551040649414, 0.07441817474365234, 0.07514316558837891, 0.07568793487548828, 0.07455648040771484, 0.07513593292236329, 0.07508547210693359, 0.07572310638427734, 0.0757923812866211, 0.07554873657226563, 0.07692076873779297, 0.07607283020019531, 0.0758519058227539, 0.07580390167236328, 0.07526067352294921, 0.07575347137451172, 0.07565106964111327, 0.07526592254638671, 0.07540512084960938, 0.077214111328125, 0.07528966522216797, 0.07476233673095703, 0.07484697723388672, 0.07498067474365235, 0.07475705718994141, 0.0762529296875, 0.07505289459228516, 0.075255615234375, 0.07503472137451171, 0.07538873291015626, 
0.07542387390136719, 0.07494483184814453, 0.07548287963867187, 0.07512499237060546, 0.07527129364013672, 0.07545945739746093, 0.07536784362792968, 0.07529122924804688, 0.07511449432373046, 0.07542150115966798, 0.0759249267578125, 0.07512339019775391, 0.07494416046142578, 0.07630889892578124, 0.07552819061279296, 0.07532953643798829, 0.07526515197753907, 0.07546351623535157, 0.0754681625366211, 0.0752278060913086, 0.07544217681884766, 0.07553849792480469, 0.07546160125732422, 0.07521692657470704, 0.07563279724121094, 0.07508422088623047, 0.07576767730712891, 0.07648623657226562, 0.0771203842163086, 0.07502982330322265, 0.0753194580078125, 0.07559661102294922, 0.07681318664550782, 0.07539183807373047, 0.07528652954101563, 0.07511820983886719, 0.07494898986816406, 0.07580054473876953, 0.07562652587890625, 0.07934544372558594, 0.07595136260986328, 0.07543292999267578, 0.07711129760742187, 0.07551168060302735, 0.07576383972167969, 0.07581081390380859, 0.07576691436767578, 0.07612095642089843, 0.0758497314453125, 0.0757039337158203, 0.07580095672607422, 0.07563651275634765, 0.07577804565429687, 0.07610800170898438, 0.07700201416015626, 0.07650787353515626, 0.07543193817138671, 0.07542578887939454, 0.07617945861816407, 0.0766732177734375, 0.07530294036865234, 0.07540038299560547, 0.07611475372314454, 0.07576140594482422, 0.07558924865722656, 0.07575001525878906, 0.07577369689941406, 0.07554617309570312, 0.07548588562011718, 0.07576531219482421, 0.07611846160888672, 0.07631667327880859, 0.0759089584350586, 0.07673593902587891, 0.07981078338623047, 0.0772510757446289, 0.07532953643798829, 0.07610070037841797, 0.07541180419921875, 0.07526252746582031, 0.07544525146484375, 0.07538175964355469, 0.07590911865234375, 0.07592755126953125, 0.07554662322998047, 0.07603609466552734, 0.07604755401611328, 0.07566969299316406, 0.07584137725830079, 0.07615721893310547, 0.07553225708007813, 0.07576220703125, 0.07624687957763672, 0.0751597137451172, 0.07608684539794922, 0.07668486022949218, 0.07672512054443359, 0.07575068664550781, 0.07580335998535156, 0.07544790649414063, 0.07502275085449218, 0.07519785308837891, 0.07585648345947266, 0.07535001373291016, 0.07575100708007812, 0.07579484558105469, 0.07603199768066406, 0.0756654052734375, 0.07563600158691407, 0.07589900970458985, 0.07583958435058594, 0.07550319671630859, 0.07643638610839844, 0.07743452453613281, 0.07626172637939453, 0.07638387298583985, 0.07599552154541016, 0.07618560028076173, 0.07586815643310547, 0.07659839630126954, 0.07597350311279297, 0.0768463363647461, 0.07571868896484375, 0.07549378967285156, 0.07538719940185547, 0.07513676452636718, 0.07480681610107422, 0.07474002838134766, 0.0753463363647461, 0.07472262573242187, 0.07488937377929687, 0.07511001586914062, 0.07522806549072265, 0.07515129852294922, 0.07643910217285156, 0.07587071990966797, 0.07582470703125, 0.07478214263916015, 0.07551897430419922, 0.07571392059326172, 0.07473126220703125, 0.07514185333251953, 0.07516726684570313, 0.0752441635131836, 0.07565516662597656, 0.07574937438964843, 0.07509606170654297, 0.07536137390136718, 0.07526697540283203, 0.07558914947509765, 0.0757498550415039, 0.07649485015869141, 0.07676518249511718, 0.07618355560302735, 0.07622211456298827, 0.07720355224609375, 0.07818675231933593, 0.07683071899414062, 0.07682160186767578, 0.07636627197265625, 0.07719888305664062, 0.07675904083251953, 0.07629692840576172, 0.07659747314453125, 0.07637731170654297, 0.07897718048095703, 0.07687846374511718, 0.0760931167602539, 0.07679363250732422, 0.07598320007324219, 
0.07742012786865235, 0.07658966064453125, 0.07587423706054687, 0.07599219512939454, 0.07633945465087891, 0.07652217864990235, 0.07583487701416015, 0.07612989044189453, 0.0766760025024414, 0.07638220977783203, 0.07645494079589844, 0.07686774444580079, 0.07772592163085937, 0.07632857513427735, 0.07762998199462891, 0.07655136108398437, 0.07610399627685546, 0.0756659164428711, 0.07604940795898438, 0.07580467224121094, 0.07578931427001953, 0.07619993591308594, 0.07584508514404296, 0.07596086120605469, 0.07636492919921875, 0.07642934417724609, 0.07640553283691406, 0.07618924713134766, 0.0781296615600586, 0.07571456146240234, 0.07534591674804687, 0.07527129364013672, 0.07582393646240235, 0.0757754898071289, 0.07600758361816407, 0.07593721771240235, 0.07497321319580078, 0.07514208221435546, 0.07544012451171875, 0.07569817352294922, 0.07548518371582032, 0.07554048156738281, 0.07582412719726563, 0.0761313247680664, 0.075399169921875, 0.07583907318115235, 0.075849853515625, 0.07635997009277344, 0.07567155456542969, 0.07587430572509765, 0.07691654205322265, 0.08056137847900391, 0.07635247802734375, 0.07591321563720703, 0.07620156860351562, 0.07592550659179688, 0.07590707397460937, 0.07524527740478516, 0.0758031997680664, 0.07550502777099609, 0.0759447021484375, 0.07509606170654297, 0.07495475006103515, 0.07496435546875, 0.0752236785888672, 0.07570361328125, 0.079295166015625, 0.07550962829589844, 0.07537267303466796, 0.07577702331542968, 0.07603711700439453, 0.07599922943115234, 0.07538893127441407, 0.07518370819091796, 0.07504438018798829, 0.07479545593261719, 0.07526777648925781, 0.07641487884521485, 0.0758128662109375, 0.07610297393798827, 0.07560227203369141, 0.07575587463378906, 0.07589411163330079, 0.07599763488769531, 0.07583907318115235, 0.07569062042236328, 0.07628185272216798, 0.07666483306884765, 0.07609868621826171, 0.07572377777099609, 0.075938720703125, 0.07572374725341798, 0.07674674987792969, 0.07572172546386718, 0.07546688079833984, 0.07599603271484374, 0.07588044738769531, 0.07565299224853515, 0.07543411254882812, 0.07522694396972657, 0.075321533203125, 0.07586796569824218, 0.07575981140136719, 0.0757323226928711, 0.07579714965820313, 0.07590627288818359, 0.07546931457519532, 0.07502467346191406, 0.07517183685302735, 0.0803855972290039, 0.07486460876464844, 0.07436943817138672, 0.07454930877685546, 0.07499763488769531, 0.07522013092041016, 0.07520310211181641, 0.07506578826904296, 0.0749277114868164, 0.0752147216796875, 0.07915164947509766, 0.07556883239746094, 0.07519468688964843, 0.0749813461303711, 0.07538626861572266, 0.07568672180175781, 0.07607071685791016, 0.07619280242919922, 0.07610467529296874, 0.0757923812866211, 0.07536399841308594, 0.07489366149902343, 0.07530496215820312, 0.07559782409667969, 0.07544153594970703, 0.07515974426269531, 0.07535987091064453, 0.07558995056152344, 0.0753996810913086, 0.07569932556152344, 0.07560076904296875, 0.07609932708740234, 0.07588249969482422, 0.07569190216064453, 0.07546803283691406, 0.07560912322998047, 0.07599257659912109, 0.0764480972290039, 0.07640473937988282, 0.07597875213623047, 0.07600851440429687, 0.07665325164794921, 0.07602098846435547, 0.07904768371582031, 0.07684505462646485, 0.07669532775878907, 0.07616246032714843, 0.07591609954833985, 0.07613235473632812, 0.07586761474609376, 0.0761262435913086, 0.07610585784912109, 0.07617369842529297, 0.07657638549804688, 0.07640921783447266, 0.07629618835449219, 0.07557529449462891, 0.07557324981689453, 0.07556038665771485, 0.07553814697265625, 0.07555567932128907, 
0.07635340881347656, 0.07604236602783203, 0.07602585601806641, 0.07793049621582031, 0.07595388793945312, 0.07638585662841797, 0.07627235412597656, 0.07712105560302734, 0.07613632202148438, 0.07720806121826172, 0.07726908874511719, 0.07631980895996093, 0.07580556488037109, 0.07623686218261719, 0.07625676727294922, 0.07596288299560547, 0.07601692962646485, 0.07612899017333985, 0.07655423736572266, 0.0768548126220703, 0.07629666900634766, 0.07507148742675782, 0.07443583679199219, 0.07476710510253906, 0.074821533203125, 0.07468185424804688, 0.07433830261230469, 0.07360883331298829, 0.07311459350585937, 0.07451033782958984, 0.07491337585449219, 0.0738861083984375, 0.07449577331542968, 0.07621794891357422, 0.07581737518310547, 0.07564054107666016, 0.07493046569824219, 0.07464256286621093, 0.0749901123046875, 0.07508560180664063, 0.07481401824951171, 0.0755445785522461, 0.07572592163085938, 0.07542585754394532, 0.07549014282226563, 0.07549747467041015, 0.07558348846435547, 0.07561420440673829, 0.07602937316894531, 0.07535635375976563, 0.07526790618896484, 0.07629020690917969, 0.07608080291748047, 0.07584844970703125, 0.07584767913818359, 0.07620547485351563, 0.07606537628173828, 0.07628390502929687, 0.0764067840576172, 0.07676518249511718, 0.0802467498779297, 0.0763612823486328, 0.07691516876220703, 0.07934518432617188, 0.07635750579833984, 0.07598140716552734, 0.07623680114746094, 0.0767376937866211, 0.07638304138183594, 0.07592352294921875, 0.07599654388427735, 0.07644127655029297, 0.07852534484863281, 0.07650918579101562, 0.0766623077392578, 0.07663977813720703, 0.07639750671386719, 0.07663616180419922, 0.07650508880615234, 0.07632281494140625, 0.07643081665039063, 0.07614518737792969, 0.0762449951171875, 0.07598489379882813, 0.07611186981201172, 0.07636736297607422, 0.0756864013671875, 0.07570425415039063, 0.07604608154296875, 0.07592742156982422, 0.07607135772705079, 0.07689949035644532, 0.07595708465576172, 0.07642892456054687, 0.07704537963867188, 0.07637478637695312, 0.07743004608154297, 0.07730233764648438, 0.07580242919921874, 0.07607430267333984, 0.07616505432128906, 0.07590624237060548, 0.07573286437988282, 0.07611504364013671, 0.07570928192138672, 0.07606451416015625, 0.08156594848632813, 0.07670329284667969, 0.0765546875, 0.0763736343383789, 0.07645632171630859, 0.07669760131835937, 0.07594374084472656, 0.07610182189941406, 0.07635968017578125, 0.07643126678466797, 0.07643145751953125, 0.07663763427734376, 0.0761899185180664, 0.07645791625976563, 0.07608975982666015, 0.07560396575927734, 0.07560505676269531, 0.07596249389648438, 0.07707676696777344, 0.07705859375, 0.07689113616943359, 0.0757927017211914, 0.07608748626708985, 0.07632332611083985, 0.07605859375, 0.0759682846069336, 0.07593353271484375, 0.076065185546875, 0.07643545532226563, 0.07560591888427734, 0.07572406768798828, 0.07579714965820313, 0.07546419525146485, 0.07576643371582031, 0.07583539581298829, 0.07645104217529297, 0.07599183654785156, 0.07805097961425782, 0.07526640319824218, 0.07543807983398437, 0.07501618957519532, 0.07525360107421875, 0.07550601959228516, 0.07572051239013672, 0.07547203063964844, 0.07547743988037109, 0.07515586853027344, 0.07512678527832031, 0.0767488021850586, 0.07569398498535156, 0.0758285140991211, 0.07524230194091797, 0.07565494537353516]",tokens/s,13.179803958447664,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,7434.891264,8041.463808,0.0,7646.216192,7627.584,s,1,12.864208984375,12.864208984375,0.0,12.864208984375,12.864208984375,12.864208984375,12.864208984375,[12.864208984375],,kWh,0.00017115813892916567,1.8867194484805756e-05,5.499921066600029e-05,0.0002450245440799717,,MB,1770.37312,8687.386624,0.0,8277.458944,8199.8592,s,10,3.5576812744140627,0.3557681274414063,0.0009765429764703398,0.35559803771972653,0.3571989959716797,0.35724894256591794,0.35728889984130857,"[0.35387228393554687, 0.35567178344726563, 0.3555242919921875, 0.35504611206054687, 0.356594970703125, 0.35571722412109374, 0.35729888916015623, 0.35718789672851564, 0.3553077087402344, 0.35546011352539064]",tokens/s,719.5697991303683,kWh,1.040425594511478e-05,1.1474067470608325e-06,6.919526608413771e-06,1.8471189300589384e-05,tokens/kWh,13859421.601609133,MB,1777.283072,9001.959424,0.0,8592.031744,8476.849152,s,10,29.4489931640625,2.94489931640625,0.004694868985522543,2.9471679687499996,2.9489606689453125,2.950297595214844,2.951367136230469,"[2.9393134765625, 2.937792724609375, 2.941801513671875, 2.951634521484375, 2.947964111328125, 2.938910400390625, 2.948576904296875, 2.947670654296875, 2.946665283203125, 2.94866357421875]",tokens/s,21.392921533521495,kWh,8.607475950238492e-05,9.494233178704241e-06,5.7117385732186307e-05,0.00015268637841327548,tokens/kWh,412610.48074293963,,s,630,29.442196475982673,0.046733645199972494,0.00039584723390524517,0.04672246360778809,0.047179973983764646,0.04730684471130371,0.04758142642974854,"[0.04675174331665039, 0.04630732727050781, 0.04612227249145508, 0.04621382522583008, 0.04601222229003906, 0.04603513717651367, 0.045977375030517575, 0.046186145782470704, 0.04600214385986328, 0.04604787063598633, 0.04617942428588867, 0.046322593688964846, 0.04635238265991211, 0.04619488143920898, 0.04595267105102539, 0.047589054107666014, 0.04664163208007813, 0.04619468688964844, 0.046137344360351565, 0.04655923080444336, 0.046532608032226565, 0.0466921272277832, 0.04645084762573242, 0.046775775909423827, 0.04659574508666992, 0.04660707092285156, 0.046514175415039063, 0.0465307502746582, 0.046610462188720704, 0.04665456008911133, 0.046824542999267575, 0.04699894332885742, 0.04659212875366211, 0.046515968322753905, 0.04667855834960938, 0.046647262573242185, 0.04655107116699219, 0.04651827239990235, 0.04657356643676758, 0.046598014831542967, 0.04691088104248047, 0.046701278686523434, 0.046870529174804686, 0.04680089569091797, 0.04681887817382813, 0.04688326263427734, 0.04726988983154297, 0.047314945220947265, 0.04676812744140625, 0.04679065704345703, 0.04725078582763672, 0.04715711975097656, 0.0470263671875, 0.04682403182983398, 0.047108097076416014, 0.047179359436035156, 0.04681513595581055, 0.04697753524780274, 0.046992542266845704, 0.046936065673828124, 0.04703881454467773, 0.047027839660644534, 
0.04711075210571289, 0.04678668975830078, 0.04634348678588867, 0.04602937698364258, 0.04615167999267578, 0.04604083251953125, 0.046117118835449215, 0.04604278564453125, 0.046112991333007815, 0.04624806213378906, 0.046059070587158205, 0.04613167953491211, 0.046142784118652344, 0.046412448883056644, 0.04644454574584961, 0.04618841552734375, 0.04636275100708008, 0.046405406951904295, 0.04638947296142578, 0.046378753662109376, 0.04655539321899414, 0.046561088562011715, 0.04673286437988281, 0.04660697555541992, 0.04645251083374023, 0.04626192092895508, 0.04644716644287109, 0.04657148742675781, 0.04634627151489258, 0.04653635025024414, 0.046569278717041016, 0.04647951889038086, 0.04656911849975586, 0.04654108810424805, 0.04648390579223633, 0.04695852661132813, 0.04680505752563477, 0.04665139389038086, 0.0466732177734375, 0.04684255981445312, 0.046782302856445315, 0.04661468887329102, 0.046656929016113284, 0.04688070297241211, 0.046907806396484376, 0.04685030364990234, 0.04668620681762695, 0.04675718307495117, 0.046903553009033205, 0.04694675064086914, 0.04683161544799805, 0.046911487579345705, 0.047023582458496097, 0.04688057708740234, 0.04682825469970703, 0.04681488037109375, 0.047687553405761716, 0.04710015869140625, 0.04689712142944336, 0.04690470504760742, 0.04720729446411133, 0.04728358459472656, 0.04714966583251953, 0.04723305511474609, 0.046950878143310545, 0.046276607513427735, 0.046053375244140625, 0.046298145294189456, 0.046391902923583986, 0.04619820785522461, 0.046107486724853514, 0.04644259262084961, 0.046209022521972655, 0.04628611373901367, 0.046545631408691404, 0.04641177749633789, 0.046578784942626954, 0.04634921646118164, 0.046198719024658205, 0.04683987045288086, 0.04657670211791992, 0.04654560089111328, 0.04636608123779297, 0.046387264251708984, 0.046492481231689455, 0.04659548950195312, 0.04657187271118164, 0.04680313491821289, 0.046626880645751954, 0.0465428466796875, 0.046639102935791016, 0.04648080062866211, 0.046588512420654295, 0.04672716903686523, 0.04665139389038086, 0.0465715217590332, 0.046709983825683594, 0.04660713577270508, 0.0464832649230957, 0.04665267181396485, 0.04670969772338867, 0.047304222106933594, 0.04710652923583984, 0.04703619384765625, 0.04677257537841797, 0.046575103759765625, 0.0468375358581543, 0.046790721893310544, 0.04671132659912109, 0.04687420654296875, 0.04707145690917969, 0.04687686538696289, 0.04673936080932617, 0.046857566833496095, 0.046793472290039065, 0.04679827117919922, 0.04704108810424805, 0.04687177658081055, 0.04691775894165039, 0.04696745681762695, 0.04695040130615234, 0.04698099136352539, 0.0470748176574707, 0.047075294494628904, 0.04735657501220703, 0.04726784133911133, 0.04704630279541016, 0.046771488189697265, 0.04653152084350586, 0.046195838928222654, 0.04627260971069336, 0.04611481475830078, 0.04648787307739258, 0.04628121566772461, 0.046383102416992186, 0.04628275299072265, 0.04667391967773438, 0.046577503204345706, 0.04658396911621094, 0.047043617248535154, 0.04635286331176758, 0.04654959869384766, 0.04636415863037109, 0.04645724868774414, 0.04655104064941406, 0.046690303802490236, 0.046698497772216796, 0.046712833404541014, 0.046827518463134765, 0.04659814453125, 0.04686643218994141, 0.046632575988769534, 0.04665996932983398, 0.04673503875732422, 0.04665731048583984, 0.046725440979003906, 0.046747871398925785, 0.04674764633178711, 0.046731487274169925, 0.046870304107666017, 0.04697644805908203, 0.04708819198608399, 0.04674345779418945, 0.04669200134277344, 0.04674105453491211, 0.04686140823364258, 0.046947456359863284, 
0.046949024200439456, 0.046944255828857424, 0.0470362548828125, 0.04706067276000977, 0.047032798767089844, 0.046964542388916015, 0.04718764877319336, 0.04708607864379883, 0.047355712890625, 0.04789456176757813, 0.04701609420776367, 0.047088638305664066, 0.047378593444824216, 0.04722979354858398, 0.047388671875, 0.04740630340576172, 0.047112640380859376, 0.04718550491333008, 0.04723174285888672, 0.04712857437133789, 0.047085567474365236, 0.04717907333374023, 0.047532737731933596, 0.046886878967285155, 0.04640470504760742, 0.04616672134399414, 0.04625616073608398, 0.04635881423950195, 0.04633190536499023, 0.04627257537841797, 0.04637472152709961, 0.04632793426513672, 0.04638057708740234, 0.046549121856689454, 0.046526817321777346, 0.04638627243041992, 0.046549919128417966, 0.04652032089233398, 0.04656947326660156, 0.04644659042358398, 0.046772224426269535, 0.04675971221923828, 0.04668150329589844, 0.046911487579345705, 0.046805824279785156, 0.04672512054443359, 0.04671897506713867, 0.046628543853759766, 0.046551361083984374, 0.0467242546081543, 0.04657443237304688, 0.04653180694580078, 0.04678940963745117, 0.046632095336914064, 0.04652732849121094, 0.046618175506591794, 0.04686892700195312, 0.04678601455688477, 0.046763809204101565, 0.04661280059814453, 0.04675990295410156, 0.046938175201416014, 0.04693443298339844, 0.04671846389770508, 0.04678092956542969, 0.04696176147460938, 0.046870590209960934, 0.04685641479492188, 0.046895328521728515, 0.046858657836914064, 0.04683308792114258, 0.04675823974609375, 0.04678678512573242, 0.04701513671875, 0.04691628646850586, 0.04674979019165039, 0.046766399383544925, 0.04702105712890625, 0.04704915237426758, 0.047167713165283204, 0.04698934555053711, 0.05101363372802734, 0.046992385864257816, 0.04718899154663086, 0.047017982482910156, 0.04716857528686524, 0.0469502067565918, 0.04632777786254883, 0.046007553100585935, 0.046236129760742185, 0.04599014282226563, 0.04598175811767578, 0.04607084655761719, 0.04626512145996094, 0.04623580932617188, 0.046063617706298826, 0.04633190536499023, 0.04641177749633789, 0.04627807998657227, 0.04620550537109375, 0.04622332763671875, 0.0462380485534668, 0.04642575836181641, 0.04644457626342773, 0.04628438568115235, 0.04657603073120117, 0.046483070373535156, 0.04647155380249023, 0.04640972900390625, 0.046585662841796875, 0.04639968109130859, 0.046927871704101565, 0.04662681579589844, 0.04660224151611328, 0.046604286193847655, 0.046564510345458984, 0.046469982147216794, 0.046572574615478514, 0.04651084899902344, 0.04661455917358399, 0.046400798797607425, 0.046715328216552734, 0.0467276496887207, 0.04673235321044922, 0.04671379089355469, 0.047067134857177735, 0.04703033447265625, 0.04680435180664062, 0.04695302581787109, 0.04717363357543945, 0.046974433898925784, 0.04689769744873047, 0.046876670837402344, 0.04688076782226563, 0.046935264587402346, 0.046741886138916014, 0.04703417587280274, 0.047019870758056644, 0.04681308746337891, 0.04661743927001953, 0.04711782455444336, 0.04725763320922852, 0.04694883346557617, 0.046865535736083985, 0.046951297760009766, 0.04711334228515625, 0.04722880172729492, 0.04698828887939453, 0.04722809600830078, 0.047352577209472654, 0.04655500793457031, 0.04614361572265625, 0.04616396713256836, 0.04604927825927734, 0.04619878387451172, 0.046186496734619144, 0.046080001831054686, 0.04636483383178711, 0.046333633422851565, 0.04637449645996094, 0.04641030502319336, 0.046432254791259765, 0.04627983856201172, 0.046377536773681644, 0.04661379241943359, 0.04652668762207031, 0.04658598327636719, 
0.04649635314941406, 0.04657555389404297, 0.04688294219970703, 0.046720001220703126, 0.04659491348266601, 0.04659225463867187, 0.04657756805419922, 0.0465299186706543, 0.04648614501953125, 0.0465530891418457, 0.04659164810180664, 0.04650947189331055, 0.04654380798339844, 0.046611808776855466, 0.046817214965820315, 0.04690198516845703, 0.046794750213623046, 0.046936065673828124, 0.047282176971435545, 0.046876224517822265, 0.046956993103027346, 0.048097278594970705, 0.04684185409545898, 0.046847488403320314, 0.04699801635742187, 0.04717513656616211, 0.04698076629638672, 0.046873470306396485, 0.04692323303222656, 0.046774078369140625, 0.046754528045654296, 0.04701593780517578, 0.04731903839111328, 0.047562751770019535, 0.04712172698974609, 0.04693619155883789, 0.04728070449829101, 0.047308990478515625, 0.04722022247314453, 0.04727974319458008, 0.047282878875732424, 0.04724313735961914, 0.04750956726074219, 0.0471736946105957, 0.04748672103881836, 0.04725417709350586, 0.04667107009887695, 0.04606032180786133, 0.046238849639892575, 0.046107521057128904, 0.04626432037353516, 0.04598988723754883, 0.046266368865966793, 0.046317569732666014, 0.04634624099731445, 0.046438369750976566, 0.04638518524169922, 0.04652851104736328, 0.04637417602539062, 0.04641251373291016, 0.04661862564086914, 0.046642879486083984, 0.04675411224365234, 0.04655718231201172, 0.04668975830078125, 0.04669494247436524, 0.04700710296630859, 0.04669635009765625, 0.046785247802734374, 0.0465530891418457, 0.04639539337158203, 0.04681913757324219, 0.04664748764038086, 0.04639904022216797, 0.04653302383422851, 0.04650191879272461, 0.04664022445678711, 0.04673827362060547, 0.046702014923095704, 0.046567169189453125, 0.04680179214477539, 0.04696268844604492, 0.046923454284667966, 0.04696249771118164, 0.0470340461730957, 0.04696665573120117, 0.04688931274414063, 0.04721654510498047, 0.04742540740966797, 0.04712326431274414, 0.04698112106323242, 0.046919456481933595, 0.047067230224609374, 0.04703388977050781, 0.04668463897705078, 0.046887039184570316, 0.046835136413574216, 0.047061569213867185, 0.047092929840087894, 0.04710406494140625, 0.04705356979370117, 0.04709785461425781, 0.047034366607666016, 0.04717567825317383, 0.04700774383544922, 0.04738627243041992, 0.048173408508300784, 0.04740095901489258, 0.046900352478027346, 0.04637948989868164, 0.046260414123535154, 0.04635465621948242, 0.04627206420898437, 0.0462729606628418, 0.04611276626586914, 0.04628803253173828, 0.046330718994140624, 0.04650185775756836, 0.04651190567016601, 0.046370174407958986, 0.0464692497253418, 0.046408447265625, 0.046432254791259765, 0.04661161422729492, 0.04686934280395508, 0.04674764633178711, 0.04640288162231445, 0.04658041763305664, 0.04658790588378906, 0.04652851104736328, 0.046301185607910154, 0.04637081527709961, 0.04646297454833984, 0.04640153503417969, 0.046637054443359374, 0.047332511901855466, 0.04661948776245117, 0.04657916641235352, 0.046527008056640624, 0.046720672607421875, 0.046696800231933594, 0.046635009765625, 0.046532608032226565, 0.04681932830810547, 0.04680704116821289, 0.0470648307800293, 0.04705257415771484, 0.04683148956298828, 0.04678102493286133, 0.04741849517822266, 0.047317344665527346, 0.04697507095336914, 0.04700950241088867, 0.04690790557861328, 0.04692745590209961, 0.046977375030517576, 0.04690563201904297, 0.046886913299560545, 0.04700569534301758, 0.04714236831665039, 0.047093505859375, 0.04699625778198242, 0.04714684677124024, 0.04704025650024414, 0.047198112487792966, 0.04712819290161133, 0.047227775573730466, 
0.04735404968261719, 0.04737577438354492, 0.047178142547607424, 0.04736159896850586, 0.047143009185791014, 0.04648729705810547, 0.04593679809570313, 0.04614227294921875, 0.046196670532226564, 0.04627619171142578, 0.046227935791015626, 0.046321247100830076, 0.04644112014770508, 0.04656224060058594, 0.04671366500854492, 0.046415870666503906, 0.046380161285400394, 0.04662771224975586, 0.04628889465332031, 0.046519390106201174, 0.04634822463989258, 0.046595039367675783, 0.046483455657958986, 0.04659369659423828, 0.0465733757019043, 0.04668636703491211, 0.04647907257080078, 0.046527137756347654, 0.04644659042358398, 0.04631264114379883, 0.046498622894287106, 0.04683161544799805, 0.04676156616210937, 0.04669203186035156, 0.0466495361328125, 0.04700214385986328, 0.046747230529785154, 0.04671939086914063, 0.047019870758056644, 0.04720848083496094, 0.04716352081298828, 0.046943614959716794, 0.047325374603271485, 0.04681363296508789, 0.047034366607666016, 0.04703961563110352, 0.04683456039428711, 0.04698268890380859, 0.047159648895263674, 0.04700787353515625, 0.04679884719848633, 0.04689446258544922, 0.046905982971191404, 0.046811134338378906, 0.047075328826904295, 0.04694015884399414, 0.047298561096191405, 0.047933441162109375, 0.047202144622802734, 0.04711439895629883, 0.04751769638061523, 0.04726764678955078, 0.04721078491210937, 0.046992927551269534, 0.04710847854614258, 0.047242305755615235, 0.04745235061645508]",tokens/s,21.39786005823035,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,828.747776,551.419904,0.0,148.897792,141.633024,s,1,7.98030078125,7.98030078125,0.0,7.98030078125,7.98030078125,7.98030078125,7.98030078125,[7.98030078125],,kWh,1.3114280550007608e-05,1.4389054138560913e-06,3.463891660018814e-06,1.8017077623882515e-05,,MB,1313.529856,666.763264,0.0,249.561088,216.900608,s,17,0.2114805097579956,0.012440029985764448,7.532251414136488e-05,0.012409215927124023,0.012553395080566406,0.012558079910278321,0.012561458816528322,"[0.012557024002075196, 0.012437631607055664, 0.012515423774719238, 0.012390015602111816, 0.012491071701049804, 0.01235427188873291, 0.012353407859802245, 0.012471776008605957, 0.012484224319458007, 0.01240783977508545, 0.012382431983947754, 0.01230134391784668, 0.012562303543090821, 0.012406911849975586, 0.012404640197753907, 0.012550975799560547, 0.012409215927124023]",tokens/s,20578.728531438395,kWh,3.609384322517634e-07,3.9805107724010096e-08,1.935281729517943e-07,5.942717129275678e-07,tokens/kWh,430779379.92179394,MB,1346.88768,691.929088,0.0,274.726912,216.903168,s,17,9.889499084472655,0.5817352402630975,0.004541329502752051,0.58014697265625,0.5861553588867188,0.5907772338867188,0.5952288354492188,"[0.5893861083984375, 0.5794647216796875, 0.58014697265625, 0.5799638061523438, 0.5809121704101563, 0.5787658081054687, 0.5799323120117188, 0.58037255859375, 0.5785889282226563, 0.5773050537109375, 0.5797046508789062, 
0.5963417358398437, 0.5840015258789063, 0.5810536499023438, 0.57846044921875, 0.5815494995117187, 0.5835491333007813]",tokens/s,108.29668832080279,kWh,1.67704734161262e-05,1.8495102049753531e-06,5.929177342492492e-06,2.454916096359405e-05,tokens/kWh,2566279.1528161727,,s,1071,9.881024981498713,0.00922598037488209,0.00022799924920853017,0.009170880317687988,0.009369440078735351,0.009474031925201416,0.010105347061157224,"[0.00930191993713379, 0.011237471580505372, 0.011155455589294434, 0.009602879524230957, 0.009520480155944824, 0.009451583862304687, 0.009441472053527833, 0.00935091209411621, 0.0093274564743042, 0.009289695739746093, 0.009305472373962403, 0.009303775787353516, 0.009318880081176758, 0.009316831588745118, 0.009318495750427246, 0.009351072311401367, 0.00943712043762207, 0.009551199913024902, 0.00974307155609131, 0.009320544242858888, 0.009303008079528809, 0.009258943557739258, 0.009251168251037598, 0.009251328468322753, 0.009230143547058105, 0.009234623908996582, 0.009237888336181641, 0.009216287612915038, 0.009173503875732422, 0.009180831909179687, 0.00923033618927002, 0.00923193645477295, 0.00920246410369873, 0.009381888389587402, 0.009166239738464355, 0.00923027229309082, 0.009464799880981445, 0.009260736465454101, 0.009232383728027344, 0.009219840049743652, 0.009244288444519043, 0.009221920013427734, 0.009274208068847656, 0.009202752113342285, 0.00922719955444336, 0.009158623695373536, 0.009244768142700196, 0.009232319831848145, 0.009205599784851075, 0.009267711639404297, 0.009252511978149414, 0.009180224418640137, 0.009229151725769043, 0.009232831954956054, 0.00925046443939209, 0.009215999603271484, 0.009246720314025878, 0.00918943977355957, 0.009207743644714356, 0.009248767852783203, 0.009226240158081055, 0.009251839637756347, 0.009271519660949707, 0.008962047576904298, 0.009164416313171386, 0.009419327735900878, 0.009180992126464844, 0.00916864013671875, 0.009181440353393555, 0.009138175964355469, 0.00918131160736084, 0.009279359817504882, 0.009278528213500977, 0.009198464393615723, 0.00917903995513916, 0.00915881633758545, 0.00932249641418457, 0.009127936363220214, 0.00934502410888672, 0.009190752029418945, 0.009196191787719727, 0.00912508773803711, 0.009175135612487792, 0.009153152465820312, 0.00919539165496826, 0.009184831619262696, 0.009110143661499023, 0.009256671905517579, 0.00919375991821289, 0.009115903854370118, 0.009139967918395995, 0.009137887954711914, 0.009150752067565918, 0.009107456207275391, 0.00928179168701172, 0.009216768264770508, 0.009153056144714355, 0.009380479812622071, 0.009700320243835448, 0.00917311954498291, 0.009124608039855956, 0.009193471908569336, 0.009244671821594238, 0.009133248329162598, 0.009157440185546876, 0.009119744300842286, 0.009162752151489258, 0.009176768302917481, 0.009203840255737304, 0.00917728042602539, 0.009063488006591796, 0.009257951736450195, 0.009151968002319336, 0.009116383552551269, 0.009155967712402343, 0.009148832321166991, 0.00905843162536621, 0.009084927558898925, 0.009086655616760255, 0.009113984107971191, 0.009236288070678712, 0.00911302375793457, 0.00918175983428955, 0.009331999778747558, 0.009165535926818848, 0.009355135917663574, 0.008949695587158203, 0.009168895721435547, 0.009234432220458985, 0.009338879585266113, 0.009210975646972656, 0.009278719902038574, 0.009154208183288574, 0.009147487640380859, 0.009157535552978515, 0.009140224456787109, 0.009133855819702148, 0.00972003173828125, 0.009240575790405273, 0.00947382354736328, 0.009285856246948242, 0.009179136276245118, 0.009138367652893066, 
0.00912883186340332, 0.009173567771911622, 0.009095104217529296, 0.009150431632995606, 0.009140576362609863, 0.009149951934814453, 0.009119839668273925, 0.009084927558898925, 0.009407008171081543, 0.00923033618927002, 0.009472255706787109, 0.009184512138366699, 0.00913868808746338, 0.009112607955932618, 0.009279680252075196, 0.009150688171386718, 0.00967516803741455, 0.00915017604827881, 0.00915881633758545, 0.00925011157989502, 0.009249695777893066, 0.00937171173095703, 0.009148415565490722, 0.009099648475646973, 0.009195136070251466, 0.009098943710327148, 0.009101599693298339, 0.009132096290588379, 0.009140031814575195, 0.009072799682617188, 0.009269311904907226, 0.009148351669311524, 0.009121024131774902, 0.009232671737670898, 0.0091593599319458, 0.009314080238342286, 0.0091810884475708, 0.009127967834472657, 0.009168736457824707, 0.009126079559326173, 0.009177120208740234, 0.009156607627868652, 0.009277631759643554, 0.009108672142028809, 0.009125887870788574, 0.009155200004577637, 0.008951647758483887, 0.009752287864685058, 0.009261631965637207, 0.009180704116821288, 0.009191712379455567, 0.010084128379821778, 0.009199904441833497, 0.009223296165466308, 0.00917081642150879, 0.00913260841369629, 0.009261216163635254, 0.009163040161132813, 0.009143424034118653, 0.009141119956970215, 0.00914457607269287, 0.009141951560974122, 0.009127360343933105, 0.009167488098144532, 0.009174655914306641, 0.009167231559753419, 0.009129216194152832, 0.00911625576019287, 0.009205632209777832, 0.009169183731079101, 0.009226400375366211, 0.009119359970092774, 0.009113823890686035, 0.009115648269653321, 0.009107647895812988, 0.009068544387817384, 0.009135583877563476, 0.009162272453308105, 0.009104191780090332, 0.009094688415527344, 0.009095616340637207, 0.00908694362640381, 0.009126239776611329, 0.009113344192504882, 0.00911513614654541, 0.009136608123779297, 0.00912179183959961, 0.009170720100402832, 0.009501983642578125, 0.009313216209411621, 0.009531552314758301, 0.009387871742248536, 0.00924073600769043, 0.009159520149230957, 0.009192447662353515, 0.009142271995544434, 0.009152511596679687, 0.00921395206451416, 0.009123871803283692, 0.009111680030822755, 0.009197407722473145, 0.009145407676696778, 0.009075648307800293, 0.00943724822998047, 0.009265088081359863, 0.009291775703430176, 0.009146368026733399, 0.009114687919616699, 0.009120415687561036, 0.008898528099060058, 0.009203712463378906, 0.009144319534301757, 0.009142271995544434, 0.00911299228668213, 0.00913263988494873, 0.009225536346435546, 0.00913212776184082, 0.009158880233764648, 0.009302592277526856, 0.009121024131774902, 0.009197600364685058, 0.009144864082336425, 0.0090863037109375, 0.009120415687561036, 0.009149439811706543, 0.009274304389953613, 0.009224255561828614, 0.009259072303771973, 0.01007430362701416, 0.009403648376464844, 0.00969324779510498, 0.011002464294433594, 0.009311360359191894, 0.009208640098571778, 0.00925385570526123, 0.009132063865661622, 0.00913702392578125, 0.009179264068603516, 0.009113408088684083, 0.009230688095092773, 0.009170687675476075, 0.009172672271728516, 0.00918892765045166, 0.009172800064086915, 0.009271807670593261, 0.009163200378417969, 0.009124032020568848, 0.009104255676269532, 0.009241663932800293, 0.009117568016052246, 0.009078432083129882, 0.009066335678100586, 0.009129887580871581, 0.009116095542907715, 0.00907196807861328, 0.009140640258789063, 0.009227775573730468, 0.009088255882263184, 0.009148351669311524, 0.009147551536560059, 0.009251392364501954, 0.009147808074951172, 0.009136735916137695, 
0.009101247787475586, 0.009146431922912597, 0.00910489559173584, 0.00912396812438965, 0.009168671607971192, 0.009083040237426758, 0.009161120414733886, 0.009142304420471191, 0.00914575958251953, 0.008917280197143555, 0.00915881633758545, 0.009148415565490722, 0.009232383728027344, 0.009287327766418456, 0.009293760299682617, 0.009332608222961426, 0.009290111541748048, 0.009244959831237793, 0.009207232475280762, 0.009202336311340332, 0.00914732837677002, 0.009124704360961914, 0.009259136199951171, 0.009170271873474122, 0.009138527870178223, 0.009195712089538574, 0.009160287857055664, 0.009111871719360351, 0.009142784118652344, 0.009186911582946777, 0.009189279556274414, 0.009259103775024414, 0.009580032348632812, 0.009200384140014648, 0.00925648021697998, 0.009295167922973633, 0.009171008110046387, 0.009131135940551758, 0.009135647773742676, 0.009185471534729005, 0.009154560089111329, 0.009108511924743653, 0.009218751907348633, 0.009127360343933105, 0.00930083179473877, 0.009147839546203613, 0.009347519874572753, 0.009174847602844238, 0.009171551704406738, 0.009162464141845704, 0.009107680320739745, 0.009070367813110351, 0.009132032394409179, 0.009143872261047363, 0.00911417579650879, 0.009105312347412109, 0.009095135688781739, 0.009120800018310548, 0.009166879653930664, 0.009211968421936036, 0.009161600112915039, 0.009132287979125976, 0.009154303550720215, 0.009117280006408691, 0.009124544143676758, 0.009080544471740722, 0.009134079933166504, 0.009108768463134766, 0.009176128387451172, 0.009111200332641602, 0.009136128425598144, 0.009291775703430176, 0.008814847946166993, 0.009121536254882812, 0.00921951961517334, 0.009199551582336426, 0.009316927909851074, 0.009170463562011719, 0.009107872009277344, 0.009123071670532227, 0.009106304168701172, 0.009093503952026367, 0.009141887664794922, 0.009275391578674316, 0.009110943794250488, 0.009128128051757813, 0.009186847686767578, 0.009145503997802734, 0.009141695976257325, 0.009152511596679687, 0.00922815990447998, 0.00941487979888916, 0.009158720016479492, 0.009192992210388183, 0.009159392356872558, 0.009227775573730468, 0.009217599868774413, 0.009113759994506835, 0.009063072204589843, 0.009086976051330567, 0.00913798427581787, 0.009064640045166016, 0.009092608451843261, 0.009083295822143555, 0.009111359596252441, 0.00912003231048584, 0.009164799690246582, 0.009198944091796874, 0.009212575912475585, 0.009146368026733399, 0.009176639556884766, 0.009255359649658203, 0.009457663536071777, 0.009379839897155762, 0.009285632133483887, 0.00928060817718506, 0.009255231857299805, 0.009273440361022948, 0.00947424030303955, 0.009281855583190918, 0.009439231872558594, 0.009177184104919434, 0.009250720024108887, 0.009273344039916993, 0.009451359748840331, 0.00918172836303711, 0.009242239952087403, 0.009211903572082519, 0.009207743644714356, 0.009232447624206544, 0.009179136276245118, 0.009252863883972168, 0.009166848182678223, 0.00917078399658203, 0.009138336181640625, 0.009007776260375976, 0.009633567810058594, 0.009619680404663086, 0.009295231819152832, 0.00921664047241211, 0.009195584297180176, 0.009231552124023438, 0.0091942081451416, 0.00916044807434082, 0.009263104438781738, 0.009299967765808105, 0.009338496208190918, 0.009170656204223632, 0.009251775741577149, 0.009179136276245118, 0.009208928108215332, 0.009253791809082031, 0.009307456016540528, 0.009148192405700684, 0.009360383987426758, 0.0091810884475708, 0.00919375991821289, 0.009154272079467774, 0.009179007530212403, 0.009127327919006348, 0.009311136245727538, 0.009135231971740723, 
0.009269951820373535, 0.00920201587677002, 0.009193120002746582, 0.009086688041687012, 0.009089311599731446, 0.009132032394409179, 0.009154463768005371, 0.009089119911193847, 0.009104543685913086, 0.009108320236206054, 0.009281567573547363, 0.009184576034545899, 0.009197312355041504, 0.009255840301513671, 0.00927337646484375, 0.009223936080932617, 0.00921008014678955, 0.009143872261047363, 0.009116095542907715, 0.00913584041595459, 0.0091428804397583, 0.009102080345153809, 0.009120256423950195, 0.009134528160095214, 0.009750176429748535, 0.009138879776000976, 0.009141440391540528, 0.009242367744445801, 0.009138879776000976, 0.009203455924987792, 0.00912611198425293, 0.009066368103027344, 0.009148256301879883, 0.009126239776611329, 0.009107104301452637, 0.009115551948547363, 0.00886796760559082, 0.00924880027770996, 0.009358943939208985, 0.009321375846862793, 0.009357248306274414, 0.009148480415344239, 0.009188896179199218, 0.009187711715698241, 0.00912393569946289, 0.009111104011535644, 0.009144767761230469, 0.009134079933166504, 0.00908902359008789, 0.009152095794677734, 0.009081248283386231, 0.009241824150085448, 0.00913219165802002, 0.00914198398590088, 0.009143199920654297, 0.009150272369384765, 0.009173407554626464, 0.009126784324645996, 0.00911248016357422, 0.009109503746032714, 0.00910918426513672, 0.009174912452697755, 0.009095199584960937, 0.00914463996887207, 0.00984598445892334, 0.009232255935668945, 0.009157471656799317, 0.009128191947937011, 0.00912399959564209, 0.009113568305969239, 0.00910927963256836, 0.009074687957763672, 0.009101311683654785, 0.009160223960876466, 0.009094976425170898, 0.009046303749084472, 0.009077119827270507, 0.009094719886779784, 0.009157055854797364, 0.009137727737426759, 0.009103808403015137, 0.009183103561401368, 0.009359392166137695, 0.009148351669311524, 0.00914857578277588, 0.009176063537597656, 0.00927619171142578, 0.00931827163696289, 0.009183103561401368, 0.009152640342712402, 0.009279295921325684, 0.009239071846008301, 0.00915993595123291, 0.009245408058166505, 0.009180928230285644, 0.009257023811340331, 0.009156959533691406, 0.009140095710754394, 0.009365504264831542, 0.008854975700378418, 0.009122464179992676, 0.009114879608154297, 0.009104127883911134, 0.009136128425598144, 0.009076191902160644, 0.009113696098327637, 0.009131872177124023, 0.009174752235412597, 0.009133184432983398, 0.009099007606506348, 0.009183327674865722, 0.009122976303100586, 0.009167360305786134, 0.009166367530822754, 0.009136863708496094, 0.009127936363220214, 0.00908022403717041, 0.009122400283813477, 0.009070591926574707, 0.009129983901977539, 0.009127264022827148, 0.009396896362304688, 0.009111167907714844, 0.00920751953125, 0.0092042875289917, 0.009113856315612793, 0.009129823684692383, 0.009084383964538574, 0.009154496192932128, 0.00913651180267334, 0.009132255554199218, 0.009175264358520507, 0.009111328125, 0.009103360176086426, 0.0090863037109375, 0.009104031562805176, 0.009158656120300293, 0.009114912033081055, 0.009140959739685059, 0.009068544387817384, 0.00907478427886963, 0.009086879730224609, 0.009134048461914063, 0.010091551780700684, 0.009117792129516602, 0.009104160308837891, 0.009093119621276855, 0.009226400375366211, 0.009207776069641113, 0.009228063583374023, 0.009158495903015137, 0.00913856029510498, 0.009140224456787109, 0.009107168197631837, 0.009190848350524902, 0.009382752418518067, 0.009260319709777831, 0.009153247833251953, 0.00921571159362793, 0.009124128341674805, 0.009117695808410644, 0.009209856033325196, 0.008889472007751465, 
0.009108351707458497, 0.009125760078430176, 0.009164640426635742, 0.009154848098754883, 0.009095168113708496, 0.009198847770690919, 0.00915328025817871, 0.009195167541503907, 0.009160575866699218, 0.009183712005615234, 0.009156607627868652, 0.009125887870788574, 0.009154463768005371, 0.009162848472595214, 0.009161727905273438, 0.009190400123596192, 0.009174367904663087, 0.009160639762878418, 0.009168895721435547, 0.009349375724792481, 0.009179519653320313, 0.009154656410217284, 0.009165823936462402, 0.009110143661499023, 0.00915494441986084, 0.009494400024414063, 0.009207936286926269, 0.00930406379699707, 0.009541119575500488, 0.00923641586303711, 0.009168767929077148, 0.009206527709960938, 0.00924665641784668, 0.00912723159790039, 0.009093855857849121, 0.009138400077819825, 0.009137920379638672, 0.009127936363220214, 0.00951039981842041, 0.009267935752868653, 0.009170720100402832, 0.00930121612548828, 0.009214752197265625, 0.009119263648986816, 0.009132512092590332, 0.009150464057922364, 0.009109791755676269, 0.009115360260009765, 0.009737343788146972, 0.009323391914367676, 0.009203712463378906, 0.009179391860961915, 0.009111200332641602, 0.009154208183288574, 0.009126336097717284, 0.009172991752624511, 0.009299967765808105, 0.009135807991027832, 0.00921836757659912, 0.009106528282165528, 0.009135007858276367, 0.009164799690246582, 0.008863327980041504, 0.009230751991271973, 0.00932688045501709, 0.009290752410888671, 0.009193408012390136, 0.009187871932983399, 0.009168255805969238, 0.00914521598815918, 0.009133119583129883, 0.009206656455993653, 0.009166239738464355, 0.009167519569396973, 0.009181183815002441, 0.009161791801452636, 0.010471936225891113, 0.01195359992980957, 0.01159500789642334, 0.009496319770812988, 0.00941158390045166, 0.009335136413574219, 0.009380096435546876, 0.009324480056762695, 0.009445856094360352, 0.00937945556640625, 0.009461824417114257, 0.009333056449890137, 0.009295743942260742, 0.009453696250915527, 0.00931817626953125, 0.009341152191162109, 0.01031372833251953, 0.010174464225769043, 0.009476096153259277, 0.009532447814941405, 0.009405183792114257, 0.009396448135375976, 0.00945462417602539, 0.0093787841796875, 0.009361087799072266, 0.009389663696289062, 0.009355775833129883, 0.009378016471862793, 0.009363455772399902, 0.009369440078735351, 0.009346783638000488, 0.009552319526672364, 0.009400320053100587, 0.009433088302612304, 0.009347071647644043, 0.009360639572143555, 0.009438079833984375, 0.009482111930847168, 0.00937942409515381, 0.009430848121643066, 0.009388287544250489, 0.009243328094482422, 0.00927244758605957, 0.009308704376220703, 0.00941222381591797, 0.009369728088378906, 0.009287936210632324, 0.00926905632019043, 0.00933516788482666, 0.008907008171081542, 0.00917734432220459, 0.009217920303344727, 0.00923027229309082, 0.009274656295776368, 0.009224991798400878, 0.009398271560668945, 0.00925046443939209, 0.009254976272583008, 0.009183103561401368, 0.009234848022460938, 0.009245856285095215, 0.00913916778564453, 0.009144191741943359, 0.009166496276855469, 0.009183584213256836, 0.009717375755310058, 0.009203295707702636, 0.00916988754272461, 0.00913798427581787, 0.009158783912658692, 0.009314175605773925, 0.00912992000579834, 0.009101311683654785, 0.00909932804107666, 0.009117695808410644, 0.00931340789794922, 0.009204544067382813, 0.009150527954101562, 0.009052160263061524, 0.009187328338623046, 0.009127936363220214, 0.009191072463989258, 0.009107040405273438, 0.00913702392578125, 0.009115519523620606, 0.009273664474487305, 0.009146047592163085, 
0.009250016212463378, 0.009077535629272462, 0.009125887870788574, 0.009094176292419433, 0.009153504371643067, 0.009347264289855956, 0.010486687660217285, 0.009382816314697265, 0.009381888389587402, 0.009700639724731445, 0.00932528018951416, 0.010059776306152344, 0.009359328269958497, 0.00924828815460205, 0.009236991882324219, 0.009105376243591309, 0.009160160064697265, 0.009142271995544434, 0.009252575874328614, 0.00938479995727539, 0.010022175788879395, 0.009344736099243163, 0.009143296241760255, 0.009151583671569824, 0.00918825626373291, 0.008917280197143555, 0.009205344200134278, 0.00914668846130371, 0.009130144119262695, 0.009164640426635742, 0.009226240158081055, 0.009443327903747559, 0.009257984161376954, 0.009239551544189453, 0.00914572811126709, 0.00914463996887207, 0.009232192039489747, 0.009213695526123047, 0.00915328025817871, 0.009326272010803223, 0.009273664474487305, 0.009174367904663087, 0.009169695854187011, 0.009153440475463867, 0.009095359802246093, 0.009127903938293457, 0.009311039924621583, 0.009265439987182617, 0.009154463768005371, 0.009117504119873047, 0.00928767967224121, 0.009164799690246582, 0.009232159614562988, 0.009121888160705567, 0.009131423950195313, 0.009108320236206054, 0.009097087860107421, 0.009134207725524902, 0.009135744094848634, 0.009101856231689453, 0.00915135955810547, 0.009202527999877929, 0.009141568183898927, 0.009138879776000976, 0.009162752151489258, 0.00909721565246582, 0.009545151710510253, 0.009163392066955567, 0.009167807579040527, 0.009188032150268555, 0.009152959823608399, 0.009086848258972168, 0.009183232307434081, 0.009686623573303223, 0.009345439910888672, 0.009163135528564454, 0.009231616020202636, 0.009133919715881348, 0.009151007652282715, 0.009138175964355469, 0.009191295623779298, 0.009123488426208496, 0.009701855659484863, 0.009308256149291993, 0.01001251220703125, 0.009377856254577637, 0.009151552200317383, 0.009158592224121093, 0.009240927696228028, 0.00914192008972168, 0.009109503746032714, 0.009232383728027344, 0.009201663970947266, 0.009312255859375, 0.009201663970947266, 0.009162752151489258, 0.00910102367401123, 0.009148608207702637, 0.009154656410217284, 0.00911359977722168, 0.00919257640838623, 0.009116671562194823, 0.00915443229675293, 0.009153696060180664, 0.009367839813232422, 0.009201984405517578, 0.009129504203796386, 0.009089792251586914, 0.009211872100830078, 0.009107199668884278, 0.009170880317687988, 0.009150783538818359, 0.009146368026733399, 0.009372768402099609, 0.009216992378234864, 0.009158944129943848, 0.009135135650634766, 0.009163455963134766, 0.00941868782043457, 0.00927948760986328, 0.009328767776489258, 0.009185152053833008, 0.009151488304138184, 0.009046879768371582, 0.009155008316040039, 0.00940003204345703, 0.009179072380065918, 0.009102879524230957, 0.0091909761428833, 0.009079327583312988, 0.009099712371826172, 0.009108736038208007, 0.009083295822143555, 0.009123552322387695, 0.009149248123168946, 0.009177951812744141, 0.009232895851135254, 0.009103167533874511, 0.009181632041931152, 0.009152735710144043, 0.00912179183959961, 0.009097344398498535, 0.009142144203186035, 0.00912384033203125, 0.009131775856018067, 0.009133343696594238, 0.009114239692687988, 0.009284159660339356, 0.009229248046875, 0.009167584419250488, 0.009129152297973633, 0.00899897575378418, 0.009164159774780273, 0.009258848190307617, 0.009112095832824707, 0.009165280342102051, 0.009082592010498047, 0.009302176475524903, 0.00926255989074707, 0.00912831974029541, 0.009087039947509765, 0.00907855987548828, 0.009147744178771972, 
0.009104000091552735, 0.009625791549682617, 0.009281536102294922, 0.009194944381713867, 0.009160575866699218, 0.009272192001342773, 0.009238335609436035, 0.009197567939758301, 0.00932476806640625, 0.009315808296203614, 0.00926956844329834, 0.009195327758789062, 0.009203871726989747, 0.009173024177551269, 0.009426015853881836, 0.009282464027404786, 0.009522720336914063, 0.009185855865478515, 0.0091910400390625, 0.009184639930725097, 0.009151519775390625, 0.009149408340454102, 0.009124575614929199, 0.00915884780883789, 0.009135552406311035, 0.00918175983428955, 0.00923033618927002, 0.00927948760986328, 0.009228287696838379, 0.009118880271911621, 0.009130847930908204, 0.009176480293273925, 0.009169407844543457, 0.009406559944152832, 0.009639616012573243, 0.009369919776916504, 0.009109312057495118, 0.009162336349487306, 0.00926371192932129, 0.009299967765808105, 0.00918115234375, 0.009136159896850586, 0.009109503746032714, 0.009181183815002441, 0.009220095634460449, 0.009287424087524414, 0.00924287986755371, 0.00932863998413086, 0.009267200469970703, 0.009313983917236328, 0.00914668846130371, 0.008846752166748047, 0.009095359802246093, 0.009129887580871581, 0.009222911834716798, 0.009961440086364745, 0.009169983863830566, 0.009203776359558106, 0.009314175605773925, 0.009151424407958984, 0.009211872100830078, 0.009144319534301757, 0.009123200416564942, 0.009156576156616211, 0.00911836814880371, 0.009152511596679687, 0.0091975040435791, 0.00915187168121338, 0.009097920417785645, 0.009232383728027344, 0.009150464057922364, 0.009115648269653321, 0.009185279846191406, 0.00928713607788086, 0.009191360473632813, 0.009095775604248046, 0.009197567939758301, 0.009190431594848633, 0.00917967987060547, 0.009181632041931152, 0.009277407646179198, 0.009190784454345704, 0.009111200332641602, 0.009120063781738281, 0.009179007530212403, 0.009106240272521972, 0.00908902359008789, 0.009189375877380371, 0.00931007957458496, 0.009365792274475098, 0.009111007690429688, 0.009291487693786621, 0.00907436752319336, 0.009165792465209962, 0.009132032394409179, 0.00908902359008789, 0.009125408172607421, 0.009190943717956543, 0.009207072257995606, 0.009152159690856934, 0.009191424369812011, 0.009263104438781738, 0.009334272384643554, 0.009144831657409667, 0.009124992370605468, 0.009083040237426758, 0.009654047966003419, 0.00920639991760254, 0.009189696311950683, 0.009674592018127441, 0.011497311592102051, 0.010137536048889161, 0.009353631973266602, 0.009242591857910156]",tokens/s,108.3895650507256,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4399.427584,4976.410624,0.0,4573.888512,4568.93696,s,1,10.653498046875,10.653498046875,0.0,10.653498046875,10.653498046875,10.653498046875,10.653498046875,[10.653498046875],,kWh,0.0001063242550708234,1.1721156534385567e-05,3.27141928379987e-05,0.00015075960444320766,,MB,2182.750208,5303.566336,0.0,4886.36416,4814.97344,s,10,2.0939474792480466,0.20939474792480467,0.00018643556789469997,0.20931350708007812,0.20967909698486328,0.2096967643737793,0.2097108982849121,"[0.2095928955078125, 0.20921379089355469, 0.2092220458984375, 0.2096751708984375, 0.20922808837890625, 0.20928163146972656, 0.20942431640625, 0.20924972534179687, 0.2097144317626953, 0.20934538269042968]",tokens/s,1222.5712561421628,kWh,6.184242152083483e-06,6.820154746513085e-07,4.111340094624845e-06,1.0977597721359636e-05,tokens/kWh,23320220.55261586,MB,2189.852672,5408.423936,0.0,4991.22176,4947.963904,s,10,25.337256347656247,2.5337256347656245,0.012450117298803099,2.5302037353515625,2.54606923828125,2.555306103515625,2.562695595703125,"[2.56454296875, 2.53185595703125, 2.53433349609375, 2.525358154296875, 2.5440166015625, 2.528551513671875, 2.519905517578125, 2.52308251953125, 2.539431396484375, 2.52617822265625]",tokens/s,24.864570628945643,kWh,7.406795273791552e-05,8.169663890119593e-06,4.533969946617528e-05,0.00012757731609421038,tokens/kWh,493818.1953402845,,s,630,25.334205780029304,0.040213025047665545,0.00046863920960852685,0.04015086364746094,0.04064994049072266,0.04087552795410156,0.04192168930053712,"[0.040599777221679685, 0.04045888137817383, 0.04061606216430664, 0.040546302795410154, 0.04255539321899414, 0.04041113662719727, 0.040302593231201174, 0.041043968200683595, 0.04048281478881836, 0.04069279861450195, 0.04052268981933594, 0.04085724639892578, 0.04042329788208008, 0.0405775032043457, 0.040476673126220705, 0.04022886276245117, 0.040237056732177735, 0.04103987121582031, 0.04028598403930664, 0.04099094390869141, 0.04043571090698242, 0.040548351287841795, 0.04051932907104492, 0.04109139251708984, 0.04074198532104492, 0.040567745208740236, 0.04054617691040039, 0.040687744140625, 0.04069564819335938, 0.040615966796875, 0.04060732650756836, 0.04400751876831055, 0.04127484893798828, 0.040604736328125, 0.040794017791748044, 0.041428897857666014, 0.04163593673706055, 0.040777313232421876, 0.04059807968139648, 0.040715614318847654, 0.04045260620117187, 0.04168499374389648, 0.041154014587402345, 0.040489505767822266, 0.040787616729736326, 0.040304702758789064, 0.04034288024902344, 0.04076009750366211, 0.04087004852294922, 0.0409804801940918, 0.040667137145996096, 0.04037836837768555, 0.040427520751953126, 0.040318977355957034, 0.04017068862915039, 0.0402808952331543, 0.040325119018554685, 0.040476673126220705, 0.04015718460083008, 0.040358016967773434, 0.04045721435546875, 0.0401847038269043, 0.03995852661132813, 0.04116361618041992, 0.04019174575805664, 0.04005503845214844, 0.04032908630371094, 0.04029196929931641, 0.04059145736694336, 0.04023337554931641, 0.040400543212890626, 0.040050014495849606, 0.0398831672668457, 0.039892929077148434, 0.03981584167480469, 0.039723262786865235, 0.03981286239624023, 0.03969225692749023, 0.03976563262939453, 0.039776927947998045, 0.040233726501464846, 0.04015158462524414, 0.04030511856079102, 0.04042342376708984, 0.04016934585571289, 0.04004022216796875, 0.04036774444580078, 0.039873249053955076, 0.04004249572753906, 0.04000569534301758, 0.04003401565551758, 0.04010185623168945, 0.04013663864135742, 0.04063983917236328, 
0.040596446990966796, 0.04053606414794922, 0.040404449462890624, 0.040371841430664065, 0.040237983703613284, 0.04013379287719727, 0.04020620727539063, 0.04029065704345703, 0.04003609466552734, 0.040354686737060545, 0.0403963508605957, 0.04015763092041016, 0.040353759765625, 0.039982368469238284, 0.03999452972412109, 0.040242015838623045, 0.04028112030029297, 0.040304351806640625, 0.040468223571777345, 0.04037247848510742, 0.04023100662231445, 0.04000553512573242, 0.04008700942993164, 0.040314815521240235, 0.04061017608642578, 0.04029481506347656, 0.040169567108154294, 0.04011324691772461, 0.03980944061279297, 0.040040672302246096, 0.040012863159179686, 0.039954593658447266, 0.040597312927246096, 0.04070076751708984, 0.04041247940063476, 0.04045075225830078, 0.040714241027832034, 0.040529918670654294, 0.04052406311035156, 0.04075715255737305, 0.0401629753112793, 0.040034111022949216, 0.04004217529296875, 0.04019817733764648, 0.04081024169921875, 0.040219104766845704, 0.039825889587402345, 0.03979462432861328, 0.039882625579833984, 0.040107616424560545, 0.040511070251464845, 0.040623039245605466, 0.040554080963134766, 0.04027638244628906, 0.040304222106933595, 0.040449920654296874, 0.040365760803222656, 0.04012236785888672, 0.04027068710327148, 0.040279998779296874, 0.040124095916748044, 0.04023334503173828, 0.04012236785888672, 0.04009574508666992, 0.04044972610473633, 0.04028857421875, 0.03994214248657227, 0.03984998321533203, 0.03968393707275391, 0.040372383117675784, 0.039995391845703124, 0.03972473526000977, 0.03980486297607422, 0.03990521621704102, 0.03998553466796875, 0.03986748886108398, 0.03969446563720703, 0.040014305114746095, 0.0399441909790039, 0.04000806427001953, 0.040008865356445315, 0.0399183349609375, 0.04077987289428711, 0.0399884147644043, 0.04009884643554688, 0.040218273162841794, 0.04060992050170899, 0.04018739318847656, 0.040165790557861326, 0.04100342559814453, 0.04075040054321289, 0.04018972778320312, 0.04004719924926758, 0.040043743133544925, 0.040395553588867185, 0.040426303863525394, 0.040291519165039064, 0.04032412719726562, 0.040283935546875, 0.04006326293945312, 0.04039276885986328, 0.04000486373901367, 0.04002470397949219, 0.04005411148071289, 0.039911006927490236, 0.039990177154541014, 0.039855072021484375, 0.0398427848815918, 0.04005440139770508, 0.03983305740356445, 0.04016950225830078, 0.03983244705200195, 0.03978035354614258, 0.03969023895263672, 0.04031488037109375, 0.04041318511962891, 0.03986636734008789, 0.03981926345825195, 0.03971072006225586, 0.03974467086791992, 0.03940003204345703, 0.03980108642578125, 0.03937279891967774, 0.03958784103393555, 0.03967715072631836, 0.039887649536132816, 0.0396649284362793, 0.0397782096862793, 0.0396192626953125, 0.03967382431030273, 0.0402966079711914, 0.04087804794311523, 0.04016096115112305, 0.04009971237182617, 0.03995027160644531, 0.040003231048583984, 0.03988159942626953, 0.040030174255371094, 0.04004665756225586, 0.04047225570678711, 0.04049859237670898, 0.03996761703491211, 0.0401162223815918, 0.04119756698608398, 0.040359935760498046, 0.04017283248901367, 0.040182689666748046, 0.04009471893310547, 0.039955520629882814, 0.03996236801147461, 0.040130561828613284, 0.04047257614135742, 0.04023839950561523, 0.04016169738769531, 0.040397087097167966, 0.04057292938232422, 0.040803966522216795, 0.040744991302490235, 0.04040457534790039, 0.04089708709716797, 0.04052284622192383, 0.040589824676513675, 0.04045651245117188, 0.040441856384277344, 0.04026572799682617, 0.04022249603271484, 0.04025980758666992, 
0.040703998565673825, 0.040468097686767575, 0.041576831817626954, 0.04365856170654297, 0.04060675048828125, 0.04041449737548828, 0.04034950256347656, 0.04027654266357422, 0.04049100875854492, 0.04035939025878906, 0.04032969665527344, 0.040492225646972656, 0.040455039978027345, 0.04020547103881836, 0.040147743225097655, 0.040198207855224606, 0.040369663238525394, 0.0408724479675293, 0.04069171142578125, 0.04031078338623047, 0.04021488189697266, 0.04024889755249023, 0.04030064010620117, 0.0402529296875, 0.04017407989501953, 0.040229217529296875, 0.04016479873657226, 0.04037817764282227, 0.040148704528808594, 0.04005062484741211, 0.03991865539550781, 0.03991622543334961, 0.04011430358886719, 0.04288761520385742, 0.04100755310058594, 0.04028416061401367, 0.04034966278076172, 0.04004048156738281, 0.040032257080078126, 0.040204288482666016, 0.040079360961914064, 0.04029439926147461, 0.04048076629638672, 0.04015727996826172, 0.040202144622802735, 0.03990118408203125, 0.03994195175170898, 0.03987065505981445, 0.03990323257446289, 0.039725055694580076, 0.039816608428955076, 0.03968793487548828, 0.03992793655395508, 0.03977443313598633, 0.04034550476074219, 0.04043724822998047, 0.040497631072998044, 0.04020336151123047, 0.04024617767333984, 0.03993209457397461, 0.040019775390625, 0.040048446655273434, 0.040923328399658204, 0.040289726257324215, 0.040370750427246097, 0.04038860702514648, 0.04043775939941406, 0.04036198425292969, 0.04017737579345703, 0.04044188690185547, 0.040184062957763673, 0.0400200309753418, 0.04048275375366211, 0.040103935241699216, 0.03990729522705078, 0.040113662719726564, 0.04014339065551758, 0.04016569519042969, 0.04018355178833008, 0.04019705581665039, 0.03988787078857422, 0.04006313705444336, 0.040529727935791016, 0.040091552734375, 0.03992793655395508, 0.03983747100830078, 0.039815391540527344, 0.04023868942260742, 0.040165088653564454, 0.03982815933227539, 0.0396572151184082, 0.039606208801269534, 0.040427841186523435, 0.039747486114501955, 0.039761024475097655, 0.0397957763671875, 0.040316257476806644, 0.04066566467285156, 0.04002790451049805, 0.04000179290771484, 0.03982131195068359, 0.0400261116027832, 0.03988681411743164, 0.040056575775146486, 0.03956140899658203, 0.03996681594848633, 0.039782398223876955, 0.0396954231262207, 0.040172576904296875, 0.03963619232177734, 0.039785152435302736, 0.0397946891784668, 0.03971286392211914, 0.040251296997070314, 0.04003622436523437, 0.03994611358642578, 0.04308566284179687, 0.04035583877563476, 0.040497150421142575, 0.04030905532836914, 0.04027155303955078, 0.04031283187866211, 0.04047872161865235, 0.040042560577392576, 0.04014313507080078, 0.040275615692138673, 0.04019401550292969, 0.04003843307495117, 0.03995852661132813, 0.040064990997314455, 0.03989097595214844, 0.039741439819335936, 0.040137054443359375, 0.040144542694091796, 0.04023036956787109, 0.03999593734741211, 0.039755775451660154, 0.039567359924316405, 0.03971686553955078, 0.03962623977661133, 0.03957196807861328, 0.039657470703125, 0.03998454284667969, 0.03948604965209961, 0.03972035217285156, 0.039596481323242186, 0.03953788757324219, 0.04028684616088867, 0.04003667068481445, 0.039766014099121096, 0.04127510452270508, 0.041195232391357424, 0.03989728164672852, 0.039954814910888675, 0.03978035354614258, 0.03993100738525391, 0.03981606292724609, 0.0397344970703125, 0.04013545608520508, 0.04303462219238281, 0.04078182220458984, 0.04015014266967774, 0.03998400115966797, 0.03985612869262695, 0.039608318328857424, 0.03949772644042969, 0.0397006721496582, 
0.03999497604370117, 0.039698654174804685, 0.03972915267944336, 0.039626750946044925, 0.03966124725341797, 0.039717182159423825, 0.03954687881469727, 0.03960176086425781, 0.03971932983398437, 0.039669761657714846, 0.039550880432128906, 0.03965500640869141, 0.03973689651489258, 0.03995647811889649, 0.03995369720458984, 0.03982163238525391, 0.039849952697753904, 0.03974364852905273, 0.039979137420654294, 0.03970883178710938, 0.040082817077636716, 0.039766048431396486, 0.039989376068115236, 0.039965152740478516, 0.04023295974731445, 0.040356960296630856, 0.03999862289428711, 0.040003326416015624, 0.03996672058105469, 0.040062976837158204, 0.03999948883056641, 0.03959513473510742, 0.039647552490234376, 0.039489566802978514, 0.03994169616699219, 0.039803550720214846, 0.04031110382080078, 0.0409804801940918, 0.039766368865966795, 0.039937694549560546, 0.03987865447998047, 0.03985715103149414, 0.039847198486328124, 0.03975958251953125, 0.039981056213378906, 0.040390369415283206, 0.04012265777587891, 0.03993088150024414, 0.03986288070678711, 0.039976577758789066, 0.040174369812011716, 0.040153438568115235, 0.03994179153442383, 0.03995647811889649, 0.03968239974975586, 0.03974115371704102, 0.03963488006591797, 0.04005795288085937, 0.04025027084350586, 0.04042956924438477, 0.04038835144042969, 0.04054451370239258, 0.040288257598876956, 0.0400715217590332, 0.04005567932128906, 0.040008480072021485, 0.040013824462890625, 0.04025881576538086, 0.03994291305541992, 0.041371200561523436, 0.040996353149414064, 0.040137664794921875, 0.0400076789855957, 0.039894847869873046, 0.0401446418762207, 0.04009209442138672, 0.0405794563293457, 0.04046195220947266, 0.04068163299560547, 0.04077923202514649, 0.040434272766113284, 0.0408037109375, 0.040538238525390624, 0.040430335998535155, 0.040312000274658207, 0.04054508972167969, 0.04042342376708984, 0.04055478286743164, 0.04027568054199219, 0.04012851333618164, 0.03992486572265625, 0.039697280883789064, 0.04000966262817383, 0.039946529388427736, 0.040331295013427734, 0.04201836776733398, 0.040691902160644534, 0.040648193359375, 0.0408089599609375, 0.04090044784545899, 0.0412808952331543, 0.040462177276611326, 0.04036908721923828, 0.04008678436279297, 0.040471294403076175, 0.039946239471435545, 0.04016089630126953, 0.04012275314331055, 0.039904926300048826, 0.04011199951171875, 0.03999164962768555, 0.03986240005493164, 0.03973324966430664, 0.04003443145751953, 0.03971996688842774, 0.03995734405517578, 0.039846912384033206, 0.039954849243164066, 0.03975228881835938, 0.03996979141235352, 0.04028684616088867, 0.04028044891357422, 0.04024873733520508, 0.040441726684570314, 0.04018454360961914, 0.040172607421875, 0.040046558380126954, 0.04029334259033203, 0.04012192153930664, 0.04015315246582031, 0.040118656158447265, 0.04007731246948242, 0.04054963302612305, 0.04056553649902344, 0.04060780715942383, 0.040507297515869144, 0.04007321548461914, 0.04072390365600586, 0.040008480072021485, 0.04042515182495117, 0.04037235260009766, 0.03999996948242188, 0.0399441909790039, 0.04008755111694336, 0.039892383575439457, 0.039957088470458986, 0.040222431182861326, 0.039846176147460936, 0.040078750610351564, 0.03992399978637695, 0.039805534362792966, 0.03993571090698242, 0.03998617553710938, 0.040007774353027346, 0.04019497680664062, 0.04022886276245117, 0.03996672058105469, 0.039833343505859375, 0.03996448135375977, 0.040026912689208986, 0.04009958267211914, 0.03998060989379883, 0.039991870880126956, 0.03999065780639648, 0.040256992340087894, 0.03994630432128906, 0.040166271209716796, 
0.04006006240844726, 0.04006095886230469, 0.03996080017089844, 0.03994275283813477, 0.03991551971435547, 0.040030208587646485, 0.0399788818359375, 0.04010591888427734, 0.03990156936645508, 0.039952190399169925, 0.0401099853515625, 0.04046448135375977, 0.04062019348144531, 0.04052361679077148, 0.04024079895019531, 0.0403111686706543, 0.04008310317993164, 0.04020217514038086, 0.040462177276611326, 0.040543968200683594, 0.04028908920288086, 0.04024694442749024, 0.04027033615112305, 0.040221599578857424, 0.0401673583984375, 0.04017449569702149, 0.040136798858642575, 0.04011849594116211, 0.040088512420654296, 0.04003926467895508, 0.039903358459472654, 0.039964542388916016, 0.04019302368164063, 0.03978672027587891, 0.03969731140136719]",tokens/s,24.867564646396882,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1045.434368,904.855552,0.0,509.607936,491.434496,s,1,7.69194482421875,7.69194482421875,0.0,7.69194482421875,7.69194482421875,7.69194482421875,7.69194482421875,[7.69194482421875],,kWh,2.3835842712499773e-05,2.621883335264642e-06,7.240561347994501e-06,3.369828739575891e-05,,MB,1403.293696,1039.07328,0.0,629.1456,592.24832,s,10,0.26188947296142584,0.02618894729614258,0.00021841016604451076,0.026089839935302735,0.026433218955993654,0.026552321910858153,0.026647604274749756,"[0.02629078483581543, 0.025977535247802733, 0.02640675163269043, 0.026321664810180664, 0.026105951309204102, 0.02605801582336426, 0.026671424865722656, 0.02605923271179199, 0.02592438316345215, 0.026073728561401367]",tokens/s,9775.11608638453,kWh,7.529128719823882e-07,8.303299081971835e-08,4.7044925952060254e-07,1.3063951223227092e-06,tokens/kWh,195959090.4969425,MB,1442.83648,1053.753344,0.0,643.825664,605.085696,s,10,13.617179565429685,1.3617179565429687,0.005481366197635939,1.3625023803710938,1.3678305053710937,1.368237274169922,1.3685626892089844,"[1.3677401123046875, 1.3626983642578125, 1.357629150390625, 1.366910400390625, 1.3640528564453125, 1.36864404296875, 1.362306396484375, 1.3496138916015625, 1.3598287353515626, 1.357755615234375]",tokens/s,46.2650871990701,kWh,3.91161338250991e-05,4.314099562961237e-06,1.4551146091079945e-05,5.7981379479140276e-05,tokens/kWh,1086555.728165544,,s,630,13.611552797317499,0.021605639360821435,0.0004668648119582739,0.02151136016845703,0.021915046691894532,0.022159848117828368,0.022940940837860118,"[0.02109094429016113, 0.021511455535888672, 0.02187932777404785, 0.022566911697387695, 0.0217674560546875, 0.021486303329467774, 0.021960832595825194, 0.021898944854736327, 0.021585407257080077, 0.021679103851318358, 0.02184364891052246, 0.02174799919128418, 0.021874656677246095, 0.022176736831665038, 0.021777183532714843, 0.021987680435180665, 0.021745439529418945, 0.021680000305175782, 0.02151136016845703, 0.021904191970825194, 0.02185420799255371, 0.021848031997680664, 0.0217620792388916, 0.02188047981262207, 0.02188969612121582, 
0.021946016311645507, 0.02163715171813965, 0.021624448776245118, 0.021813631057739258, 0.021688671112060548, 0.021618495941162108, 0.021604192733764647, 0.021544031143188477, 0.021697439193725587, 0.02162073516845703, 0.021616128921508788, 0.021688575744628905, 0.0214182071685791, 0.02182899284362793, 0.0216231689453125, 0.02154323196411133, 0.021442655563354493, 0.021518207550048827, 0.021741151809692383, 0.021689983367919923, 0.021712703704833983, 0.021603296279907227, 0.021489664077758788, 0.021600255966186522, 0.021538047790527343, 0.021701375961303712, 0.02147942352294922, 0.021548671722412108, 0.02181711959838867, 0.022073951721191407, 0.021766143798828123, 0.021547008514404296, 0.021512447357177736, 0.021800703048706054, 0.021635072708129883, 0.021575679779052736, 0.021432319641113282, 0.02155142402648926, 0.020985824584960937, 0.02126608085632324, 0.021326112747192382, 0.021349439620971678, 0.021433343887329103, 0.02157702445983887, 0.021381376266479492, 0.021616512298583985, 0.02152422332763672, 0.021541696548461914, 0.0215285758972168, 0.021389312744140625, 0.021386560440063478, 0.021330623626708983, 0.02127667236328125, 0.022237184524536133, 0.021288543701171874, 0.02141222381591797, 0.021700607299804688, 0.021612255096435547, 0.022180320739746094, 0.021484800338745117, 0.02154092788696289, 0.021323808670043944, 0.021388927459716798, 0.021560192108154297, 0.02128895950317383, 0.021489152908325194, 0.021328384399414063, 0.021407487869262696, 0.021448768615722657, 0.02133625602722168, 0.021345983505249022, 0.021768512725830077, 0.02263417625427246, 0.022300800323486327, 0.02219241523742676, 0.0216878719329834, 0.021429824829101562, 0.021787424087524414, 0.022111423492431642, 0.022330463409423826, 0.022029119491577147, 0.02198579216003418, 0.021839839935302734, 0.023038143157958983, 0.0217193603515625, 0.021532608032226563, 0.02146928024291992, 0.021499807357788087, 0.021374975204467773, 0.021377023696899415, 0.02136000061035156, 0.021451391220092774, 0.021331968307495116, 0.021779552459716797, 0.02144486427307129, 0.021623296737670897, 0.021735328674316406, 0.02174527931213379, 0.02175823974609375, 0.021510496139526367, 0.021932191848754883, 0.021527584075927735, 0.02155513572692871, 0.021747167587280274, 0.02151888084411621, 0.021303295135498047, 0.021259904861450196, 0.02128927993774414, 0.02174777603149414, 0.021440511703491212, 0.02123334312438965, 0.02123119926452637, 0.021523168563842773, 0.021503456115722658, 0.021375520706176758, 0.02135980796813965, 0.0212607364654541, 0.02154944038391113, 0.02187001609802246, 0.02208620834350586, 0.02146099281311035, 0.021403936386108397, 0.02144428825378418, 0.02265091133117676, 0.021829183578491212, 0.021457088470458983, 0.021323904037475586, 0.021491615295410157, 0.021366432189941408, 0.021319936752319336, 0.021295360565185547, 0.021440576553344727, 0.021329376220703126, 0.021381664276123046, 0.021313888549804687, 0.02134009552001953, 0.02156924819946289, 0.021525856018066405, 0.02162719917297363, 0.021766496658325196, 0.02160428810119629, 0.02161004829406738, 0.02142380714416504, 0.021419967651367187, 0.021224031448364256, 0.021185951232910158, 0.021390335083007812, 0.021337984085083007, 0.021323263168334963, 0.02123766326904297, 0.021480031967163086, 0.02155023956298828, 0.021434240341186524, 0.02133705520629883, 0.021336416244506835, 0.021517984390258788, 0.02123366355895996, 0.021260128021240235, 0.02216352081298828, 0.025294944763183592, 0.021600255966186522, 0.021395456314086913, 0.021624895095825197, 0.02137081527709961, 
0.022188543319702148, 0.021338016510009765, 0.02125823974609375, 0.021458208084106447, 0.021457279205322265, 0.02192624092102051, 0.02145280075073242, 0.02153424072265625, 0.021799455642700194, 0.02199679946899414, 0.021572288513183595, 0.021477535247802736, 0.021671520233154298, 0.02162291145324707, 0.021366527557373047, 0.021336448669433593, 0.021249759674072267, 0.021475616455078124, 0.02164156723022461, 0.022205408096313477, 0.02240905570983887, 0.022692832946777344, 0.02202406311035156, 0.022015071868896483, 0.021758880615234375, 0.02188489532470703, 0.02186591911315918, 0.02178518486022949, 0.02182143974304199, 0.02163408088684082, 0.02158896064758301, 0.02170675277709961, 0.021880544662475587, 0.021741216659545898, 0.021858848571777344, 0.021803104400634765, 0.021778432846069336, 0.022228992462158204, 0.02138729667663574, 0.02146031951904297, 0.021534591674804687, 0.02148374366760254, 0.021456703186035157, 0.021392032623291014, 0.021530176162719728, 0.02173833656311035, 0.022009504318237304, 0.021395456314086913, 0.021782527923583983, 0.02181907272338867, 0.022567232131958007, 0.021523775100708006, 0.021570047378540038, 0.021502080917358397, 0.021477439880371093, 0.021387039184570314, 0.021504287719726563, 0.021483808517456054, 0.021626527786254884, 0.02170675277709961, 0.021712896347045898, 0.02146099281311035, 0.02137411117553711, 0.020966592788696288, 0.021473056793212892, 0.021693504333496094, 0.021763008117675783, 0.021618688583374023, 0.021497856140136717, 0.02141596794128418, 0.021468608856201173, 0.021418527603149416, 0.021409791946411134, 0.02141798400878906, 0.02184601593017578, 0.021485567092895508, 0.0216342716217041, 0.02144745635986328, 0.021524032592773436, 0.021524768829345703, 0.021536191940307616, 0.021472000122070314, 0.021608415603637694, 0.021405696868896484, 0.022202367782592772, 0.021803007125854493, 0.02161664009094238, 0.021421087265014647, 0.021724128723144533, 0.02150761604309082, 0.02141596794128418, 0.02163961601257324, 0.022095903396606446, 0.022155359268188478, 0.021998495101928712, 0.021801631927490236, 0.02456403160095215, 0.021854015350341798, 0.02177401542663574, 0.021496320724487306, 0.021608575820922852, 0.02160201644897461, 0.021555360794067384, 0.02164121627807617, 0.021782815933227537, 0.0217903995513916, 0.02159187126159668, 0.02166192054748535, 0.021525856018066405, 0.021429088592529295, 0.0215567684173584, 0.021586208343505858, 0.021822656631469727, 0.02158675193786621, 0.021660736083984375, 0.02158687973022461, 0.021536415100097656, 0.021534400939941405, 0.021486143112182617, 0.021398784637451172, 0.021528543472290038, 0.02140403175354004, 0.021469696044921875, 0.021526527404785157, 0.021428319931030275, 0.021492671966552735, 0.02096588706970215, 0.021331104278564453, 0.021471136093139647, 0.021302207946777344, 0.021519807815551757, 0.021359167098999023, 0.021407743453979493, 0.02155036735534668, 0.021412832260131836, 0.021433343887329103, 0.022036800384521483, 0.02150592041015625, 0.021352863311767577, 0.021425439834594728, 0.021913984298706054, 0.0216048641204834, 0.021774335861206053, 0.02151628875732422, 0.02147260856628418, 0.02147599983215332, 0.021385215759277345, 0.021479040145874023, 0.021416736602783204, 0.02134556770324707, 0.021399839401245117, 0.021452064514160155, 0.02131190490722656, 0.021491743087768553, 0.02146441650390625, 0.021473695755004883, 0.021414112091064454, 0.023469791412353516, 0.02245427131652832, 0.021838464736938477, 0.021755136489868165, 0.021746431350708008, 0.0216407356262207, 0.0214486083984375, 
0.02144723129272461, 0.021387264251708983, 0.021421087265014647, 0.021735647201538085, 0.021481695175170897, 0.02259008026123047, 0.021381216049194338, 0.02156540870666504, 0.02151753616333008, 0.021469535827636718, 0.02147689628601074, 0.021435136795043944, 0.021344255447387696, 0.021480768203735352, 0.021452768325805664, 0.021368831634521485, 0.021477567672729493, 0.02171139144897461, 0.021761184692382814, 0.021977951049804687, 0.02142144012451172, 0.021419967651367187, 0.021502656936645506, 0.023023616790771483, 0.029345792770385744, 0.021269792556762694, 0.02227689552307129, 0.02152851104736328, 0.0215283203125, 0.021397823333740233, 0.021491647720336914, 0.021734527587890625, 0.021383167266845703, 0.02152332878112793, 0.021564735412597656, 0.021476255416870118, 0.02152943992614746, 0.021474239349365234, 0.02162483215332031, 0.021398591995239257, 0.021625247955322266, 0.02150864028930664, 0.021712448120117186, 0.021436384201049805, 0.021485759735107423, 0.022276384353637695, 0.021671072006225586, 0.02160931205749512, 0.02164249610900879, 0.02187321662902832, 0.02203468894958496, 0.022062719345092772, 0.02227436828613281, 0.02186240005493164, 0.021874975204467774, 0.021816255569458008, 0.02164201545715332, 0.021536512374877928, 0.021506303787231444, 0.021553152084350585, 0.021551103591918946, 0.021411487579345703, 0.021467168807983397, 0.022067007064819337, 0.021486080169677735, 0.02142633628845215, 0.02143631935119629, 0.02188038444519043, 0.021551488876342773, 0.021737567901611327, 0.0217475528717041, 0.021766208648681642, 0.021569536209106444, 0.021609600067138673, 0.021697311401367186, 0.02143459129333496, 0.02141539192199707, 0.021475744247436524, 0.021401599884033205, 0.02137660789489746, 0.021477792739868166, 0.021358591079711914, 0.021452768325805664, 0.021749792098999025, 0.02161187171936035, 0.021571807861328125, 0.021380704879760744, 0.02143052864074707, 0.02119308853149414, 0.021497568130493163, 0.021547615051269533, 0.021692256927490234, 0.021364736557006835, 0.021339231491088868, 0.02140447998046875, 0.02125212860107422, 0.021540607452392578, 0.021356512069702148, 0.02129539108276367, 0.021259647369384765, 0.021309951782226562, 0.021403039932250977, 0.021192575454711916, 0.021405920028686524, 0.02168288040161133, 0.02154876708984375, 0.021350208282470702, 0.021311967849731446, 0.02156787109375, 0.021558528900146486, 0.021324159622192383, 0.02123401641845703, 0.021315263748168944, 0.02164102363586426, 0.02151238441467285, 0.021354623794555664, 0.02125129508972168, 0.021375616073608397, 0.021366783142089844, 0.021370431900024415, 0.021451200485229492, 0.021381120681762695, 0.021511199951171876, 0.021396448135375976, 0.021372928619384765, 0.021577119827270508, 0.021306047439575194, 0.02140764808654785, 0.02145484733581543, 0.021377023696899415, 0.021303295135498047, 0.021476831436157227, 0.0213591365814209, 0.021313407897949218, 0.021336320877075196, 0.021405567169189454, 0.02143846321105957, 0.02182963180541992, 0.021407743453979493, 0.02141119956970215, 0.021633888244628908, 0.021524255752563476, 0.021556480407714844, 0.021357280731201172, 0.02149331283569336, 0.021453279495239258, 0.021405696868896484, 0.021395456314086913, 0.021348352432250976, 0.021321311950683593, 0.021217376708984374, 0.021008384704589843, 0.02129292869567871, 0.021362815856933594, 0.02204876708984375, 0.02130534362792969, 0.02130086326599121, 0.021205568313598634, 0.021640960693359374, 0.02139891242980957, 0.021490367889404297, 0.021397504806518555, 0.021362911224365233, 0.021403423309326174, 
0.0215285758972168, 0.02143846321105957, 0.021307392120361326, 0.021305280685424803, 0.021235488891601564, 0.02136911964416504, 0.021292032241821288, 0.021189407348632814, 0.02123347282409668, 0.021293472290039063, 0.021217279434204102, 0.021241695404052734, 0.021290912628173828, 0.021378719329833984, 0.021525312423706054, 0.022738527297973633, 0.021458976745605467, 0.021521631240844726, 0.021668384552001953, 0.021907392501831054, 0.021690847396850586, 0.021643264770507813, 0.022140928268432617, 0.021753856658935547, 0.02146428871154785, 0.02166659164428711, 0.021385215759277345, 0.021354496002197267, 0.021493759155273438, 0.02127872085571289, 0.021506271362304687, 0.021346080780029298, 0.021397184371948243, 0.02174345588684082, 0.023611871719360352, 0.021719039916992186, 0.022004032135009767, 0.022113983154296874, 0.021517568588256836, 0.02158361625671387, 0.02192460823059082, 0.02221900749206543, 0.021786687850952148, 0.021787967681884766, 0.021807584762573242, 0.021655712127685547, 0.021634559631347656, 0.021481248855590822, 0.02175868797302246, 0.02145305633544922, 0.021368640899658203, 0.021412448883056642, 0.02136662483215332, 0.02136675262451172, 0.021499359130859375, 0.02150662422180176, 0.02146099281311035, 0.021450815200805665, 0.021956544876098633, 0.021968896865844727, 0.021843967437744142, 0.0215482234954834, 0.02165433692932129, 0.02182143974304199, 0.021717056274414063, 0.021639104843139648, 0.02167558479309082, 0.021750207901000976, 0.022726655960083008, 0.02177222442626953, 0.021966911315917968, 0.02162067222595215, 0.02134841537475586, 0.021495967864990233, 0.021454975128173827, 0.021384607315063475, 0.021754175186157226, 0.021356544494628905, 0.02142617607116699, 0.021409791946411134, 0.02151136016845703, 0.021662784576416017, 0.021845760345458983, 0.021344512939453126, 0.02179971122741699, 0.02166268730163574, 0.02169980812072754, 0.021326400756835937, 0.021420255661010742, 0.02134940719604492, 0.02133705520629883, 0.021298816680908203, 0.021457279205322265, 0.021370880126953123, 0.02148761558532715, 0.021447744369506836, 0.021539775848388672, 0.02147327995300293, 0.021358591079711914, 0.02124777603149414, 0.021307615280151366, 0.021327871322631836, 0.021289087295532225, 0.021237312316894533, 0.02146124839782715, 0.02134022331237793, 0.021401311874389647, 0.02194870376586914, 0.021448703765869142, 0.021469247817993163, 0.02160367965698242, 0.021527135848999023, 0.021493759155273438]",tokens/s,46.284212343808214,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4913.119232,5995.626496,0.0,5593.104384,5582.857216,s,1,11.47325390625,11.47325390625,0.0,11.47325390625,11.47325390625,11.47325390625,11.47325390625,[11.47325390625],,kWh,0.00011755960349584407,1.2960246374984289e-05,3.553197286999821e-05,0.00016605182274082657,,MB,1562.48064,6324.87936,0.0,5907.677184,5844.559872,s,10,2.019981018066406,0.20199810180664063,0.0003855504032527179,0.2019721450805664,0.20252928771972656,0.2026807891845703,0.2028019903564453,"[0.20204623413085937, 0.2017305908203125, 0.2015630340576172, 0.2019661407470703, 0.2019781494140625, 0.20283229064941405, 0.20159686279296876, 0.20210099792480468, 0.20249562072753907, 0.20167109680175782]",tokens/s,1267.3386418504656,kWh,5.911116723167197e-06,6.518912872977685e-07,3.936292037920175e-06,1.049930004838514e-05,tokens/kWh,24382577.77377973,MB,1570.7136,6324.87936,0.0,5907.677184,5844.562432,s,10,14.89311328125,1.4893113281250003,0.0027831900490972463,1.4886818237304689,1.4936528686523438,1.494207843017578,1.4946518225097656,"[1.4863668212890624, 1.4868624267578125, 1.4947628173828125, 1.48802294921875, 1.4907061767578125, 1.4872960205078125, 1.4896527099609376, 1.4865731201171875, 1.4893406982421875, 1.493529541015625]",tokens/s,42.30143074202972,kWh,4.347846238225187e-05,4.795396007565931e-06,2.8716400750879446e-05,7.699025914069724e-05,tokens/kWh,818285.3350950477,,s,630,14.89045707130433,0.023635646144927497,0.0001889448159364638,0.023624704360961916,0.023861656951904297,0.023941408443450926,0.02410854320526123,"[0.02360019111633301, 0.023313343048095705, 0.023403615951538087, 0.023172128677368165, 0.023213951110839844, 0.02340220832824707, 0.023537376403808593, 0.02366249656677246, 0.023577247619628906, 0.023399839401245116, 0.023212640762329102, 0.02330419158935547, 0.023470207214355467, 0.023437183380126955, 0.023481760025024414, 0.023508703231811524, 0.023382656097412108, 0.02344576072692871, 0.023635967254638672, 0.023643999099731444, 0.023584928512573242, 0.023627872467041015, 0.023506847381591797, 0.02352332878112793, 0.02373788833618164, 0.023730655670166016, 0.02353971290588379, 0.023431167602539063, 0.02333695983886719, 0.02328166389465332, 0.023473407745361326, 0.023642719268798826, 0.02379520034790039, 0.023833248138427736, 0.023554048538208007, 0.023443456649780273, 0.023747840881347657, 0.024095487594604493, 0.023625600814819337, 0.02351852798461914, 0.023407424926757812, 0.023475839614868165, 0.023800031661987305, 0.02380611228942871, 0.02386147117614746, 0.023856576919555665, 0.023721599578857423, 0.023631935119628907, 0.024013248443603516, 0.023720287322998048, 0.023556192398071288, 
0.02345919990539551, 0.023355424880981444, 0.023673215866088868, 0.023803520202636718, 0.023804288864135742, 0.023771135330200196, 0.02376006317138672, 0.02379244804382324, 0.023848960876464844, 0.023748287200927733, 0.023734592437744142, 0.02369740867614746, 0.023597984313964843, 0.02326655960083008, 0.023408863067626955, 0.02353001594543457, 0.023623712539672853, 0.02346937561035156, 0.023420991897583007, 0.023335519790649413, 0.023435264587402343, 0.023373023986816406, 0.023568607330322264, 0.023640640258789064, 0.023534751892089843, 0.023407455444335937, 0.023576576232910155, 0.02366873550415039, 0.023778751373291017, 0.023627744674682618, 0.023528032302856446, 0.023590911865234376, 0.023533344268798828, 0.023586912155151366, 0.023407936096191406, 0.023497535705566407, 0.023408639907836915, 0.023574047088623047, 0.023470752716064452, 0.023682880401611327, 0.023827455520629884, 0.023774208068847655, 0.02352332878112793, 0.02357811164855957, 0.023828512191772462, 0.023793983459472656, 0.023666175842285156, 0.023585439682006836, 0.023382272720336914, 0.023448352813720704, 0.023632991790771486, 0.023648128509521485, 0.02360927963256836, 0.023668863296508788, 0.02342086410522461, 0.02351103973388672, 0.0236810245513916, 0.023633920669555664, 0.023676895141601564, 0.0235947208404541, 0.023449920654296876, 0.02347113609313965, 0.02366281509399414, 0.02367692756652832, 0.023650751113891602, 0.02358940887451172, 0.023436992645263673, 0.02379132843017578, 0.02392019271850586, 0.023984960556030274, 0.02391766357421875, 0.023591840744018554, 0.02366054344177246, 0.023949312210083007, 0.023799072265625, 0.023705631256103515, 0.023591680526733397, 0.02364022445678711, 0.02374995231628418, 0.023755456924438475, 0.02380905532836914, 0.023835615158081056, 0.023729663848876953, 0.023509504318237305, 0.023721664428710938, 0.023781440734863282, 0.023767200469970703, 0.02370947265625, 0.02356870460510254, 0.023380096435546876, 0.0235231990814209, 0.02378246307373047, 0.02376799964904785, 0.0236810245513916, 0.023435264587402343, 0.023463712692260743, 0.023386335372924803, 0.023557952880859375, 0.02349484825134277, 0.02343926429748535, 0.02351091194152832, 0.023699840545654296, 0.02369094467163086, 0.0237938232421875, 0.023948448181152344, 0.024212255477905273, 0.023611455917358398, 0.02360220718383789, 0.0238067512512207, 0.023883968353271483, 0.02387513542175293, 0.023820735931396483, 0.023564287185668945, 0.0235948486328125, 0.023737920761108398, 0.023765087127685547, 0.023736543655395508, 0.02359939193725586, 0.023513088226318358, 0.02356425666809082, 0.023909439086914064, 0.023972831726074218, 0.02378153610229492, 0.02362303924560547, 0.02354979133605957, 0.023880319595336916, 0.024153375625610353, 0.023853599548339845, 0.02372969627380371, 0.023458463668823242, 0.0235020809173584, 0.023849727630615235, 0.024155296325683594, 0.024111967086791992, 0.023946943283081053, 0.023822528839111328, 0.02393267250061035, 0.023902496337890624, 0.02365190315246582, 0.02356268882751465, 0.023549184799194336, 0.02374924850463867, 0.023947391510009765, 0.023767040252685546, 0.023618623733520507, 0.023572864532470702, 0.0235850887298584, 0.023527679443359376, 0.023572608947753905, 0.02350067138671875, 0.023517183303833008, 0.023413951873779298, 0.02326406478881836, 0.02333443260192871, 0.02343075180053711, 0.02338899230957031, 0.023396352767944335, 0.023369184494018556, 0.023411296844482423, 0.023393695831298827, 0.023556608200073242, 0.02352547264099121, 0.023529472351074218, 0.023590911865234376, 0.02365235137939453, 
0.02368060874938965, 0.023614879608154296, 0.024060640335083008, 0.023832128524780272, 0.02360188865661621, 0.023377599716186522, 0.023427200317382813, 0.02357267189025879, 0.023723583221435546, 0.023615840911865235, 0.023431264877319335, 0.023331071853637694, 0.02344633674621582, 0.023705951690673827, 0.02355583953857422, 0.023533632278442383, 0.02347667121887207, 0.023420608520507813, 0.0235567684173584, 0.023879680633544922, 0.023801855087280274, 0.023748832702636717, 0.02366841506958008, 0.02369340705871582, 0.023825855255126954, 0.024011327743530275, 0.02400444793701172, 0.023808160781860353, 0.023807487487792968, 0.023699968338012696, 0.02385305595397949, 0.024031007766723633, 0.023799776077270508, 0.02364031982421875, 0.0235797119140625, 0.023540576934814452, 0.023525503158569334, 0.023291231155395508, 0.023364383697509764, 0.023504287719726562, 0.023420480728149413, 0.023325471878051757, 0.023333919525146483, 0.02336832046508789, 0.023523168563842775, 0.0234005126953125, 0.023383968353271483, 0.02334979248046875, 0.023741695404052736, 0.023921215057373046, 0.02388528060913086, 0.023886112213134764, 0.02352707290649414, 0.023374624252319336, 0.023629823684692384, 0.023764991760253908, 0.023700544357299805, 0.02372822380065918, 0.023640735626220703, 0.02377676773071289, 0.023851167678833007, 0.023912031173706053, 0.02375315284729004, 0.023577247619628906, 0.02333171272277832, 0.023619903564453124, 0.023763776779174805, 0.02378940773010254, 0.023759071350097655, 0.02373200035095215, 0.023545087814331053, 0.023831296920776367, 0.023838272094726564, 0.02373881530761719, 0.023549663543701173, 0.023501216888427736, 0.02346544075012207, 0.023715551376342774, 0.02382099151611328, 0.024028480529785155, 0.023799903869628908, 0.023644767761230468, 0.023430624008178712, 0.023568992614746095, 0.023633440017700194, 0.023631296157836913, 0.023704320907592773, 0.023599456787109375, 0.023746143341064452, 0.023889471054077148, 0.02386921691894531, 0.02393600082397461, 0.023805152893066405, 0.02361622428894043, 0.023734111785888672, 0.023912607192993166, 0.02381772804260254, 0.023871999740600586, 0.02372559928894043, 0.02374838447570801, 0.023527040481567382, 0.023513824462890624, 0.02364009666442871, 0.02354774475097656, 0.023307872772216798, 0.023384479522705077, 0.023434848785400392, 0.0234903678894043, 0.023454303741455077, 0.023461856842041016, 0.02330403137207031, 0.02335670471191406, 0.023445951461791993, 0.023406463623046873, 0.023347808837890626, 0.023243871688842774, 0.023200576782226562, 0.023230560302734377, 0.023508640289306642, 0.023589216232299804, 0.02353561592102051, 0.023640064239501952, 0.023571744918823242, 0.023380704879760742, 0.023607007980346678, 0.023636255264282226, 0.02370969581604004, 0.02357072067260742, 0.02338323211669922, 0.02320368003845215, 0.023611679077148437, 0.023730592727661134, 0.023853023529052733, 0.02359823989868164, 0.023542591094970703, 0.023355552673339844, 0.023539199829101562, 0.02371625518798828, 0.02376870346069336, 0.02385081672668457, 0.02376252746582031, 0.02354470443725586, 0.023775232315063476, 0.02399216079711914, 0.02394463920593262, 0.023928991317749025, 0.0237861442565918, 0.023590080261230467, 0.023822944641113283, 0.023939296722412108, 0.023917760848999024, 0.023839487075805663, 0.023529407501220703, 0.023740447998046876, 0.023787328720092774, 0.02378371238708496, 0.02388787269592285, 0.02368716812133789, 0.023576480865478516, 0.0236661434173584, 0.02379840087890625, 0.023796735763549806, 0.023577024459838867, 0.023066591262817383, 
0.023328895568847655, 0.023549951553344727, 0.023762943267822266, 0.02370150375366211, 0.02357219123840332, 0.023464256286621094, 0.023385919570922852, 0.023656415939331054, 0.023639488220214843, 0.023627872467041015, 0.023617855072021486, 0.02345385551452637, 0.023548095703125, 0.023789567947387694, 0.023760896682739258, 0.023569536209106446, 0.023593568801879884, 0.023451936721801757, 0.023369279861450196, 0.02351763153076172, 0.023621631622314454, 0.02360633659362793, 0.023482431411743165, 0.023310720443725588, 0.023351808547973633, 0.023584320068359376, 0.023599327087402342, 0.023753023147583006, 0.02382022476196289, 0.02394313621520996, 0.023571456909179687, 0.02373843193054199, 0.02377209663391113, 0.02368921661376953, 0.023476224899291992, 0.023343103408813477, 0.023508256912231445, 0.023784160614013672, 0.02371788787841797, 0.023762144088745118, 0.023776031494140624, 0.023614719390869142, 0.023717920303344728, 0.023775039672851564, 0.023703775405883788, 0.0237063045501709, 0.023518976211547853, 0.023388416290283202, 0.023606784820556642, 0.02381875228881836, 0.02383452796936035, 0.023760896682739258, 0.02370159912109375, 0.02372198486328125, 0.024020992279052734, 0.02408176040649414, 0.024019615173339844, 0.023797183990478515, 0.023599679946899415, 0.02382636833190918, 0.0239484806060791, 0.023662879943847658, 0.023375551223754884, 0.023371807098388674, 0.023513088226318358, 0.023775232315063476, 0.023744287490844725, 0.023511264801025392, 0.023547391891479492, 0.023323135375976564, 0.023416831970214845, 0.023509151458740236, 0.02351638412475586, 0.023602943420410156, 0.02374950408935547, 0.023453535079956053, 0.023390335083007814, 0.023478303909301758, 0.023439136505126953, 0.02345974349975586, 0.023449663162231446, 0.023574655532836913, 0.02325721549987793, 0.023201791763305665, 0.02349056053161621, 0.02349180793762207, 0.023505311965942383, 0.02350102424621582, 0.023385696411132813, 0.023435808181762694, 0.023775264739990233, 0.023994367599487306, 0.02372140884399414, 0.023558624267578127, 0.023470399856567382, 0.0235314884185791, 0.023529279708862306, 0.02354764747619629, 0.02367862319946289, 0.023591520309448243, 0.023476224899291992, 0.02356150436401367, 0.02359107208251953, 0.02367964744567871, 0.023704479217529297, 0.02350796890258789, 0.023601055145263672, 0.023603263854980468, 0.02365222358703613, 0.023695520401000977, 0.0236810245513916, 0.02361452865600586, 0.023534400939941406, 0.023623807907104492, 0.02386534309387207, 0.02383407974243164, 0.023845504760742188, 0.02414806365966797, 0.023631584167480468, 0.0238056640625, 0.023840608596801757, 0.023787935256958007, 0.023789695739746094, 0.0236844482421875, 0.023730367660522462, 0.023545663833618165, 0.02346598434448242, 0.02350694465637207, 0.023573984146118165, 0.023338720321655272, 0.023563072204589842, 0.023924736022949217, 0.0237871036529541, 0.023583232879638674, 0.023483360290527344, 0.023374624252319336, 0.02338364791870117, 0.023484928131103516, 0.023486528396606445, 0.023516511917114256, 0.02343199920654297, 0.023322656631469728, 0.02340640068054199, 0.023572576522827147, 0.023562143325805664, 0.023639680862426758, 0.023564672470092772, 0.023646207809448243, 0.023932191848754884, 0.023656543731689454, 0.023734592437744142, 0.023674688339233398, 0.023526079177856447, 0.023585599899291994, 0.023813087463378905, 0.02386332893371582, 0.023926015853881835, 0.023733152389526366, 0.02351702308654785, 0.02348441505432129, 0.023688352584838868, 0.023669439315795897, 0.023621055603027345, 0.02359369659423828, 
0.02345939254760742, 0.02339232063293457, 0.023531551361083983, 0.023525728225708007, 0.02357151985168457, 0.023704544067382812, 0.02355836868286133, 0.023666336059570314, 0.02387984085083008, 0.02384889602661133, 0.023785472869873047, 0.023713024139404296, 0.02358348846435547, 0.023854719161987305, 0.02380633544921875, 0.02385686492919922, 0.023742752075195314, 0.023554048538208007, 0.023467296600341796, 0.023689952850341797, 0.02403740882873535, 0.024037343978881836, 0.023887264251708985, 0.023525087356567383, 0.02338240051269531, 0.023764991760253908, 0.023823455810546876, 0.023689695358276367, 0.02373049545288086, 0.023594207763671875, 0.02363702392578125, 0.023883295059204102, 0.023814495086669923, 0.023731903076171876, 0.023578943252563475, 0.023460063934326172, 0.02360655975341797, 0.023691007614135742, 0.023765472412109374, 0.023660831451416016, 0.02367910385131836, 0.023567520141601562, 0.023648223876953124, 0.02375142478942871, 0.023619583129882812, 0.023611391067504883, 0.023410911560058593, 0.02340003204345703, 0.023681215286254883, 0.023813888549804686, 0.023799871444702147, 0.023840831756591796, 0.02358518409729004, 0.023695072174072265, 0.02383228874206543, 0.0238123836517334, 0.023666688919067383, 0.02364313507080078, 0.02338515281677246, 0.023449535369873046, 0.023814144134521483, 0.023783231735229494, 0.023711040496826173, 0.023701536178588868, 0.02355830383300781, 0.02366752052307129, 0.02410016059875488, 0.02418659210205078, 0.024014911651611327, 0.023770111083984375, 0.0236824951171875, 0.023593311309814454, 0.023789567947387694, 0.023837696075439452, 0.023694015502929686, 0.02374185562133789, 0.023705984115600588, 0.023877311706542968, 0.02456889533996582, 0.023727359771728514, 0.023574176788330077, 0.02340656089782715, 0.023460031509399414, 0.023722719192504883, 0.02373420715332031, 0.023666751861572265]",tokens/s,42.30897661389352,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking 
backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4051.959808,4705.878016,0.0,4303.355904,4034.388992,s,1,10.8661083984375,10.8661083984375,0.0,10.8661083984375,10.8661083984375,10.8661083984375,10.8661083984375,[10.8661083984375],,kWh,9.204202662083768e-05,1.0145559628117612e-05,2.850391169199884e-05,0.00013069149794095415,,MB,4047.450112,4764.598272,0.0,4347.396096,4202.566656,s,10,1.9079676055908201,0.190796760559082,0.0003826545170538232,0.19068252563476562,0.19127831268310547,0.19129922103881836,0.19131594772338867,"[0.19033404541015625, 0.19076463317871092, 0.19056294250488282, 0.19059657287597656, 0.19127366638183593, 0.19125302124023438, 0.19021192932128905, 0.19060041809082032, 0.1910502471923828, 
0.19132012939453125]",tokens/s,1341.7418579322639,kWh,5.5966284450469795e-06,6.172078091286968e-07,3.7061769691320295e-06,9.920013223307706e-06,tokens/kWh,25806417.213085122,MB,4050.518016,4764.598272,0.0,4347.396096,4213.08416,s,10,15.977960449218749,1.5977960449218749,0.009045792168758312,1.5948738403320313,1.609370361328125,1.61231689453125,1.6146741210937499,"[1.607498291015625, 1.59230517578125, 1.5956602783203124, 1.608715576171875, 1.615263427734375, 1.59408740234375, 1.58989453125, 1.5980814208984375, 1.5865423583984375, 1.5899119873046874]",tokens/s,39.42931277131833,kWh,4.638766980953673e-05,5.116314542565207e-06,3.0880184033268506e-05,8.238416838537044e-05,tokens/kWh,764710.0314869158,,s,630,15.975381315231335,0.025357748119414797,0.000410379029997672,0.025255871772766114,0.025797766304016114,0.026085864162445068,0.026799885692596438,"[0.027425567626953126, 0.02663542366027832, 0.02554966354370117, 0.02536857604980469, 0.025325376510620116, 0.025345535278320314, 0.025200992584228515, 0.025628000259399413, 0.025322399139404296, 0.02590665626525879, 0.02623551940917969, 0.026736640930175783, 0.025585599899291992, 0.025448511123657227, 0.02542144012451172, 0.02584204864501953, 0.02516537666320801, 0.025369024276733397, 0.025118175506591796, 0.02504473686218262, 0.02508380889892578, 0.0253571834564209, 0.025006080627441408, 0.02508595275878906, 0.025389055252075195, 0.025309183120727538, 0.02524569511413574, 0.025219072341918947, 0.02509414482116699, 0.025174016952514647, 0.02508799934387207, 0.025162784576416016, 0.025266752243041993, 0.02544816017150879, 0.025335615158081054, 0.025606399536132814, 0.025759807586669924, 0.025645151138305664, 0.025514463424682617, 0.026447135925292967, 0.0266014404296875, 0.026092031478881835, 0.02569036865234375, 0.02549737548828125, 0.025429248809814453, 0.025461727142333985, 0.025606143951416017, 0.025563135147094726, 0.02546873664855957, 0.025325759887695313, 0.025442304611206053, 0.02558361625671387, 0.025408607482910156, 0.02528348731994629, 0.025231264114379884, 0.02525382423400879, 0.025511615753173827, 0.02522972869873047, 0.02521708869934082, 0.025231359481811523, 0.025239519119262695, 0.025395040512084962, 0.02555104064941406, 0.026262048721313477, 0.025645055770874024, 0.025656639099121095, 0.025672000885009767, 0.025767679214477538, 0.025498239517211915, 0.025466367721557616, 0.026169183731079102, 0.02551465606689453, 0.025675039291381836, 0.025379552841186523, 0.02522854423522949, 0.025585504531860353, 0.025231775283813478, 0.025358848571777344, 0.025217023849487305, 0.02517753601074219, 0.02514796829223633, 0.025095808029174806, 0.025124351501464845, 0.025250688552856445, 0.02508768081665039, 0.025024831771850584, 0.024923295974731446, 0.024978271484375, 0.0252127685546875, 0.024939807891845703, 0.024961088180541994, 0.025008319854736328, 0.02499843215942383, 0.02494473648071289, 0.025004032135009766, 0.025223167419433593, 0.025036800384521486, 0.025010175704956054, 0.024989696502685548, 0.025212928771972655, 0.024999935150146483, 0.025194496154785157, 0.025593856811523437, 0.02539036750793457, 0.02561097526550293, 0.025192384719848634, 0.025118783950805666, 0.025007936477661134, 0.025266368865966796, 0.025261407852172853, 0.02516793632507324, 0.025112415313720705, 0.02515635108947754, 0.025012224197387696, 0.02507161521911621, 0.02516579246520996, 0.025106176376342774, 0.025433664321899415, 0.02550822448730469, 0.02529680061340332, 0.025199039459228516, 0.025167648315429687, 0.025118911743164062, 0.025259456634521484, 
0.025387615203857423, 0.025571327209472656, 0.026066944122314452, 0.025227903366088867, 0.025138240814208984, 0.024969215393066405, 0.024994655609130858, 0.02500111961364746, 0.025293760299682617, 0.02529177665710449, 0.02530988883972168, 0.02519183921813965, 0.02522819137573242, 0.02535628890991211, 0.025503103256225585, 0.025620576858520507, 0.025786720275878906, 0.025661632537841796, 0.025815040588378906, 0.025587711334228515, 0.025793920516967772, 0.025667360305786133, 0.02560905647277832, 0.02573289680480957, 0.02552444839477539, 0.02573107147216797, 0.02571468734741211, 0.02572015953063965, 0.025622528076171876, 0.025582239151000975, 0.025642112731933595, 0.025593856811523437, 0.025412479400634767, 0.025290592193603516, 0.02518556785583496, 0.025487871170043946, 0.025306655883789064, 0.025223455429077148, 0.025283136367797852, 0.025195871353149414, 0.02516854476928711, 0.024963071823120117, 0.02515558433532715, 0.025744543075561524, 0.025096927642822266, 0.02505945587158203, 0.02502016067504883, 0.025145023345947266, 0.025080352783203124, 0.025363584518432618, 0.026208864212036134, 0.0251661434173584, 0.025010080337524415, 0.025040960311889647, 0.025088031768798827, 0.02509619140625, 0.025030656814575194, 0.025169023513793944, 0.0249597110748291, 0.024885408401489256, 0.02486835289001465, 0.024895999908447267, 0.024896831512451173, 0.025073856353759767, 0.024863231658935548, 0.026054752349853515, 0.025329376220703127, 0.025354143142700195, 0.025207168579101564, 0.025317375183105468, 0.025156768798828125, 0.02504697608947754, 0.02509494400024414, 0.02507491111755371, 0.02504979133605957, 0.02513711929321289, 0.025121023178100586, 0.025264127731323242, 0.025439807891845703, 0.02531068801879883, 0.02520572853088379, 0.025060575485229494, 0.02506380844116211, 0.024987360000610352, 0.024987775802612303, 0.024948448181152345, 0.02482671928405762, 0.025020416259765626, 0.024901344299316407, 0.024956640243530274, 0.025149280548095704, 0.025340639114379882, 0.025417055130004883, 0.02534988784790039, 0.02524825668334961, 0.025356704711914063, 0.025403072357177734, 0.025722623825073242, 0.025731647491455078, 0.025450496673583983, 0.025413631439208984, 0.02550912094116211, 0.02587923240661621, 0.028904767990112306, 0.02574412727355957, 0.02586419105529785, 0.025759008407592772, 0.025768672943115235, 0.02556723213195801, 0.025581567764282227, 0.02556723213195801, 0.025666751861572266, 0.025822015762329103, 0.02558348846435547, 0.025796607971191408, 0.02589094352722168, 0.02595840072631836, 0.025857791900634766, 0.02702505683898926, 0.026091455459594726, 0.026088096618652343, 0.025808191299438475, 0.025815744400024414, 0.02573721694946289, 0.02555023956298828, 0.025680160522460936, 0.02576806449890137, 0.025637056350708006, 0.026224639892578124, 0.025726688385009765, 0.02526345634460449, 0.025264608383178712, 0.02513148880004883, 0.025159168243408202, 0.02576416015625, 0.025423519134521483, 0.025607839584350586, 0.025432960510253906, 0.02571219253540039, 0.025478591918945314, 0.02550067138671875, 0.025393152236938478, 0.02545248031616211, 0.02521299171447754, 0.025257600784301757, 0.025147775650024413, 0.025902528762817383, 0.025486976623535156, 0.02676419258117676, 0.025518112182617188, 0.025709951400756836, 0.02537126350402832, 0.026183328628540038, 0.02572118377685547, 0.025667264938354493, 0.025484607696533202, 0.02586835289001465, 0.02556729507446289, 0.02575654411315918, 0.025636863708496094, 0.026814464569091798, 0.025722816467285158, 0.025885759353637697, 0.025414560317993166, 
0.025702112197875975, 0.025722623825073242, 0.025820959091186525, 0.025869152069091798, 0.026253311157226563, 0.02576083183288574, 0.025893823623657226, 0.025677440643310546, 0.026161535263061524, 0.025592992782592774, 0.025897823333740234, 0.02575974464416504, 0.025786367416381836, 0.025995264053344725, 0.025862144470214843, 0.025636512756347655, 0.025518047332763673, 0.025397024154663085, 0.02563852882385254, 0.02557417678833008, 0.02573535919189453, 0.02548121643066406, 0.025288703918457032, 0.02527027130126953, 0.02511027145385742, 0.025026432037353517, 0.02497315216064453, 0.026037216186523438, 0.025245344161987305, 0.025007488250732422, 0.025144256591796876, 0.025092063903808595, 0.025178176879882812, 0.025552896499633788, 0.025554943084716796, 0.025628671646118165, 0.02534364891052246, 0.025157983779907227, 0.025016319274902343, 0.025007295608520507, 0.025060096740722657, 0.02497532844543457, 0.0251943359375, 0.025090303421020508, 0.025001983642578125, 0.025387008666992186, 0.02518396759033203, 0.024967456817626955, 0.024899263381958008, 0.024982912063598633, 0.024811807632446288, 0.02493712043762207, 0.024929759979248046, 0.024863264083862305, 0.02495462417602539, 0.025551103591918947, 0.025349920272827148, 0.025472991943359374, 0.025118976593017577, 0.025103456497192384, 0.02506230354309082, 0.02507161521911621, 0.02513920021057129, 0.024990911483764647, 0.026189952850341796, 0.026091968536376953, 0.025235712051391603, 0.025414880752563478, 0.025176864624023437, 0.02742620849609375, 0.026646528244018555, 0.0250883846282959, 0.025237632751464845, 0.025059423446655273, 0.025118719100952147, 0.025225215911865235, 0.0251080322265625, 0.025267967224121092, 0.02526688003540039, 0.025949535369873048, 0.02531599998474121, 0.025176063537597656, 0.025378816604614256, 0.02506342315673828, 0.025454368591308594, 0.025131168365478514, 0.02540959930419922, 0.02547302436828613, 0.025365535736083984, 0.025466848373413085, 0.025520128250122072, 0.025101856231689455, 0.024915807723999022, 0.02483465576171875, 0.024954912185668945, 0.024840063095092773, 0.025095808029174806, 0.02501273536682129, 0.024905759811401366, 0.02489449691772461, 0.025506752014160156, 0.0260831356048584, 0.025432256698608397, 0.025229312896728515, 0.025456159591674805, 0.02527280044555664, 0.02519353675842285, 0.02544735908508301, 0.02529484748840332, 0.025282463073730468, 0.025608287811279298, 0.025360031127929686, 0.025446687698364258, 0.025299007415771485, 0.025257280349731445, 0.025293567657470702, 0.025380416870117186, 0.025166208267211915, 0.02513689613342285, 0.025235679626464842, 0.025298847198486327, 0.02522534370422363, 0.02526736068725586, 0.025125471115112305, 0.025067039489746094, 0.025163488388061525, 0.02524176025390625, 0.025019231796264647, 0.025020416259765626, 0.025118719100952147, 0.025001983642578125, 0.02492416000366211, 0.024928255081176756, 0.02494259262084961, 0.025036800384521486, 0.02531532859802246, 0.025437471389770507, 0.025473760604858398, 0.025327007293701173, 0.025561695098876954, 0.025573375701904297, 0.02574950408935547, 0.025478591918945314, 0.02525446319580078, 0.02522230339050293, 0.025144159317016603, 0.025233087539672853, 0.02537094306945801, 0.025175167083740235, 0.025019264221191405, 0.025016319274902343, 0.025251840591430662, 0.025206367492675782, 0.026196352005004885, 0.025501535415649413, 0.025509727478027343, 0.0261658878326416, 0.02575155258178711, 0.02559164810180664, 0.02582134437561035, 0.02575155258178711, 0.02559903907775879, 0.025688671112060548, 0.025657472610473634, 
0.025569503784179687, 0.025812992095947264, 0.02572287940979004, 0.025619583129882814, 0.025413951873779296, 0.025527936935424805, 0.0254453125, 0.025413631439208984, 0.025313247680664064, 0.025583103179931642, 0.025223039627075197, 0.025149856567382813, 0.025219327926635744, 0.025057024002075195, 0.025046815872192384, 0.024965024948120116, 0.025094720840454103, 0.025069343566894532, 0.02492438316345215, 0.02491596794128418, 0.025157087326049803, 0.025396831512451173, 0.025275327682495116, 0.025550464630126953, 0.025982912063598634, 0.025551296234130858, 0.025881856918334962, 0.02520140838623047, 0.025251136779785157, 0.0252423038482666, 0.025153535842895508, 0.025089408874511717, 0.025137535095214845, 0.024976800918579102, 0.024914783477783205, 0.025003904342651366, 0.02510451126098633, 0.025019872665405275, 0.024989248275756836, 0.02495372772216797, 0.02525929641723633, 0.025736000061035155, 0.025036800384521486, 0.025001983642578125, 0.02497865676879883, 0.024998687744140626, 0.025351680755615235, 0.025106880187988283, 0.025186368942260742, 0.025792512893676758, 0.025786304473876955, 0.02542598342895508, 0.02651228713989258, 0.025450496673583983, 0.025126367568969726, 0.025025056838989257, 0.025190399169921874, 0.025200639724731445, 0.025089855194091796, 0.025020608901977538, 0.025089632034301756, 0.024963167190551756, 0.025051456451416015, 0.02538812828063965, 0.024965248107910155, 0.025002016067504882, 0.024916736602783204, 0.025296192169189453, 0.024891231536865236, 0.024906591415405275, 0.02571878433227539, 0.025689311981201172, 0.024930335998535155, 0.02506825637817383, 0.025042623519897462, 0.025179584503173827, 0.02525846481323242, 0.02514579200744629, 0.025040639877319335, 0.025032960891723632, 0.02507302474975586, 0.02522995185852051, 0.024977407455444335, 0.024944320678710937, 0.02503424072265625, 0.02496371269226074, 0.025001983642578125, 0.0250361270904541, 0.02576870346069336, 0.025245792388916017, 0.025233407974243165, 0.025235456466674806, 0.027006464004516603, 0.025335712432861326, 0.024973920822143555, 0.024927295684814454, 0.025314239501953124, 0.02493235206604004, 0.02488115119934082, 0.02490777587890625, 0.02504876708984375, 0.0249051513671875, 0.02492505645751953, 0.02504649543762207, 0.025215520858764648, 0.025464832305908205, 0.026197824478149414, 0.02557107162475586, 0.02508639907836914, 0.02494419288635254, 0.025078176498413086, 0.024913951873779296, 0.024852479934692383, 0.024975360870361327, 0.02488115119934082, 0.025830528259277345, 0.025226079940795898, 0.025010208129882812, 0.02495849609375, 0.024979936599731446, 0.02490163230895996, 0.024882368087768555, 0.025300991058349608, 0.025469728469848633, 0.02526825523376465, 0.026462207794189452, 0.02633020782470703, 0.025061695098876954, 0.02504150390625, 0.02506342315673828, 0.02513206481933594, 0.02495158386230469, 0.024756032943725585, 0.025151872634887697, 0.024864767074584963, 0.02508595275878906, 0.02497331237792969, 0.02527027130126953, 0.025188352584838865, 0.02704697608947754, 0.025540672302246093, 0.025188575744628905, 0.025030847549438476, 0.024973567962646485, 0.025088224411010742, 0.025198591232299804, 0.025004032135009766, 0.025012224197387696, 0.025061279296875, 0.025148544311523437, 0.0252589111328125, 0.025073375701904297, 0.02506787109375, 0.025075712203979493, 0.025012224197387696, 0.025034303665161132, 0.025164224624633788, 0.02514441680908203, 0.025297311782836913, 0.025287168502807617, 0.025091680526733398, 0.02499990463256836, 0.025644479751586916, 0.02513587188720703, 
0.025612543106079102, 0.025350143432617187, 0.02507263946533203, 0.024996864318847657, 0.025161727905273438, 0.025077119827270507, 0.02508019256591797, 0.024980735778808594, 0.025305599212646485, 0.02506598472595215, 0.025058624267578124, 0.025392831802368163, 0.025742336273193358, 0.02599081611633301]",tokens/s,39.43567840846105,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,3161.960448,4423.876608,0.0,4028.628992,3944.723968,s,1,10.405025390625,10.405025390625,0.0,10.405025390625,10.405025390625,10.405025390625,10.405025390625,[10.405025390625],,kWh,9.278183984999561e-05,1.022738812495303e-05,2.9278912311997418e-05,0.00013228814028694606,,MB,3216.65024,4763.615232,0.0,4353.687552,4305.05728,s,10,1.091167808532715,0.1091167808532715,0.0004448517049072606,0.10910725021362305,0.10960431518554688,0.10980559844970703,0.10996662506103516,"[0.11000688171386719, 0.10916985321044922, 0.10916105651855469, 0.10896675109863281, 0.10890582275390626, 0.10955958557128906, 0.1091534423828125, 0.1090610580444336, 0.10901763153076172, 0.10816572570800781]",tokens/s,2346.110268266081,kWh,3.258486512037279e-06,3.593257365275346e-07,2.1669801286445e-06,5.7847923772093135e-06,tokens/kWh,44253965.10488056,MB,3216.65024,4763.615232,0.0,4353.687552,4305.05984,s,10,21.80553564453125,2.180553564453125,0.02097974010962949,2.1801929931640625,2.2088744140625,2.2116095703125,2.2137976953125,"[2.156970703125, 2.16743212890625, 2.1854130859375, 2.18996337890625, 2.2082666015625, 2.2143447265625, 2.19798046875, 2.158267822265625, 2.174972900390625, 2.151923828125]",tokens/s,28.891746126768584,kWh,6.251471981838032e-05,6.894757265456508e-06,3.406832663735535e-05,0.00010347780372119218,tokens/kWh,608826.2190966626,,s,630,21.802990219116204,0.03460792098272415,0.0006658891403288806,0.034552591323852536,0.0352255615234375,0.035505592918395994,0.03711125492095947,"[0.03509654235839844, 0.03595951843261719, 0.035210529327392576, 0.034835166931152343, 0.03445062255859375, 0.03415110397338867, 0.03393548965454102, 0.03414435195922851, 0.034076576232910154, 0.03444262313842773, 0.03470195388793945, 0.03417462539672852, 0.034293758392333985, 0.03453916931152344, 0.03415420913696289, 0.03390768051147461, 0.033990657806396485, 0.03405619049072266, 0.03407462310791016, 0.03397836685180664, 0.033908447265625, 0.03399871826171875, 0.03419587326049805, 0.03399244689941406, 0.03463139343261719, 0.03472835159301758, 0.03432815933227539, 0.0341099853515625, 0.03376128005981445, 0.0343138542175293, 0.033948257446289064, 0.03386729431152344, 0.03376582336425781, 0.033990623474121094, 0.03388809585571289, 0.03425484848022461, 0.03367731094360352, 0.03400246429443359, 0.03439459228515625, 0.03425075149536133, 0.03394889450073242, 0.033987422943115235, 0.03379363250732422, 0.034143775939941404, 0.03374982452392578, 0.03466156768798828, 0.03387065505981445, 0.03401318359375, 
0.03391692733764649, 0.03429507064819336, 0.03451279830932617, 0.03441337585449219, 0.034031105041503903, 0.03390476989746094, 0.034646400451660155, 0.03609395217895508, 0.03399660873413086, 0.034113792419433596, 0.03396207809448242, 0.03436243057250977, 0.033936161041259766, 0.03416409683227539, 0.03404249572753906, 0.033922752380371096, 0.03433776092529297, 0.03377884674072266, 0.03417379379272461, 0.03391692733764649, 0.034095104217529294, 0.03455376052856445, 0.035139041900634764, 0.034988094329833984, 0.0345337905883789, 0.0342437744140625, 0.034579071044921875, 0.03435760116577148, 0.034170879364013675, 0.03415244674682617, 0.03437363052368164, 0.034473983764648435, 0.03499990463256836, 0.034547168731689455, 0.034648448944091796, 0.03439878463745117, 0.03417497634887695, 0.034342910766601564, 0.03405984115600586, 0.03398905563354492, 0.034218273162841796, 0.034145694732666015, 0.03409337615966797, 0.03409414291381836, 0.03392006301879883, 0.03439603042602539, 0.0349793930053711, 0.034476478576660155, 0.0344637451171875, 0.034080768585205076, 0.03424383926391601, 0.03412416076660156, 0.03412211227416992, 0.03400006484985352, 0.03408364868164063, 0.034430816650390626, 0.03420585632324219, 0.03439206314086914, 0.03450470352172851, 0.03434604644775391, 0.0343438720703125, 0.034945022583007815, 0.03455535888671875, 0.0343680305480957, 0.034605216979980466, 0.03431817626953125, 0.03422329711914063, 0.03417171096801758, 0.038128673553466795, 0.03480630493164062, 0.03482444763183594, 0.03446137619018555, 0.034327041625976565, 0.03402751922607422, 0.03420707321166992, 0.034441600799560546, 0.03404828643798828, 0.034103294372558594, 0.0355650863647461, 0.035081695556640625, 0.03469327926635742, 0.03470217514038086, 0.034557823181152345, 0.03460051345825195, 0.03414883041381836, 0.034154590606689454, 0.0343818244934082, 0.034854911804199216, 0.03466796875, 0.03447251129150391, 0.034484222412109376, 0.03455590438842773, 0.034710655212402346, 0.034253150939941406, 0.03412022399902344, 0.0339351692199707, 0.03395734405517578, 0.03386220932006836, 0.0338331184387207, 0.03466864013671875, 0.03408886337280274, 0.034938880920410156, 0.03616553497314453, 0.03512723159790039, 0.035084190368652346, 0.03486896133422852, 0.03422467041015625, 0.034164894104003904, 0.03409628677368164, 0.03440915298461914, 0.034516990661621096, 0.034353153228759765, 0.03438796615600586, 0.03469107055664063, 0.034830337524414064, 0.034559745788574216, 0.034516895294189456, 0.03556524658203125, 0.03461574554443359, 0.03441622543334961, 0.0347465934753418, 0.03712432098388672, 0.035794113159179686, 0.034700286865234374, 0.03456121444702148, 0.03482502365112305, 0.034740097045898435, 0.034680606842041016, 0.03462118530273438, 0.0347039680480957, 0.03428966522216797, 0.034447647094726565, 0.03426793670654297, 0.03435212707519531, 0.03504316711425781, 0.035305023193359375, 0.034802207946777346, 0.034887680053710936, 0.03566400146484375, 0.03505539321899414, 0.03467273712158203, 0.03537715148925781, 0.03451289749145508, 0.03461648178100586, 0.03460796737670899, 0.034680831909179685, 0.0351879997253418, 0.03484659194946289, 0.03484297561645508, 0.03463164901733398, 0.03426556777954102, 0.034078784942626957, 0.03398976135253906, 0.033917823791503904, 0.03422617721557617, 0.034369537353515625, 0.034772991180419925, 0.03456361770629883, 0.03477164840698242, 0.03490505599975586, 0.034991104125976565, 0.03879670333862305, 0.03533567810058594, 0.0346591682434082, 0.03478707122802734, 0.034330944061279296, 0.0346063346862793, 
0.03451980972290039, 0.03462742233276367, 0.03484832000732422, 0.034780895233154294, 0.03472473526000976, 0.03473958587646484, 0.0345258560180664, 0.03461526489257812, 0.03456185531616211, 0.034276542663574217, 0.03456467056274414, 0.03443545532226563, 0.03443308639526367, 0.03444073486328125, 0.03492659378051758, 0.03478371047973633, 0.03444351959228516, 0.03461503982543945, 0.03451084899902344, 0.03458047866821289, 0.034566143035888675, 0.034729312896728516, 0.034703296661376955, 0.0348076171875, 0.03496217727661133, 0.034923774719238285, 0.035213375091552736, 0.03538118362426758, 0.03506473541259766, 0.034935905456542966, 0.034724544525146485, 0.03443119812011719, 0.03478112030029297, 0.03448384094238281, 0.03514147186279297, 0.03490671920776367, 0.03527635192871094, 0.035624767303466795, 0.034840766906738284, 0.0347770881652832, 0.035833854675292966, 0.034551422119140626, 0.034689407348632816, 0.034639297485351564, 0.0348326416015625, 0.03500374221801758, 0.035044097900390626, 0.035154144287109376, 0.03490611267089844, 0.03464803314208984, 0.03485084915161133, 0.03470675277709961, 0.03522364807128906, 0.034676704406738285, 0.03496409606933594, 0.03654156875610352, 0.034880542755126955, 0.03480508804321289, 0.03857664108276367, 0.039120670318603515, 0.03547772979736328, 0.035065345764160157, 0.036252159118652344, 0.03515561676025391, 0.034939231872558596, 0.035098175048828124, 0.03520131301879883, 0.034938880920410156, 0.034785438537597656, 0.03502899169921875, 0.03494297790527344, 0.03540297698974609, 0.034892097473144534, 0.034705886840820314, 0.034715648651123046, 0.03491017532348633, 0.034504127502441403, 0.03457904052734375, 0.03451084899902344, 0.034441375732421876, 0.034786975860595704, 0.03572326278686523, 0.034955455780029294, 0.03479267120361328, 0.03472006225585938, 0.03451667022705078, 0.03495987319946289, 0.03466287994384765, 0.03441030502319336, 0.0344268798828125, 0.034720863342285156, 0.03499212646484375, 0.034783905029296874, 0.03461759948730469, 0.034715648651123046, 0.03663644790649414, 0.034318431854248044, 0.03423654556274414, 0.03424822235107422, 0.034327007293701174, 0.035743743896484374, 0.0349813117980957, 0.03508486557006836, 0.03509209442138672, 0.034703742980957034, 0.03487094497680664, 0.035350879669189456, 0.03534643173217773, 0.03541315078735351, 0.03526460647583008, 0.035396350860595706, 0.035339488983154296, 0.03519302368164062, 0.035885887145996095, 0.03549776077270508, 0.035512001037597656, 0.035766654968261716, 0.03545270538330078, 0.03541798400878906, 0.03645257568359375, 0.035020030975341794, 0.035074432373046874, 0.03521120071411133, 0.03520134353637695, 0.03535865783691406, 0.035495872497558596, 0.03512761688232422, 0.035181888580322264, 0.03532025527954102, 0.03533657455444336, 0.035026016235351565, 0.035149951934814454, 0.035219680786132815, 0.03542265701293945, 0.03548291015625, 0.03532255935668945, 0.035182624816894534, 0.03524937438964844, 0.035242782592773435, 0.03516630554199219, 0.035165313720703126, 0.03500316619873047, 0.03480780792236328, 0.03474147033691406, 0.03481884765625, 0.03463782501220703, 0.034560001373291016, 0.034548927307128906, 0.03493356704711914, 0.03480985641479492, 0.03451446533203125, 0.03480403137207031, 0.03469942474365234, 0.03495462417602539, 0.035043968200683596, 0.03493289566040039, 0.03481340789794922, 0.03489782333374023, 0.03485948944091797, 0.03466377639770508, 0.034996158599853514, 0.03510140609741211, 0.03519875335693359, 0.035409374237060545, 0.03462995147705078, 0.03457660675048828, 0.03436515045166016, 
0.03453366470336914, 0.03482624053955078, 0.035003681182861325, 0.03593084716796875, 0.03477119827270508, 0.03476591873168945, 0.03486991882324219, 0.03469827270507812, 0.03465315246582031, 0.03475807952880859, 0.03452985763549805, 0.03474172973632812, 0.03506230545043945, 0.03494841766357422, 0.03519081497192383, 0.03495126342773437, 0.03480137634277344, 0.034683582305908206, 0.03494620895385742, 0.034976768493652347, 0.03466035079956055, 0.034781185150146485, 0.03496345520019531, 0.0348671989440918, 0.03478732681274414, 0.03504870223999024, 0.034992511749267576, 0.03479385757446289, 0.03472348785400391, 0.034721439361572265, 0.03470748901367188, 0.03489449691772461, 0.03505753707885742, 0.03471516799926758, 0.03458623886108399, 0.034869598388671874, 0.035068641662597655, 0.03502070236206055, 0.03495062255859375, 0.035025440216064456, 0.035039039611816404, 0.03559401702880859, 0.03707926559448242, 0.03541196823120117, 0.03491244888305664, 0.035081825256347655, 0.034916767120361326, 0.03485081481933594, 0.03526041412353516, 0.03489542388916016, 0.035051166534423826, 0.034824897766113284, 0.03441468811035156, 0.034426433563232425, 0.034592254638671875, 0.03435820770263672, 0.03441049575805664, 0.03435449600219727, 0.034409183502197266, 0.03584000015258789, 0.035125022888183595, 0.03453155136108398, 0.03490611267089844, 0.034385921478271485, 0.03449446487426758, 0.034203647613525394, 0.03486265563964844, 0.034346462249755856, 0.034222270965576174, 0.034565086364746096, 0.03439708709716797, 0.03424256134033203, 0.03418320083618164, 0.03398332977294922, 0.0339733772277832, 0.03397097778320313, 0.034213985443115234, 0.03391507339477539, 0.03389763259887695, 0.033841888427734376, 0.03421366500854492, 0.03461920166015625, 0.03466713714599609, 0.034688766479492185, 0.034313377380371095, 0.03388678359985352, 0.034439456939697265, 0.03433606338500977, 0.033769439697265625, 0.033979393005371096, 0.03383779144287109, 0.0337716178894043, 0.03385232162475586, 0.034070526123046875, 0.03451084899902344, 0.03450470352172851, 0.035192832946777344, 0.03407177734375, 0.03403807830810547, 0.033831390380859375, 0.0339046401977539, 0.03376278305053711, 0.033944095611572266, 0.03393283081054688, 0.033931743621826174, 0.034598911285400394, 0.03465420913696289, 0.03409920120239258, 0.034164798736572265, 0.03379929733276367, 0.0338460807800293, 0.03393769454956055, 0.034035423278808596, 0.03392716979980469, 0.03393920135498047, 0.033820926666259764, 0.034618751525878906, 0.03434764862060547, 0.03470950317382813, 0.034523136138916014, 0.03405414581298828, 0.03476620864868164, 0.03478169631958008, 0.03407257461547852, 0.03406028747558594, 0.03402921676635742, 0.03434121704101562, 0.03446783828735352, 0.03423436737060547, 0.03403571319580078, 0.034086910247802735, 0.03407417678833008, 0.034165184020996095, 0.03404211044311523, 0.03380607986450195, 0.034104991912841796, 0.034027870178222654, 0.03464751815795898, 0.035576446533203125, 0.03469609451293945, 0.034511871337890625, 0.03500758361816406, 0.03757689666748047, 0.035991680145263674, 0.03477155303955078, 0.03431628799438476, 0.03403945541381836, 0.03410364913940429, 0.034293758392333985, 0.03417497634887695, 0.034411937713623046, 0.03487152099609375, 0.03424643325805664, 0.03402403259277344, 0.03400908660888672, 0.03389788818359375, 0.03394211196899414, 0.03400089645385742, 0.03410124969482422, 0.03391897583007813, 0.034301246643066406, 0.034157249450683595, 0.034395904541015626, 0.035299583435058596, 0.034170879364013675, 0.03416595077514648, 0.03697318267822266, 
0.03438963317871094, 0.039381568908691406, 0.03473932647705078, 0.0350599365234375, 0.035256446838378905, 0.034990623474121095, 0.03497369766235352, 0.03488668823242187, 0.03453231811523438, 0.03417472076416016, 0.03397635269165039, 0.03385776138305664, 0.03382259368896484, 0.034319553375244144, 0.033739391326904296, 0.03385139083862305, 0.03376115036010742, 0.03409561538696289, 0.03479347229003906, 0.034361343383789066, 0.033853408813476565, 0.03376278305053711, 0.03380691146850586, 0.033767425537109375, 0.03386163330078125, 0.0340450553894043, 0.03404422378540039, 0.03406905746459961, 0.03470131301879883, 0.0353372802734375, 0.034986686706542966, 0.034277023315429686, 0.034027679443359375, 0.033849342346191406, 0.033900993347167965, 0.03395174407958984, 0.03382463836669922, 0.03386175918579101, 0.033957279205322266, 0.035156574249267575, 0.03484463882446289, 0.03477916717529297, 0.03410678482055664, 0.033841758728027346, 0.033939582824707035, 0.03377139282226563, 0.033990657806396485, 0.03378176116943359, 0.03415654373168946, 0.033890304565429685, 0.03390566253662109, 0.03387078475952148, 0.03399875259399414, 0.03388022232055664, 0.03407254409790039, 0.03374460983276367, 0.03406012725830078, 0.033717758178710935, 0.033992897033691405, 0.034468639373779295, 0.03417702484130859, 0.03381657409667969, 0.03401068878173828, 0.034042304992675784, 0.03454969787597656, 0.03435504150390625, 0.03427145767211914, 0.034184638977050784, 0.03400140762329101, 0.03411500930786133, 0.03395660781860352, 0.03419075012207031, 0.03394303894042969, 0.03406742477416992, 0.03387187194824219, 0.03408835220336914, 0.034789985656738284, 0.03487948989868164, 0.0349409294128418, 0.03435520172119141, 0.03406643295288086]",tokens/s,28.895119140476183,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,2063.392768,2350.77632,0.0,1948.254208,1873.42336,s,1,9.2428017578125,9.2428017578125,0.0,9.2428017578125,9.2428017578125,9.2428017578125,9.2428017578125,[9.2428017578125],,kWh,5.7910573087497135e-05,6.380466043640964e-06,1.7610847422008802e-05,8.19018865531469e-05,,MB,2131.02592,2451.439616,0.0,2034.23744,2015.14496,s,10,0.7635896987915038,0.07635896987915039,0.0002324427042233355,0.07630073547363281,0.07651495895385743,0.07675074272155762,0.07693936973571777,"[0.07698652648925781, 0.07620816040039062, 0.07634873962402344, 0.0761461410522461, 0.07627168273925782, 0.07614002990722656, 0.07627862548828125, 0.07646256256103516, 0.07642438507080078, 0.07632284545898438]",tokens/s,3352.5858246275284,kWh,2.286161325227868e-06,2.5209155472040226e-07,1.509456502703059e-06,4.047709382651329e-06,tokens/kWh,63245647.30294816,MB,2135.928832,2451.439616,0.0,2034.23744,2015.14752,s,10,13.666246459960938,1.3666246459960938,0.014425212861351832,1.3677744140625,1.3836555053710937,1.3837059875488282,1.3837463732910156,"[1.379995849609375, 1.362877685546875, 1.3497362060546876, 1.3430860595703125, 1.3655684814453124, 1.349014404296875, 1.3699803466796876, 1.3837564697265625, 1.383644287109375, 1.378586669921875]",tokens/s,46.098978373159,kWh,3.9398439371442235e-05,4.345298330284794e-06,2.209672904889704e-05,6.584046675062408e-05,tokens/kWh,956858.344255326,,s,630,13.663810777664184,0.021688588535974898,0.0005430316079600029,0.02167350387573242,0.02215102062225342,0.022367539501190185,0.02380048198699951,"[0.022534143447875975, 0.02171404838562012, 0.021781312942504884, 0.02175923156738281, 0.021527360916137696, 0.02182534408569336, 0.022742591857910156, 0.02169001579284668, 0.021685216903686525, 0.021359935760498047, 
0.02174991989135742, 0.021774911880493165, 0.02155673599243164, 0.021559776306152342, 0.021804319381713868, 0.021749696731567382, 0.021758752822875975, 0.021463136672973632, 0.02169363212585449, 0.023484256744384764, 0.021555679321289063, 0.021543327331542968, 0.02175129508972168, 0.02174812889099121, 0.021706560134887695, 0.02155513572692871, 0.021918079376220704, 0.021566719055175782, 0.021517023086547852, 0.02155939292907715, 0.021530080795288085, 0.021551616668701173, 0.021508031845092774, 0.02187238311767578, 0.02177395248413086, 0.022082271575927733, 0.02183772850036621, 0.022091775894165038, 0.022062623977661133, 0.022084064483642578, 0.022112255096435548, 0.022269952774047853, 0.02199545669555664, 0.0221778564453125, 0.022000831604003908, 0.021985952377319335, 0.021997087478637694, 0.022026432037353515, 0.022042495727539062, 0.022032608032226564, 0.022108383178710937, 0.022216543197631836, 0.02236240005493164, 0.02212380790710449, 0.02206096076965332, 0.02217612838745117, 0.02200227165222168, 0.022019935607910157, 0.02197881507873535, 0.02209619140625, 0.021956607818603514, 0.022068960189819336, 0.02190979194641113, 0.02214963150024414, 0.021841119766235352, 0.021787424087524414, 0.021927135467529297, 0.021975008010864258, 0.02201273536682129, 0.021946367263793946, 0.022011295318603515, 0.021903968811035155, 0.022013952255249023, 0.021909503936767577, 0.022048608779907226, 0.022110368728637694, 0.021996831893920897, 0.02277587127685547, 0.023138975143432616, 0.023209375381469728, 0.02235251235961914, 0.022295520782470702, 0.022414112091064455, 0.022154624938964845, 0.022044544219970704, 0.022217695236206054, 0.021630847930908203, 0.02149782371520996, 0.021299360275268554, 0.021313535690307618, 0.02127667236328125, 0.02139507293701172, 0.021250431060791015, 0.02124595260620117, 0.021363935470581054, 0.02106857681274414, 0.02115497589111328, 0.021021535873413086, 0.02122972869873047, 0.02130441665649414, 0.021031423568725584, 0.021059328079223633, 0.021017087936401366, 0.020972768783569337, 0.0212139835357666, 0.02154003143310547, 0.02145142364501953, 0.021135520935058594, 0.021220447540283204, 0.02102569580078125, 0.021086208343505858, 0.021088064193725584, 0.021815488815307617, 0.025116064071655272, 0.02136332893371582, 0.021352415084838868, 0.021275968551635743, 0.021062335968017577, 0.02099772834777832, 0.020998559951782226, 0.02104115104675293, 0.021313535690307618, 0.021052736282348633, 0.02106585693359375, 0.020961856842041014, 0.021116960525512696, 0.021385087966918945, 0.021139583587646484, 0.024149471282958985, 0.021502496719360352, 0.021157760620117188, 0.021644575119018555, 0.021004863739013672, 0.02092166328430176, 0.020952032089233397, 0.021019872665405274, 0.020867071151733398, 0.02091859245300293, 0.020838592529296877, 0.020846080780029298, 0.020943647384643556, 0.021175392150878908, 0.021093280792236328, 0.02110684776306152, 0.0209836483001709, 0.02096732711791992, 0.02092985534667969, 0.021078399658203125, 0.0213221435546875, 0.02122271919250488, 0.021105344772338868, 0.022031967163085937, 0.021559711456298827, 0.021172224044799806, 0.02116387176513672, 0.021587936401367188, 0.02137107276916504, 0.02133363151550293, 0.0213753604888916, 0.02139913558959961, 0.021208959579467772, 0.021235328674316406, 0.020982688903808593, 0.020920320510864256, 0.021227584838867188, 0.021112768173217774, 0.021204992294311522, 0.021219327926635743, 0.02149337577819824, 0.021794591903686523, 0.021826143264770507, 0.021608064651489258, 0.022024576187133788, 0.021991424560546875, 
0.021641183853149414, 0.02187014389038086, 0.021907936096191405, 0.0220446720123291, 0.02173535919189453, 0.021780160903930663, 0.021684192657470704, 0.021547168731689454, 0.021715200424194336, 0.021677568435668947, 0.02222336006164551, 0.021777631759643555, 0.02150275230407715, 0.021860511779785156, 0.021433343887329103, 0.021615615844726564, 0.021457984924316408, 0.021331199645996092, 0.0215067195892334, 0.021409824371337892, 0.021325824737548828, 0.021372928619384765, 0.021263776779174806, 0.021119583129882814, 0.021374143600463868, 0.022067039489746094, 0.021267040252685547, 0.02123200035095215, 0.022056127548217775, 0.02146940803527832, 0.021537376403808595, 0.02125788879394531, 0.021172096252441406, 0.021469375610351563, 0.02132809638977051, 0.021263935089111327, 0.021070240020751953, 0.02118230438232422, 0.02122777557373047, 0.021212352752685546, 0.021209087371826172, 0.021281600952148438, 0.021401344299316408, 0.021350656509399414, 0.02128486442565918, 0.021706047058105468, 0.021182207107543944, 0.02125494384765625, 0.021373088836669923, 0.02129305648803711, 0.02106563186645508, 0.021038848876953124, 0.021174240112304687, 0.021516672134399416, 0.021211135864257814, 0.02096303939819336, 0.02092598342895508, 0.021027584075927735, 0.02107596778869629, 0.020891616821289063, 0.021669919967651368, 0.021155839920043946, 0.02106572723388672, 0.021350175857543945, 0.021231424331665038, 0.021410207748413086, 0.02144438362121582, 0.021446880340576173, 0.021557247161865235, 0.02146073532104492, 0.021184768676757813, 0.02167705535888672, 0.021375999450683594, 0.021121023178100586, 0.021058784484863282, 0.021141855239868165, 0.021352895736694334, 0.0213439998626709, 0.022484415054321288, 0.021563968658447265, 0.02152038383483887, 0.021452320098876952, 0.021538528442382812, 0.021799007415771485, 0.021719711303710938, 0.022358015060424806, 0.021669952392578126, 0.021787776947021484, 0.021445568084716798, 0.02178060722351074, 0.02171878433227539, 0.02187059211730957, 0.022118207931518554, 0.021960479736328125, 0.021714975357055664, 0.021590368270874023, 0.02194771194458008, 0.02152092742919922, 0.02117856025695801, 0.021239519119262695, 0.02127881622314453, 0.02137049674987793, 0.02139174461364746, 0.02141116714477539, 0.02141270446777344, 0.02125404739379883, 0.021121120452880858, 0.021161760330200195, 0.021242080688476564, 0.02126438331604004, 0.021300928115844726, 0.021806432723999025, 0.02194326400756836, 0.02217964744567871, 0.021807296752929688, 0.021855424880981446, 0.021807935714721678, 0.021834815979003906, 0.021816255569458008, 0.021871967315673826, 0.021820064544677734, 0.021880256652832032, 0.021733951568603516, 0.021935359954833984, 0.0217259521484375, 0.021778079986572267, 0.021788095474243162, 0.02160848045349121, 0.021625728607177735, 0.02168832015991211, 0.02176540756225586, 0.023108320236206056, 0.021763711929321288, 0.02176243209838867, 0.021614048004150392, 0.021545600891113282, 0.02144041633605957, 0.021423711776733398, 0.02145526313781738, 0.021372928619384765, 0.021394975662231447, 0.02168003273010254, 0.021339231491088868, 0.02117068862915039, 0.021137887954711915, 0.02109235191345215, 0.02125823974609375, 0.021186559677124024, 0.021297407150268555, 0.021388608932495116, 0.02128879928588867, 0.021584480285644532, 0.0220296630859375, 0.021596832275390623, 0.021362688064575194, 0.021381120681762695, 0.021687679290771485, 0.021291648864746094, 0.02126233673095703, 0.02122137641906738, 0.021762048721313477, 0.02104319953918457, 0.02244361686706543, 0.02374697685241699, 
0.02146918487548828, 0.021306495666503906, 0.02108233642578125, 0.021023391723632812, 0.020977664947509765, 0.020943967819213868, 0.02133475112915039, 0.02109459114074707, 0.021215232849121093, 0.0211180477142334, 0.021563392639160156, 0.021039743423461914, 0.021173952102661132, 0.021233983993530273, 0.02107027244567871, 0.021068864822387696, 0.02101641654968262, 0.020968351364135742, 0.021163936614990234, 0.021272703170776366, 0.02124799919128418, 0.021139455795288087, 0.02128060722351074, 0.02120252799987793, 0.02141049575805664, 0.02147862434387207, 0.023001472473144532, 0.022426080703735352, 0.021704511642456056, 0.02162483215332031, 0.02122857666015625, 0.021011360168457033, 0.0209881591796875, 0.021223232269287108, 0.021202079772949217, 0.02142255973815918, 0.021594079971313476, 0.02174403190612793, 0.021721088409423828, 0.02171641540527344, 0.023201759338378907, 0.021864479064941406, 0.02190745544433594, 0.021778207778930664, 0.02213500785827637, 0.0220263671875, 0.0218602237701416, 0.021876031875610352, 0.02183203125, 0.022026592254638672, 0.02185625648498535, 0.021967071533203125, 0.022009183883666992, 0.022563232421875, 0.022111743927001954, 0.022263519287109373, 0.022024192810058595, 0.021764928817749024, 0.02190745544433594, 0.022032384872436524, 0.02189036750793457, 0.021877439498901367, 0.02208563232421875, 0.02205286407470703, 0.02215936088562012, 0.023822336196899413, 0.0216977596282959, 0.02150275230407715, 0.021335519790649415, 0.021294719696044923, 0.021115808486938475, 0.021042943954467774, 0.02106188774108887, 0.021345375061035156, 0.02156947135925293, 0.021586208343505858, 0.021729984283447266, 0.021569536209106444, 0.021753856658935547, 0.021831680297851562, 0.021499456405639647, 0.021623231887817382, 0.021308799743652344, 0.021265216827392578, 0.02119865608215332, 0.02104710388183594, 0.020938432693481446, 0.021264896392822266, 0.021438079833984373, 0.021315935134887696, 0.021198944091796876, 0.021518272399902345, 0.02122502326965332, 0.021225536346435547, 0.021518720626831054, 0.021649343490600586, 0.02160646438598633, 0.021501855850219728, 0.021577152252197265, 0.022166400909423827, 0.022054527282714845, 0.02214019203186035, 0.022094112396240234, 0.022655616760253905, 0.02206447982788086, 0.022213472366333007, 0.022113824844360353, 0.022460735321044922, 0.022063264846801756, 0.022063104629516602, 0.022264064788818358, 0.022058752059936522, 0.022154304504394533, 0.02196950340270996, 0.02180131149291992, 0.021936128616333008, 0.022036575317382814, 0.022177183151245117, 0.02222073554992676, 0.02202272033691406, 0.02200796890258789, 0.022055967330932617, 0.022122976303100585, 0.022236831665039064, 0.02206492805480957, 0.022045343399047852, 0.021868320465087892, 0.021948223114013673, 0.021969568252563475, 0.02186854362487793, 0.021919424057006837, 0.021827648162841797, 0.02184182357788086, 0.022351392745971678, 0.02224211120605469, 0.02268742370605469, 0.02271059226989746, 0.022016000747680665, 0.021946367263793946, 0.022034431457519533, 0.021894399642944335, 0.022231807708740236, 0.021948640823364257, 0.02191107177734375, 0.021838079452514647, 0.02191155242919922, 0.021731327056884766, 0.021526527404785157, 0.02137478446960449, 0.021350175857543945, 0.021430688858032225, 0.021792768478393554, 0.02152038383483887, 0.021350400924682617, 0.021387487411499023, 0.021410688400268555, 0.021726112365722656, 0.021913471221923827, 0.021910783767700195, 0.021891359329223634, 0.021852767944335938, 0.021836864471435548, 0.021869247436523437, 0.021900768280029296, 
0.021904159545898437, 0.022032384872436524, 0.022974239349365235, 0.02211737632751465, 0.02206502342224121, 0.02197871971130371, 0.021860191345214844, 0.021850624084472657, 0.021946559906005858, 0.021883104324340822, 0.021638944625854493, 0.02191689682006836, 0.021533472061157227, 0.022364160537719727, 0.023390207290649414, 0.024025087356567384, 0.021323776245117186, 0.02129248046875, 0.021084543228149413, 0.021335872650146484, 0.021150079727172852, 0.021366783142089844, 0.023100799560546875, 0.021992063522338866, 0.021185983657836915, 0.021166303634643554, 0.02120025634765625, 0.02146976089477539, 0.021871007919311524, 0.021831071853637696, 0.021848127365112303, 0.022124671936035158, 0.021911136627197264, 0.02181407928466797, 0.021968896865844727, 0.02208153533935547, 0.022370304107666016, 0.021931711196899413, 0.021819711685180664, 0.02177395248413086, 0.021919456481933594, 0.022123071670532228, 0.022040672302246093, 0.02205695915222168, 0.022018047332763673, 0.02210793685913086, 0.022110176086425782, 0.02208153533935547, 0.02264908790588379, 0.025003551483154297, 0.021971424102783202, 0.021968896865844727, 0.0219238395690918, 0.021884511947631836, 0.021688032150268554, 0.021655231475830077, 0.021498880386352538, 0.021540864944458008, 0.021785696029663085, 0.021799360275268555, 0.021694944381713866, 0.022041919708251954, 0.021746496200561523, 0.02169759941101074, 0.021812320709228516, 0.025626752853393556, 0.024633344650268556, 0.022327167510986328, 0.022063039779663087, 0.02195270347595215, 0.022016000747680665, 0.02144220733642578, 0.021648895263671874, 0.021928415298461915, 0.02168828773498535, 0.02156175994873047, 0.02162892723083496, 0.021522432327270507, 0.021518335342407227, 0.021501056671142577, 0.021377920150756836, 0.021000192642211913, 0.021104639053344726, 0.02166374397277832, 0.021916799545288086, 0.021420927047729493, 0.021303295135498047, 0.02149590492248535, 0.021859968185424804, 0.021862688064575194, 0.02145884895324707, 0.02130735969543457, 0.02135865592956543, 0.021314943313598633, 0.02137772750854492, 0.02134774398803711, 0.02132643127441406, 0.021301248550415038, 0.02128895950317383, 0.021393407821655275, 0.021312543869018555, 0.02174857521057129, 0.02189913558959961, 0.02187494468688965, 0.022417407989501953, 0.021987327575683592, 0.02204876708984375, 0.021992767333984375, 0.022204927444458008, 0.022023359298706056, 0.022012319564819336, 0.021948160171508788, 0.021977951049804687, 0.021979135513305666, 0.021970943450927736, 0.022013023376464845, 0.022186912536621094, 0.022175743103027345, 0.02230886459350586, 0.02208153533935547, 0.021892223358154297, 0.022048831939697266, 0.022230976104736327, 0.02211724853515625, 0.022035743713378905, 0.022073471069335937, 0.022065696716308595, 0.02215065574645996]",tokens/s,46.107195880511,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,7184.310272,7954.366464,0.0,7551.844352,7485.12768,s,1,13.24946875,13.24946875,0.0,13.24946875,13.24946875,13.24946875,13.24946875,[13.24946875],,kWh,0.00016912939204582924,1.8644150291138583e-05,5.4434210213991086e-05,0.00024220775255095892,,MB,2824.76544,8243.77344,0.0,7826.571264,7735.225344,s,10,3.4801188964843752,0.34801188964843754,0.0007169999590072981,0.3478880157470703,0.348765234375,0.34899913330078125,0.3491862524414062,"[0.34772744750976564, 0.34717376708984377, 0.347068603515625, 0.348048583984375, 0.34761251831054685, 0.34860671997070314, 0.3472724914550781, 0.3487132568359375, 0.3486624755859375, 0.3492330322265625]",tokens/s,735.6070514102602,kWh,1.0181624958047789e-05,1.1228344657361814e-06,6.777744885792372e-06,1.808220430957634e-05,tokens/kWh,14157565.948108567,MB,2835.095552,8285.71648,0.0,7868.514304,7758.594048,s,10,25.94851904296875,2.594851904296875,0.005046201768738657,2.5936531982421878,2.6015237548828125,2.601653723144531,2.6017576977539063,"[2.58699609375, 2.58734228515625, 2.593427001953125, 2.5924814453125, 2.59387939453125, 2.593269287109375, 2.599162841796875, 2.59868212890625, 2.60178369140625, 2.601494873046875]",tokens/s,24.278842232066058,kWh,7.607375449403551e-05,8.391069024112311e-06,5.0603801019408636e-05,0.00013506862453755648,tokens/kWh,466429.5665680859,,s,630,25.94561858367922,0.041183521561395556,0.00033474247518787295,0.04117406463623047,0.04162982749938965,0.041736821937561036,0.04188318634033203,"[0.041609119415283204, 0.04080239868164062, 0.04111769485473633, 0.04093753433227539, 0.040914878845214844, 0.04085760116577149, 0.04088639831542969, 0.040648128509521486, 0.04076512145996094, 0.04074883270263672, 0.04055721664428711, 0.040546112060546875, 0.04079257583618164, 0.04065689468383789, 0.04081856155395508, 0.04080038452148437, 0.04079206466674805, 0.040634174346923825, 0.04055878448486328, 0.04052377700805664, 0.040697856903076174, 0.04067558288574219, 0.04065459060668945, 0.040531967163085936, 0.0407347183227539, 0.040820735931396485, 0.04077568054199219, 0.04072447967529297, 0.04077888107299805, 0.04075158309936523, 0.0410557746887207, 0.04111967849731445, 0.04099168014526367, 0.0409989128112793, 0.041414657592773435, 0.04140812683105469, 0.04124710464477539, 0.04147932815551758, 0.04144828796386719, 0.04154304122924805, 0.04126988983154297, 0.04120371246337891, 0.041266239166259766, 0.04136236953735352, 0.04128531265258789, 0.041031009674072264, 0.041247711181640626, 0.04125696182250976, 0.04118937683105469, 0.04108902359008789, 0.041264801025390624, 0.0412163200378418, 0.04101839828491211, 0.041124927520751954, 0.04124585723876953, 0.041126686096191405, 0.04158054351806641, 0.041670654296875, 0.04169113540649414, 0.04158796691894531, 0.041767681121826175, 0.04175667190551758, 0.0416228141784668, 0.04147814559936523, 0.04082451248168945, 0.040854942321777346, 0.04077865600585937, 0.040853374481201174, 0.040845569610595704, 0.040782752990722655, 0.040580062866210936, 0.04049932861328125, 0.04033932876586914, 0.04044156646728515, 0.0406080322265625, 0.04065280151367188, 0.040654880523681644, 0.04092720031738281, 0.04097433471679687, 0.04080230331420898, 0.040621822357177734, 0.04057040023803711, 0.04043747329711914, 0.04054121780395508, 0.04066300964355469, 0.040673152923583984, 0.040968318939208985, 0.041285472869873045, 0.04134931182861328, 0.041361377716064456, 0.04122828674316406, 0.041705215454101566, 0.04161151885986328, 0.04165820693969727, 0.041087390899658204, 
0.04114611053466797, 0.041121761322021486, 0.041121822357177734, 0.041244384765625, 0.04121219253540039, 0.04105363082885742, 0.04114694213867188, 0.041309696197509765, 0.04123846435546875, 0.041080543518066406, 0.04086870574951172, 0.04070518493652344, 0.040733470916748046, 0.040752670288085935, 0.04093596649169922, 0.040937473297119144, 0.04095180892944336, 0.04112303924560547, 0.04153833770751953, 0.04155187225341797, 0.041908222198486327, 0.041739742279052736, 0.04184288024902344, 0.041347423553466794, 0.04143212890625, 0.0412927360534668, 0.0414060173034668, 0.04129942321777344, 0.041477088928222654, 0.041432670593261715, 0.04143503952026367, 0.04136220932006836, 0.041121280670166016, 0.04141107177734375, 0.04122796630859375, 0.04149689483642578, 0.04126924896240235, 0.04129916763305664, 0.041020191192626954, 0.04107059097290039, 0.04102963256835938, 0.04106444931030274, 0.04122175979614258, 0.04107516860961914, 0.041250720977783206, 0.04124671936035156, 0.04132153701782226, 0.04102560043334961, 0.04083123016357422, 0.04061452865600586, 0.04058726501464844, 0.04090195083618164, 0.04068207931518555, 0.04062019348144531, 0.04076332855224609, 0.04081782531738281, 0.04076835250854492, 0.04079158401489258, 0.040844993591308595, 0.040864543914794924, 0.04085760116577149, 0.04088140869140625, 0.04094028854370117, 0.040904705047607424, 0.041131935119628905, 0.04090233612060547, 0.040933441162109375, 0.041209598541259766, 0.041149024963378904, 0.04148633575439453, 0.04148633575439453, 0.04180275344848633, 0.0417507209777832, 0.041530174255371095, 0.04140236663818359, 0.041215999603271485, 0.04131244659423828, 0.04124764633178711, 0.041271232604980466, 0.041343647003173827, 0.0412020492553711, 0.04132447814941406, 0.04127334213256836, 0.04135465621948242, 0.04132515335083008, 0.041193473815917966, 0.04128153610229492, 0.041267200469970705, 0.041132030487060545, 0.04114636611938476, 0.04130416107177735, 0.04131011199951172, 0.04166041564941406, 0.04197299194335938, 0.041488384246826174, 0.0411297607421875, 0.040984798431396484, 0.04094771194458008, 0.040937473297119144, 0.04098867034912109, 0.04095180892944336, 0.040925182342529294, 0.0408941764831543, 0.040873695373535156, 0.0406308479309082, 0.04047257614135742, 0.04096614456176758, 0.04094771194458008, 0.04132812881469727, 0.040876544952392575, 0.04084735870361328, 0.040664257049560545, 0.04055868911743164, 0.040701759338378905, 0.04067324829101562, 0.040582080841064454, 0.04051148986816406, 0.04048880004882813, 0.04058537673950195, 0.04078387069702148, 0.04110540771484375, 0.04113817596435547, 0.04139212799072266, 0.041403873443603516, 0.04137628936767578, 0.04175465774536133, 0.04162761688232422, 0.041430782318115235, 0.04147577667236328, 0.04121222305297852, 0.04114051055908203, 0.04148630523681641, 0.04139548873901367, 0.04151174545288086, 0.04134288024902344, 0.041242366790771486, 0.04111999893188477, 0.041134078979492186, 0.0409989128112793, 0.04101939010620117, 0.04099283218383789, 0.04105612945556641, 0.04097548675537109, 0.04084339141845703, 0.04105436706542969, 0.04153539276123047, 0.04154345703125, 0.04141308975219726, 0.04178947067260742, 0.041645950317382815, 0.041708255767822264, 0.0414964485168457, 0.04163187026977539, 0.041642974853515625, 0.041460254669189456, 0.04164614486694336, 0.04171401596069336, 0.04154735946655273, 0.0411693115234375, 0.041163776397705076, 0.041003711700439455, 0.040866111755371096, 0.04104806518554688, 0.040992767333984374, 0.041092384338378904, 0.04105046463012695, 0.041320831298828124, 
0.041078048706054686, 0.04107894515991211, 0.04111977767944336, 0.041691680908203126, 0.04141788864135742, 0.04140745544433594, 0.04113996887207031, 0.04107276916503906, 0.040925182342529294, 0.04087782287597656, 0.04081689453125, 0.0409804801940918, 0.04101740646362305, 0.04090678405761719, 0.040685470581054685, 0.04069375991821289, 0.040667137145996096, 0.040515583038330076, 0.040622081756591794, 0.040799713134765624, 0.04078985595703125, 0.04077571105957031, 0.040783935546875, 0.0408704948425293, 0.04086175918579102, 0.04099238586425781, 0.041142398834228516, 0.04159932708740234, 0.0417872314453125, 0.041662464141845705, 0.04154998397827148, 0.04155148696899414, 0.04131248092651367, 0.04167209625244141, 0.04155446243286133, 0.04155158233642578, 0.04152902221679688, 0.04125724792480469, 0.0412248649597168, 0.04125062561035156, 0.04117084884643555, 0.04097228622436523, 0.041002529144287106, 0.040960479736328125, 0.04101939010620117, 0.04104191970825195, 0.04100409698486328, 0.04144643020629883, 0.04169516754150391, 0.041625057220458984, 0.04164998245239258, 0.041704128265380856, 0.04182128143310547, 0.04150067138671875, 0.04111769485473633, 0.041125408172607424, 0.040855777740478515, 0.040712448120117185, 0.040837120056152344, 0.04088332748413086, 0.04078681564331055, 0.04067062377929687, 0.04050198364257813, 0.040427391052246096, 0.04062822341918945, 0.040622081756591794, 0.040959999084472655, 0.04106444931030274, 0.04100716781616211, 0.04091897583007813, 0.04084531021118164, 0.040939231872558594, 0.04124496078491211, 0.04157030487060547, 0.04144947052001953, 0.04128911972045898, 0.04124323272705078, 0.04096819305419922, 0.04149238586425781, 0.04133827209472656, 0.04112044906616211, 0.04129177474975586, 0.04137750244140625, 0.04114662551879883, 0.041135616302490234, 0.04109161758422852, 0.040987712860107425, 0.04110841751098633, 0.04118310546875, 0.041291519165039064, 0.041258399963378906, 0.041231327056884766, 0.04111148834228515, 0.04103548812866211, 0.04092963027954102, 0.04095590209960937, 0.04093132781982422, 0.04099071884155273, 0.0412789421081543, 0.04118377685546875, 0.041132030487060545, 0.04146585464477539, 0.04145500946044922, 0.04187993621826172, 0.04174204635620117, 0.04152166366577149, 0.04138598251342773, 0.04153343963623047, 0.041373695373535156, 0.041383167266845704, 0.041306686401367185, 0.04144508743286133, 0.04144124984741211, 0.04143158340454101, 0.04143465423583984, 0.041417152404785156, 0.041578495025634765, 0.04134624099731445, 0.04148307037353516, 0.04127743911743164, 0.04119043350219727, 0.041210399627685544, 0.04114195251464844, 0.04103200149536133, 0.041054656982421875, 0.0410145263671875, 0.0409911994934082, 0.04101763153076172, 0.04123551940917969, 0.04142176055908203, 0.041367263793945314, 0.041047519683837894, 0.04112636947631836, 0.04106588745117187, 0.04090771102905273, 0.0409989128112793, 0.04076544189453125, 0.04073267364501953, 0.040744991302490235, 0.04076950454711914, 0.040809825897216795, 0.04075987243652344, 0.04076553726196289, 0.040903743743896485, 0.04101388931274414, 0.04093939208984375, 0.041013694763183596, 0.040978431701660156, 0.04137884902954102, 0.0414238395690918, 0.04131804656982422, 0.04185327911376953, 0.041799678802490234, 0.04161049652099609, 0.04151958465576172, 0.041500961303710934, 0.0415695686340332, 0.0413191032409668, 0.04134096145629883, 0.041250816345214845, 0.0411317138671875, 0.041226558685302735, 0.04131945419311524, 0.04115897750854492, 0.04126508712768555, 0.04124131011962891, 0.04112384033203125, 0.04120576095581055, 
0.04127132797241211, 0.04125049591064453, 0.04155625534057617, 0.04162883377075195, 0.04148310470581055, 0.041603103637695316, 0.04167472076416016, 0.04173932647705078, 0.041839393615722656, 0.04173376083374023, 0.04183113479614258, 0.04162432098388672, 0.04109737777709961, 0.04126416015625, 0.041227073669433595, 0.041191585540771486, 0.0409989128112793, 0.040853504180908204, 0.040830463409423826, 0.04119305419921875, 0.04102598571777344, 0.04104240036010742, 0.041258174896240236, 0.0414666862487793, 0.041320480346679685, 0.041398273468017575, 0.04128716659545899, 0.04126118469238281, 0.04100540924072266, 0.040906944274902345, 0.040916801452636715, 0.04090662384033203, 0.041021503448486325, 0.041073726654052733, 0.04089929580688476, 0.04078140640258789, 0.0407599983215332, 0.040767040252685544, 0.040833473205566406, 0.04072243118286133, 0.04083241653442383, 0.0408276481628418, 0.04080009460449219, 0.04083200073242187, 0.0418007698059082, 0.04104995346069336, 0.041455711364746094, 0.04182425689697265, 0.04166041564941406, 0.04182636642456055, 0.04159606552124023, 0.041508705139160156, 0.041512126922607424, 0.04148524856567383, 0.04128992080688477, 0.04153203201293945, 0.04150067138671875, 0.04139212799072266, 0.04133020782470703, 0.04131683349609375, 0.04119356918334961, 0.041170303344726565, 0.04111414337158203, 0.0409804801940918, 0.04100096130371094, 0.041132030487060545, 0.041560062408447264, 0.04143513488769531, 0.04154777526855469, 0.041578590393066404, 0.04141660690307617, 0.041976959228515624, 0.04229123306274414, 0.041683616638183596, 0.04160204696655274, 0.04088435363769531, 0.04086739349365234, 0.040982463836669925, 0.04114387130737305, 0.04105503845214844, 0.04112384033203125, 0.04121734237670899, 0.0412474250793457, 0.04103724670410156, 0.041662208557128905, 0.041068672180175785, 0.04109587097167969, 0.041336830139160154, 0.041490528106689455, 0.041629600524902347, 0.04145971298217774, 0.04136707305908203, 0.041118175506591796, 0.04114960098266601, 0.04111222457885742, 0.04117728042602539, 0.041186817169189455, 0.041124351501464845, 0.04113817596435547, 0.04098787307739258, 0.04085561752319336, 0.04080297470092773, 0.04080441665649414, 0.04084726333618164, 0.04088387298583984, 0.04090105438232422, 0.04082284927368164, 0.041113536834716795, 0.0413983039855957, 0.04144124984741211, 0.041438846588134765, 0.04182668685913086, 0.04162169647216797, 0.04148937606811524, 0.04188451385498047, 0.04172102355957031, 0.04149945449829102, 0.041289726257324216, 0.04154697418212891, 0.04158969497680664, 0.04156598281860351, 0.04135737609863281, 0.04136483383178711, 0.04136207962036133, 0.04128099060058594, 0.04099331283569336, 0.041183231353759765, 0.04120182418823242, 0.04122198486328125, 0.04148739242553711, 0.041511390686035154, 0.04160768127441406, 0.041645729064941406, 0.04145100784301758, 0.041683456420898435, 0.041726303100585935, 0.041809310913085936, 0.041771934509277346, 0.04145356750488281, 0.041573535919189455, 0.041374561309814456, 0.04097433471679687, 0.04113011169433594, 0.041000831604003904, 0.04095590209960937, 0.04098867034912109, 0.04102348709106445, 0.04110153579711914, 0.041002784729003906, 0.04105830383300781, 0.04154982376098633, 0.041381057739257814, 0.04130489730834961, 0.0410478401184082, 0.04091516876220703, 0.04112998580932617, 0.04109449768066406, 0.041357982635498045, 0.04094976043701172, 0.04086579132080078, 0.040959999084472655, 0.04107167816162109, 0.04094003295898437, 0.04095391845703125, 0.040736286163330075, 0.04078396987915039, 0.040774398803710935, 
0.040755199432373046, 0.04089023971557617, 0.04132473754882812, 0.04138387298583984, 0.04126224136352539, 0.04151792144775391, 0.04167667388916016, 0.041846977233886716, 0.041662303924560544, 0.04145724868774414, 0.041511425018310545, 0.04130758285522461, 0.04147011184692383, 0.041492897033691405, 0.04152265548706055, 0.041275264739990235, 0.04134515380859375, 0.041345569610595705, 0.041289726257324216, 0.04131174468994141, 0.041236286163330076, 0.04111849594116211, 0.04151295852661133, 0.04159888076782227, 0.0413935661315918, 0.04165897750854492, 0.04163520050048828, 0.04162009429931641, 0.041796993255615235, 0.041330944061279296, 0.04188800048828125, 0.04165439987182617, 0.041885696411132815]",tokens/s,24.281556362517964,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1388.331008,1490.944,0.0,1088.421888,1083.532288,s,1,8.866751953125,8.866751953125,0.0,8.866751953125,8.866751953125,8.866751953125,8.866751953125,[8.866751953125],,kWh,3.712971701250656e-05,4.088070360566613e-06,1.0539175097995512e-05,5.175696247106869e-05,,MB,1494.925312,1608.384512,0.0,1191.182336,1163.39968,s,10,0.4624177589416504,0.04624177589416504,0.00014340797535724757,0.04619683074951172,0.04647196006774902,0.04649380397796631,0.04651127910614014,"[0.04646710586547852, 0.046110240936279294, 0.04608713531494141, 0.046515647888183596, 0.04620003128051758, 0.04610873413085938, 0.04621900939941406, 0.04619363021850586, 0.046347137451171874, 0.04616908645629883]",tokens/s,5536.119559636182,kWh,1.3846145214060558e-06,1.5269729859713205e-07,9.154364880094702e-07,2.4527483080126582e-06,tokens/kWh,104372714.95150852,MB,1498.845184,1650.327552,0.0,1233.125376,1163.40224,s,10,12.103496582031248,1.2103496582031248,0.012407942718434522,1.2047265014648438,1.2240706298828126,1.2293207763671874,1.2335208935546875,"[1.1987220458984376, 1.202774658203125, 1.2226396484375, 1.2179346923828125, 1.1995107421875, 1.1986546630859376, 1.19910693359375, 1.2066783447265625, 1.2345709228515624, 1.2229039306640626]",tokens/s,52.051074309823235,kWh,3.475991765817438e-05,3.833569158979199e-06,1.6180049410591356e-05,5.4773536227744925e-05,tokens/kWh,1150190.48136768,,s,630,12.101098878860455,0.01920809345850869,0.0004978213486695474,0.019046671867370607,0.01975124111175537,0.019903335571289062,0.021250968952178956,"[0.019033567428588867, 0.018886911392211915, 0.01884160041809082, 0.018824607849121093, 0.018878143310546876, 0.0188056640625, 0.018736928939819337, 0.018779455184936525, 0.019138816833496095, 0.018954912185668946, 0.018894847869873048, 0.018929664611816405, 0.01900339126586914, 0.01878835105895996, 0.019333120346069335, 0.019369983673095705, 0.019099647521972657, 0.018984960556030273, 0.018931711196899414, 0.018997247695922852, 0.018970624923706055, 0.01887843132019043, 0.018944032669067384, 0.018845216751098633, 0.018766304016113282, 0.018896896362304686, 0.018836992263793945, 
0.018901504516601563, 0.018761728286743166, 0.01884979248046875, 0.019109888076782225, 0.0188939208984375, 0.01903094482421875, 0.023848447799682617, 0.018981216430664062, 0.018872032165527342, 0.019767744064331055, 0.01884886360168457, 0.018923519134521484, 0.018918304443359374, 0.018882560729980468, 0.018784255981445314, 0.018728960037231446, 0.018802303314208985, 0.018768255233764648, 0.01881065559387207, 0.018918655395507813, 0.018936416625976563, 0.018856319427490234, 0.018882335662841795, 0.01885206413269043, 0.0188538875579834, 0.019105951309204103, 0.01913430404663086, 0.019187583923339843, 0.01896771240234375, 0.018901311874389648, 0.01887846374511719, 0.018944671630859375, 0.018895008087158202, 0.019070560455322266, 0.019177728652954102, 0.019120128631591796, 0.018962751388549803, 0.018882560729980468, 0.018834688186645507, 0.01876643180847168, 0.019085248947143554, 0.018929887771606445, 0.018937856674194335, 0.01885215950012207, 0.019055839538574218, 0.019424896240234375, 0.019682111740112303, 0.019644447326660156, 0.01966694450378418, 0.019329023361206055, 0.019199104309082032, 0.019028255462646484, 0.019030624389648438, 0.01899519920349121, 0.019302400588989257, 0.019167232513427734, 0.019331071853637697, 0.021564992904663086, 0.020840896606445312, 0.01907711982727051, 0.018990463256835937, 0.01891596794128418, 0.01889039993286133, 0.018732959747314454, 0.018788799285888672, 0.018823392868041994, 0.01882521629333496, 0.01937385559082031, 0.019302112579345703, 0.01933679962158203, 0.01938025665283203, 0.019337791442871094, 0.019128320693969726, 0.01899020767211914, 0.018949087142944337, 0.018840927124023438, 0.018948768615722655, 0.01901568031311035, 0.01893168067932129, 0.01910108757019043, 0.019006080627441406, 0.019322879791259767, 0.01880169677734375, 0.01893475151062012, 0.018784128189086913, 0.01926521682739258, 0.018799039840698244, 0.018812095642089844, 0.01876665687561035, 0.018714399337768556, 0.018784479141235353, 0.018608160018920898, 0.018656320571899414, 0.01879952049255371, 0.01898700714111328, 0.01883907127380371, 0.01879033660888672, 0.018808544158935545, 0.018887487411499024, 0.019259456634521485, 0.018963008880615233, 0.018835519790649412, 0.018932992935180665, 0.018772415161132813, 0.018934335708618164, 0.018871423721313476, 0.01894259262084961, 0.01883340835571289, 0.01916035270690918, 0.021354623794555664, 0.02022422409057617, 0.019078624725341796, 0.019066911697387695, 0.019011903762817382, 0.018934335708618164, 0.018875904083251953, 0.01915881538391113, 0.019079904556274414, 0.019113983154296875, 0.019249311447143556, 0.01908515167236328, 0.019105791091918945, 0.019606943130493163, 0.01975356864929199, 0.020174848556518556, 0.01906073570251465, 0.018970752716064455, 0.018881471633911132, 0.019065120697021484, 0.019187871932983398, 0.018991615295410155, 0.018955968856811525, 0.019079231262207032, 0.018929920196533202, 0.018944000244140623, 0.01927779197692871, 0.01942927932739258, 0.019136640548706056, 0.019134111404418945, 0.019317087173461915, 0.01946419143676758, 0.019562400817871094, 0.02024185562133789, 0.0197589111328125, 0.019810527801513673, 0.01950783920288086, 0.019598655700683594, 0.01963235282897949, 0.019744863510131837, 0.02003596878051758, 0.020268543243408203, 0.019679744720458983, 0.01978268814086914, 0.019737567901611328, 0.01966694450378418, 0.019505151748657225, 0.019971391677856446, 0.019794464111328125, 0.019851200103759764, 0.019755231857299806, 0.019714271545410156, 0.020418336868286133, 0.02037615966796875, 0.019840831756591796, 
0.019701631546020507, 0.019892255783081056, 0.019670175552368163, 0.019758207321166992, 0.01995952033996582, 0.019759231567382813, 0.01963609504699707, 0.01970796775817871, 0.019688383102416992, 0.01957321548461914, 0.019357791900634767, 0.01933273506164551, 0.019149280548095702, 0.019116384506225586, 0.019178495407104493, 0.019573631286621093, 0.01944937515258789, 0.019679519653320314, 0.01935174369812012, 0.019751039505004883, 0.019290016174316405, 0.01960540771484375, 0.019156480789184572, 0.019163776397705078, 0.018974752426147462, 0.018939903259277344, 0.01888447952270508, 0.01921161651611328, 0.01889142417907715, 0.01886755180358887, 0.01880966377258301, 0.01877996826171875, 0.018804256439208984, 0.018948991775512694, 0.018855712890625, 0.019212287902832033, 0.019025312423706055, 0.019057247161865236, 0.019055648803710936, 0.019012575149536134, 0.01945395278930664, 0.018997247695922852, 0.018865440368652345, 0.018893184661865233, 0.01898736000061035, 0.018972671508789063, 0.0200677433013916, 0.018972864151000978, 0.018803104400634766, 0.019618944168090822, 0.023224319458007812, 0.01932342338562012, 0.019087423324584962, 0.019056928634643554, 0.01925324821472168, 0.01909667205810547, 0.019067808151245116, 0.018998687744140624, 0.019007232666015624, 0.019036447525024414, 0.018898687362670898, 0.0191279354095459, 0.019044736862182617, 0.018782112121582033, 0.018965984344482423, 0.018928255081176758, 0.018957887649536133, 0.018914911270141603, 0.019057504653930663, 0.019703807830810546, 0.0190250244140625, 0.019702335357666016, 0.018972991943359375, 0.019079168319702147, 0.019144256591796874, 0.018852256774902345, 0.01910153579711914, 0.019038400650024413, 0.01915216064453125, 0.019013599395751955, 0.018832128524780275, 0.01896380805969238, 0.01910598373413086, 0.019132896423339842, 0.01940275192260742, 0.01891433525085449, 0.018932607650756834, 0.018931520462036132, 0.01966441535949707, 0.01886031913757324, 0.018964960098266603, 0.018851839065551757, 0.01884160041809082, 0.018867519378662108, 0.01901638412475586, 0.01883750343322754, 0.01909926414489746, 0.018730560302734376, 0.018860544204711914, 0.019103872299194337, 0.02174995231628418, 0.019122175216674805, 0.018943519592285157, 0.018860416412353517, 0.019135616302490235, 0.018809600830078124, 0.018815231323242188, 0.01874940872192383, 0.018847391128540038, 0.018749792098999022, 0.01884160041809082, 0.018748544692993165, 0.01883977508544922, 0.0186845760345459, 0.018884191513061522, 0.019013343811035158, 0.018893472671508788, 0.018890783309936522, 0.01883750343322754, 0.01892492866516113, 0.019560543060302735, 0.01869878387451172, 0.019353599548339845, 0.018875839233398438, 0.01916841506958008, 0.01897875213623047, 0.018891679763793946, 0.01886617660522461, 0.018969823837280273, 0.018926368713378907, 0.018747392654418944, 0.018734912872314453, 0.018837696075439454, 0.01885593605041504, 0.018917375564575196, 0.01960323143005371, 0.019564767837524415, 0.019490816116333007, 0.019386463165283203, 0.01931452751159668, 0.019111520767211915, 0.01904483222961426, 0.019166751861572264, 0.019083295822143555, 0.01900998306274414, 0.01899648094177246, 0.019138591766357422, 0.018985408782958985, 0.018811168670654296, 0.018931711196899414, 0.018900991439819336, 0.018931711196899414, 0.019148351669311524, 0.01903455924987793, 0.01910163116455078, 0.019103071212768555, 0.01920796775817871, 0.01891219139099121, 0.018950143814086915, 0.018911104202270507, 0.0189703369140625, 0.01883513641357422, 0.01884659194946289, 0.01887321662902832, 
0.018867168426513672, 0.018790399551391602, 0.01886207962036133, 0.018890592575073244, 0.018827423095703125, 0.018892576217651367, 0.018936031341552733, 0.018790239334106444, 0.019705759048461915, 0.018820575714111328, 0.018936607360839845, 0.019349023818969725, 0.019055007934570312, 0.01899910354614258, 0.01897065544128418, 0.019169504165649415, 0.019068544387817382, 0.019169536590576172, 0.019218015670776366, 0.019020320892333985, 0.01898700714111328, 0.018920480728149416, 0.018940223693847656, 0.019164928436279295, 0.018833471298217774, 0.020090368270874022, 0.01897542381286621, 0.018814399719238283, 0.018945823669433592, 0.018819967269897462, 0.018939807891845704, 0.018898944854736328, 0.018760000228881836, 0.01886934471130371, 0.01895484733581543, 0.020494335174560546, 0.018896032333374023, 0.01889161682128906, 0.01884876823425293, 0.01883235168457031, 0.01887843132019043, 0.01889900779724121, 0.01914678382873535, 0.018831327438354493, 0.0188723201751709, 0.018753536224365236, 0.01883145523071289, 0.018793920516967773, 0.018834943771362304, 0.01907161521911621, 0.021256351470947267, 0.018964672088623048, 0.018948095321655273, 0.018841472625732422, 0.01885116767883301, 0.01884422492980957, 0.01880086326599121, 0.018734304428100586, 0.01953977584838867, 0.018797536849975587, 0.018806943893432616, 0.019102943420410155, 0.019044960021972656, 0.0187675838470459, 0.01897270393371582, 0.019274015426635743, 0.019262624740600587, 0.018999679565429688, 0.018923391342163087, 0.018815584182739258, 0.018851839065551757, 0.01884569549560547, 0.01882486343383789, 0.01880918312072754, 0.019064319610595702, 0.018850303649902343, 0.01894588851928711, 0.018765247344970704, 0.018937759399414063, 0.01894688034057617, 0.019693567276000978, 0.019582975387573243, 0.01908857536315918, 0.01897881507873535, 0.019263967514038086, 0.018956640243530273, 0.019173023223876953, 0.01879484748840332, 0.01875299263000488, 0.018782751083374023, 0.021237791061401366, 0.019061887741088867, 0.019055456161499024, 0.018788415908813475, 0.018880319595336915, 0.018860288619995117, 0.018950016021728515, 0.018737152099609376, 0.018882144927978517, 0.01888092803955078, 0.018792448043823243, 0.018861568450927735, 0.018844032287597658, 0.01883558464050293, 0.018833215713500977, 0.01911827278137207, 0.018886592864990233, 0.01894406318664551, 0.01966249656677246, 0.01922470474243164, 0.019622112274169923, 0.01979801559448242, 0.019986431121826173, 0.019368032455444335, 0.019174560546875, 0.019548831939697267, 0.019146015167236328, 0.019001535415649414, 0.01901590347290039, 0.0196011848449707, 0.02071206474304199, 0.019333120346069335, 0.01902704048156738, 0.019424160003662108, 0.01906892776489258, 0.019054592132568358, 0.018974367141723632, 0.018951583862304687, 0.019046783447265625, 0.01928771209716797, 0.01902003288269043, 0.018876224517822265, 0.01892848014831543, 0.019095392227172853, 0.01894326400756836, 0.019241247177124023, 0.018901599884033202, 0.018839391708374023, 0.018837696075439454, 0.01900889587402344, 0.01915555191040039, 0.019264991760253907, 0.018917984008789062, 0.01897212791442871, 0.019425760269165038, 0.01936764717102051, 0.019175487518310545, 0.019183839797973633, 0.019209632873535155, 0.020147071838378907, 0.019528799057006836, 0.01938115119934082, 0.019222368240356447, 0.019256895065307617, 0.0217872314453125, 0.01940617561340332, 0.019952064514160157, 0.01978390312194824, 0.020440639495849608, 0.019597312927246095, 0.019528127670288085, 0.019625312805175783, 0.020193599700927736, 0.019800384521484374, 
0.019855295181274414, 0.01991075134277344, 0.019894271850585937, 0.019744768142700195, 0.01965670394897461, 0.019687423706054686, 0.019668127059936525, 0.01972719955444336, 0.01963113594055176, 0.019880928039550782, 0.01987583923339844, 0.019822208404541016, 0.019667392730712892, 0.019853248596191406, 0.019920896530151368, 0.01960531234741211, 0.019699071884155272, 0.019692384719848632, 0.019668928146362306, 0.020068384170532225, 0.019615743637084963, 0.01970790481567383, 0.01971993637084961, 0.01965286445617676, 0.01948041534423828, 0.019415199279785158, 0.019361888885498047, 0.01958684730529785, 0.019523296356201172, 0.0196014404296875, 0.01937846374511719, 0.019395904541015627, 0.019309215545654297, 0.019421312332153322, 0.019346559524536133, 0.019433631896972656, 0.019249887466430665, 0.019046560287475586, 0.019023712158203126, 0.018976415634155273, 0.019200096130371092, 0.018950399398803712, 0.01902387237548828, 0.01907302474975586, 0.019286016464233398, 0.01922662353515625, 0.019064640045166014, 0.0191080322265625, 0.01966636848449707, 0.01960416030883789, 0.019361791610717775, 0.01941868782043457, 0.019349952697753907, 0.019670944213867187, 0.01972185516357422, 0.019646944046020506, 0.0196177921295166, 0.019647615432739258, 0.01991881561279297, 0.019817279815673827, 0.019663999557495117, 0.019712480545043945, 0.01986307144165039, 0.019858400344848634, 0.019810304641723633, 0.019705087661743163, 0.019714815139770508, 0.019873600006103515, 0.02008291244506836, 0.019767263412475585, 0.01967840003967285, 0.019444511413574218, 0.019625919342041016, 0.019753055572509767, 0.019594688415527344, 0.01938489532470703, 0.019316192626953124, 0.01930294418334961, 0.01957811164855957, 0.019298656463623047, 0.019421600341796876, 0.01920614433288574, 0.01924710464477539, 0.019138559341430664, 0.01915014457702637, 0.01919046401977539, 0.019134143829345703, 0.019770944595336914, 0.019237119674682616, 0.019042816162109375, 0.018976768493652343, 0.018894847869873048, 0.018964479446411133, 0.018997024536132813, 0.01903379249572754, 0.018948640823364258, 0.018914688110351564, 0.018944639205932617, 0.01923276710510254, 0.019647615432739258, 0.01939955139160156, 0.019156991958618166, 0.019254848480224608, 0.019091583251953124, 0.018990976333618164, 0.018936256408691406, 0.019029312133789063, 0.01883001518249512, 0.018884607315063476, 0.01976038360595703, 0.019768064498901367]",tokens/s,52.06138767286276,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, 
in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1410.506752,1946.025984,0.0,1543.503872,1510.249472,s,1,8.7410625,8.7410625,0.0,8.7410625,8.7410625,8.7410625,8.7410625,[8.7410625],,kWh,4.418543459166055e-05,4.863981115961554e-06,1.3188343884001918e-05,6.223775959162402e-05,,MB,1402.912768,2021.523456,0.0,1604.32128,1585.251328,s,10,0.42500700378417966,0.042500700378417966,0.0005578459025875528,0.04225854301452637,0.04327015838623047,0.04359379844665527,0.04385271049499512,"[0.04319823837280273, 0.043917438507080075, 0.04213423919677734, 0.042156417846679686, 0.04225151824951172, 0.04240150451660156, 0.04231875228881836, 0.04226556777954102, 0.04215430450439453, 0.04220902252197266]",tokens/s,6023.430148694629,kWh,1.2722033957246149e-06,1.403012353954741e-07,8.470731414260835e-07,2.2595777725461725e-06,tokens/kWh,113295502.86358592,MB,1428.164608,2084.438016,0.0,1667.23584,1589.165568,s,10,11.146312988281249,1.1146312988281248,0.0071437546207716454,1.1135956420898436,1.125074658203125,1.1257984374999999,1.1263774609375,"[1.126522216796875, 1.124913818359375, 1.1041187744140626, 1.111924072265625, 1.105998779296875, 1.1152672119140625, 1.1094910888671874, 1.11149951171875, 1.1163577880859374, 1.1202197265625]",tokens/s,56.52093213804015,kWh,3.226157929052351e-05,3.5579828794126756e-06,1.60256348011754e-05,5.1845196971111584e-05,tokens/kWh,1215155.9581325138,,s,630,11.144129419326799,0.01768909431639172,0.0005342182477299729,0.017548480033874513,0.01808710079193115,0.018285356426239012,0.020533974971771243,"[0.01805548858642578, 0.017972639083862305, 0.01756220817565918, 0.01761907196044922, 0.019945344924926757, 0.01927987289428711, 0.017630271911621094, 0.017781343460083008, 0.01775446319580078, 0.017571840286254883, 0.017497663497924806, 0.017400255203247072, 0.018167808532714845, 0.017685535430908204, 0.01772438430786133, 0.017721311569213867, 0.018224224090576172, 0.01808675193786621, 0.018351232528686524, 0.017743967056274415, 0.017570688247680665, 0.01762099266052246, 0.017596000671386718, 0.017575679779052736, 0.01765033531188965, 0.017517568588256836, 0.0174202880859375, 0.017660255432128905, 0.017489664077758788, 0.017501087188720704, 0.017526784896850587, 0.017506399154663087, 0.017481632232666015, 0.018173952102661133, 0.017879039764404296, 0.01782975959777832, 0.017918079376220704, 0.017580032348632812, 0.01777459144592285, 0.017624351501464845, 0.017893503189086914, 0.017889888763427734, 0.017661951065063478, 0.017678335189819337, 0.017758207321166994, 0.018317312240600587, 0.017779712677001954, 0.01769327926635742, 0.017571840286254883, 0.017650079727172852, 0.017559776306152342, 0.01762678337097168, 0.0177675838470459, 
0.017496959686279297, 0.017581663131713866, 0.017766912460327147, 0.017662208557128908, 0.017985279083251954, 0.018743295669555664, 0.01818623924255371, 0.018124799728393554, 0.018350015640258788, 0.020799711227416993, 0.018688192367553712, 0.018151391983032228, 0.018182336807250978, 0.01831996726989746, 0.018140928268432617, 0.018090240478515623, 0.018112512588500978, 0.018335744857788085, 0.018272192001342773, 0.018260032653808593, 0.01829612731933594, 0.018246335983276366, 0.01818009567260742, 0.0182108154296875, 0.018196063995361327, 0.018155935287475587, 0.01798963165283203, 0.018187904357910158, 0.01804707145690918, 0.01785856056213379, 0.017856800079345703, 0.017788415908813478, 0.01788569641113281, 0.01797865676879883, 0.01776508712768555, 0.01763667106628418, 0.01779782485961914, 0.017548511505126953, 0.017529632568359373, 0.01754521560668945, 0.017467391967773437, 0.017952768325805665, 0.017590272903442384, 0.01762483215332031, 0.01775027275085449, 0.018034719467163087, 0.017618431091308593, 0.017580799102783203, 0.01750396728515625, 0.01747052764892578, 0.017464096069335938, 0.0174370231628418, 0.01745414352416992, 0.01742742347717285, 0.017377056121826173, 0.017475423812866212, 0.017408159255981444, 0.01741209602355957, 0.01755340766906738, 0.01765551948547363, 0.017627328872680665, 0.017545024871826173, 0.017440383911132812, 0.017512863159179687, 0.017535232543945314, 0.021753856658935547, 0.01762905693054199, 0.017492095947265626, 0.017715200424194336, 0.01754857635498047, 0.017439071655273437, 0.017453632354736327, 0.017475391387939455, 0.01777689552307129, 0.01756979179382324, 0.017392959594726563, 0.017318592071533204, 0.017452032089233398, 0.01737215995788574, 0.017375232696533204, 0.017343936920166017, 0.017539648056030272, 0.01745305633544922, 0.017344512939453126, 0.017468416213989257, 0.017323007583618166, 0.017528831481933595, 0.017345727920532225, 0.017519424438476563, 0.01737932777404785, 0.017747968673706056, 0.017426431655883787, 0.017457216262817384, 0.017381216049194338, 0.017456607818603517, 0.017369056701660158, 0.01783660888671875, 0.017496416091918945, 0.017579776763916016, 0.0176429443359375, 0.017476160049438475, 0.01742233657836914, 0.017426431655883787, 0.017504255294799806, 0.017352703094482422, 0.017425472259521485, 0.017583040237426757, 0.018966527938842775, 0.01761689567565918, 0.017647455215454102, 0.01749350357055664, 0.017477535247802736, 0.017595136642456054, 0.017434623718261717, 0.01738751983642578, 0.017299455642700197, 0.017280576705932617, 0.017379776000976562, 0.017385471343994142, 0.01743974494934082, 0.017578815460205077, 0.017832191467285156, 0.017825727462768556, 0.01757097625732422, 0.01747427177429199, 0.017533056259155272, 0.017434623718261717, 0.017487232208251952, 0.01761686325073242, 0.017682239532470702, 0.017676576614379883, 0.017609439849853515, 0.017448064804077148, 0.0175164794921875, 0.01759312057495117, 0.017547552108764648, 0.017604799270629884, 0.017502304077148437, 0.017480768203735352, 0.017410400390625, 0.017406560897827147, 0.017492992401123047, 0.017794048309326172, 0.017434623718261717, 0.01743667221069336, 0.017508352279663086, 0.017491968154907226, 0.020322303771972656, 0.017928031921386717, 0.017475103378295897, 0.017457792282104492, 0.018048479080200196, 0.01752931213378906, 0.017680448532104494, 0.017749664306640624, 0.017463647842407226, 0.01799295997619629, 0.017411840438842772, 0.020567039489746093, 0.0177174072265625, 0.0175897274017334, 0.017508319854736328, 0.017365055084228517, 0.017407327651977538, 
0.01744179153442383, 0.01742624092102051, 0.017467519760131837, 0.017289119720458983, 0.017481887817382812, 0.017518592834472657, 0.01744486427307129, 0.01744895935058594, 0.017686304092407228, 0.017770719528198243, 0.017674240112304687, 0.017573312759399416, 0.017533376693725587, 0.01794598388671875, 0.017638143539428712, 0.01759814453125, 0.017473535537719728, 0.017443136215209962, 0.017649663925170898, 0.017544384002685546, 0.01748246383666992, 0.017554752349853514, 0.01738534355163574, 0.017488800048828124, 0.01747148895263672, 0.01760051155090332, 0.01760665512084961, 0.017632383346557617, 0.01774425506591797, 0.017474048614501952, 0.017704959869384765, 0.017555456161499023, 0.017469440460205078, 0.017315839767456053, 0.01740595245361328, 0.018654272079467772, 0.017994047164916992, 0.017789567947387695, 0.017726848602294922, 0.01765439987182617, 0.017514495849609374, 0.017423967361450195, 0.01790608024597168, 0.017624256134033203, 0.017824800491333007, 0.017614751815795898, 0.017483455657958984, 0.017574079513549806, 0.017603839874267578, 0.017507072448730468, 0.017496063232421876, 0.017510400772094727, 0.017802303314208984, 0.017910720825195313, 0.01774928092956543, 0.017554143905639648, 0.0174420166015625, 0.0174517765045166, 0.017444896697998046, 0.017463167190551757, 0.017481664657592773, 0.017443008422851562, 0.0174202880859375, 0.01741747283935547, 0.017405792236328124, 0.017531808853149415, 0.017589279174804687, 0.01744380760192871, 0.01760665512084961, 0.017581344604492188, 0.017650400161743164, 0.017571008682250977, 0.017527776718139647, 0.017540960311889647, 0.017528831481933595, 0.017358848571777344, 0.017354623794555664, 0.017337503433227538, 0.01731478309631348, 0.017353792190551758, 0.017329088211059572, 0.017333919525146485, 0.017379680633544923, 0.01754662322998047, 0.01751308822631836, 0.01764761543273926, 0.017528831481933595, 0.01744451141357422, 0.01742473602294922, 0.01780531120300293, 0.017544927597045897, 0.017395999908447264, 0.01770102310180664, 0.01746108818054199, 0.017364992141723632, 0.01740985679626465, 0.017383615493774415, 0.01740185546875, 0.01820057678222656, 0.01785036849975586, 0.01754115104675293, 0.017397727966308594, 0.017485824584960938, 0.01742233657836914, 0.017436063766479493, 0.01749043273925781, 0.01762723159790039, 0.017480928421020506, 0.017478431701660156, 0.017770496368408203, 0.0175548152923584, 0.017563776016235353, 0.017383935928344727, 0.017364992141723632, 0.017344512939453126, 0.01744483184814453, 0.0173404483795166, 0.017293312072753905, 0.017391551971435548, 0.017557567596435546, 0.017559551239013673, 0.01759164810180664, 0.017778495788574218, 0.017580095291137694, 0.017468191146850585, 0.020684799194335936, 0.020056255340576173, 0.019500864028930663, 0.017625087738037108, 0.017519615173339845, 0.01748905563354492, 0.017506111145019532, 0.017545248031616213, 0.01786675262451172, 0.017479679107666016, 0.017442815780639647, 0.01771651268005371, 0.01760927963256836, 0.01804265594482422, 0.017418624877929688, 0.017530879974365234, 0.01745840072631836, 0.018299680709838867, 0.017544704437255858, 0.017553823471069336, 0.01749135971069336, 0.017459903717041016, 0.0176124153137207, 0.01747318458557129, 0.017422752380371095, 0.017557823181152343, 0.017605632781982423, 0.017562623977661132, 0.01764352035522461, 0.018145280838012694, 0.01781760025024414, 0.017681663513183593, 0.017590688705444335, 0.017498111724853514, 0.017627487182617186, 0.017577823638916017, 0.017603424072265624, 0.01742848014831543, 0.01748384094238281, 
0.017425600051879882, 0.017469728469848633, 0.017351360321044923, 0.018183712005615235, 0.01758028793334961, 0.017399232864379884, 0.017429056167602538, 0.017338367462158204, 0.017448383331298827, 0.017467775344848634, 0.017489599227905273, 0.01738598442077637, 0.01743657684326172, 0.017546432495117188, 0.017518688201904296, 0.0173920955657959, 0.017514848709106447, 0.01760665512084961, 0.01988812828063965, 0.0192225284576416, 0.017448320388793945, 0.017549503326416017, 0.01750067138671875, 0.01753696060180664, 0.01743052864074707, 0.017377279281616212, 0.017367040634155274, 0.017442815780639647, 0.017303712844848634, 0.017426271438598633, 0.017467647552490233, 0.017503904342651366, 0.017499807357788087, 0.0176495361328125, 0.017526655197143554, 0.01741436767578125, 0.01736163139343262, 0.017618688583374023, 0.017491968154907226, 0.017374975204467773, 0.017534751892089844, 0.017666528701782227, 0.01767328071594238, 0.017585088729858398, 0.017633279800415038, 0.018495487213134765, 0.018341888427734376, 0.017804576873779298, 0.017704927444458007, 0.017566688537597658, 0.01764534378051758, 0.01766364860534668, 0.017657791137695313, 0.017461664199829103, 0.017481472015380858, 0.01751078414916992, 0.01750822448730469, 0.01741414451599121, 0.01754844856262207, 0.01747983932495117, 0.01759516716003418, 0.01739776039123535, 0.017346080780029298, 0.017322399139404296, 0.017365055084228517, 0.01752835273742676, 0.017453535079956055, 0.017657440185546876, 0.017381792068481446, 0.017362943649291994, 0.01771721649169922, 0.017542911529541017, 0.017457439422607423, 0.01764956855773926, 0.018124895095825197, 0.02045302391052246, 0.01769094467163086, 0.0175118408203125, 0.017553024291992188, 0.017527807235717775, 0.01797324752807617, 0.017459199905395507, 0.017475072860717773, 0.017367551803588867, 0.017469440460205078, 0.017417728424072267, 0.017457664489746092, 0.017579839706420897, 0.017823392868041993, 0.01764780807495117, 0.017529087066650392, 0.017418336868286134, 0.01750217628479004, 0.017598495483398438, 0.017513919830322265, 0.017496639251708985, 0.017458688735961913, 0.01731839942932129, 0.0175861759185791, 0.017571840286254883, 0.01815465545654297, 0.017714239120483397, 0.017601791381835936, 0.01747817611694336, 0.018011871337890624, 0.017608800888061524, 0.017723583221435548, 0.018206335067749022, 0.017664159774780273, 0.01745123291015625, 0.017614368438720704, 0.017508832931518555, 0.019083072662353515, 0.017584192276000978, 0.017567520141601563, 0.01751865577697754, 0.0175250244140625, 0.017563648223876953, 0.017452415466308595, 0.01745574378967285, 0.017513887405395508, 0.01751308822631836, 0.017473503112792967, 0.017605375289916993, 0.017428287506103514, 0.017418079376220703, 0.01734876823425293, 0.01737276840209961, 0.017301183700561523, 0.017387327194213868, 0.01744144058227539, 0.021717248916625978, 0.017800703048706054, 0.0175416316986084, 0.017471424102783205, 0.01738057518005371, 0.017647520065307617, 0.017372095108032226, 0.017526527404785157, 0.01738534355163574, 0.017475488662719727, 0.017469343185424806, 0.017649824142456055, 0.017559711456298827, 0.017528255462646483, 0.017726272583007813, 0.01810867118835449, 0.017700607299804688, 0.0175861759185791, 0.01747148895263672, 0.017478944778442383, 0.01747760009765625, 0.01756163215637207, 0.017707008361816406, 0.017568607330322266, 0.01806271934509277, 0.01760723114013672, 0.01757382392883301, 0.017622720718383788, 0.017663488388061522, 0.01775699234008789, 0.0175861759185791, 0.017502208709716797, 0.017567136764526366, 
0.017566303253173828, 0.01745510482788086, 0.017518144607543945, 0.01756819152832031, 0.017432512283325194, 0.01738947105407715, 0.017424543380737303, 0.017501216888427734, 0.017466175079345704, 0.017480127334594725, 0.01766371154785156, 0.01759846305847168, 0.017631519317626954, 0.01772515106201172, 0.018255552291870116, 0.017774560928344726, 0.01768400001525879, 0.01767491149902344, 0.018266271591186524, 0.021358591079711914, 0.017917407989501952, 0.01764201545715332, 0.018197887420654296, 0.020343616485595704, 0.017537151336669922, 0.017379007339477538, 0.01768822479248047, 0.01758064079284668, 0.017956607818603514, 0.017569343566894532, 0.017791423797607422, 0.017730880737304687, 0.017890176773071288, 0.017530847549438476, 0.01748054313659668, 0.017466367721557616, 0.01780735969543457, 0.017528831481933595, 0.017510271072387694, 0.017464895248413086, 0.017625600814819335, 0.017409088134765625, 0.017562623977661132, 0.018414751052856445, 0.018190816879272462, 0.018389375686645507, 0.018263904571533204, 0.01775430488586426, 0.018204639434814453, 0.017764352798461915, 0.017952512741088868, 0.017684736251831056, 0.017532928466796875, 0.01756064033508301, 0.01780784034729004, 0.01752899169921875, 0.017551679611206055, 0.01743846321105957, 0.017451263427734374, 0.017391456604003906, 0.017627296447753907, 0.017503360748291015, 0.017447839736938475, 0.017987039566040038, 0.017465856552124022, 0.017444128036499022, 0.01760534477233887, 0.017498111724853514, 0.017560863494873048, 0.017430912017822264, 0.017729888916015624, 0.017503488540649415, 0.017480287551879883, 0.01749622344970703, 0.01746329689025879, 0.01744076728820801, 0.017526784896850587, 0.017448448181152345, 0.017453567504882812, 0.017636543273925782, 0.018061920166015624, 0.022011520385742188, 0.017885791778564454, 0.017724639892578126, 0.017641599655151368]",tokens/s,56.532006789818695,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4338.601984,6253.576192,0.0,5851.05408,5850.293248,s,1,11.79224609375,11.79224609375,0.0,11.79224609375,11.79224609375,11.79224609375,11.79224609375,[11.79224609375],,kWh,0.00011709121809583394,1.290865747946052e-05,3.554919510600357e-05,0.00016554907068129802,,MB,4162.859008,6368.919552,0.0,5951.717376,5922.919424,s,10,1.989371276855469,0.1989371276855469,0.0004462519386197465,0.19888356781005861,0.19952062072753904,0.19954159088134765,0.19955836700439453,"[0.19855783081054687, 0.19951596069335936, 0.1982772216796875, 0.19934101867675783, 0.19852432250976562, 0.19930624389648438, 0.1990167694091797, 0.19851898193359374, 0.19956256103515624, 0.1987503662109375]",tokens/s,1286.8387262766278,kWh,5.849585294583221e-06,6.45094870788293e-07,3.888047554879981e-06,1.0382727720251495e-05,tokens/kWh,24656333.75906337,MB,4177.420288,6383.599616,0.0,5966.39744,5922.921984,s,10,19.704122802734375,1.9704122802734374,0.012788794836903069,1.9678577880859374,1.9886855712890625,1.9894063842773437,1.9899830346679688,"[1.9667432861328125, 1.9774005126953125, 1.959409912109375, 1.9540545654296875, 1.9595458984375, 1.956507568359375, 1.988525390625, 1.9689722900390625, 1.982836181640625, 1.990127197265625]",tokens/s,31.973004142695146,kWh,5.7651731447916654e-05,6.3579199090735824e-06,3.801709708032008e-05,0.00010202674843731035,tokens/kWh,617485.1297815291,,s,630,19.701035129547133,0.03127148433261447,0.0006615181647147342,0.031122479438781738,0.031586703300476075,0.03203819465637207,0.034321834907531754,"[0.032286209106445314, 0.03131888008117676, 0.03130726432800293, 0.03112188720703125, 0.031031072616577147, 0.03138096046447754, 0.03093337631225586, 0.030992864608764648, 0.03106790351867676, 0.03153232002258301, 0.03102409553527832, 0.03083263969421387, 0.03095347213745117, 0.031050975799560548, 0.03101366424560547, 0.030998239517211913, 0.031072511672973632, 0.030994464874267578, 0.031068159103393556, 0.03092092704772949, 0.030865184783935545, 0.031125343322753907, 0.030840991973876953, 0.030877376556396486, 0.03085548782348633, 0.03097596740722656, 
0.030996223449707032, 0.031050016403198242, 0.03127705574035645, 0.031222976684570313, 0.03550620651245117, 0.031124128341674804, 0.030959360122680663, 0.030843263626098634, 0.031064159393310548, 0.030912416458129883, 0.030833791732788086, 0.03112544059753418, 0.03090937614440918, 0.03102019119262695, 0.031128416061401366, 0.03100057601928711, 0.031107072830200196, 0.03140991973876953, 0.03104697608947754, 0.031173568725585937, 0.031131647109985353, 0.03120128059387207, 0.03152835273742676, 0.031197792053222657, 0.0313118724822998, 0.03148185539245606, 0.031310880661010745, 0.03148080062866211, 0.031362752914428714, 0.03140959930419922, 0.03126361656188965, 0.03129251289367676, 0.031201408386230468, 0.03128604888916016, 0.03122934341430664, 0.03130019187927246, 0.031295711517333985, 0.03187833595275879, 0.03111404800415039, 0.031411968231201175, 0.03099375915527344, 0.031081375122070314, 0.031092735290527345, 0.030939136505126953, 0.031037439346313478, 0.030996320724487304, 0.032333984375, 0.03121504020690918, 0.031102880477905274, 0.03100124740600586, 0.03136316871643066, 0.031143264770507814, 0.031058496475219726, 0.03108390426635742, 0.031009408950805666, 0.030963167190551758, 0.031095327377319334, 0.030861312866210938, 0.03097599983215332, 0.031014911651611327, 0.031020511627197267, 0.03080633544921875, 0.03075299263000488, 0.030986047744750975, 0.03535427093505859, 0.031113056182861327, 0.031048383712768555, 0.031242303848266602, 0.03157600021362305, 0.031487871170043945, 0.03094745635986328, 0.031055488586425782, 0.031093120574951172, 0.031305376052856444, 0.03118227195739746, 0.031159200668334962, 0.030971904754638672, 0.03116214370727539, 0.03119945526123047, 0.03148182487487793, 0.03151203155517578, 0.031227647781372072, 0.03129849624633789, 0.03376323318481445, 0.03661616134643555, 0.031045631408691408, 0.03092889595031738, 0.031090015411376952, 0.03115635108947754, 0.031074848175048828, 0.031098880767822266, 0.031254528045654296, 0.03362406539916992, 0.03115827178955078, 0.0314654712677002, 0.031061023712158204, 0.031070783615112306, 0.030953887939453126, 0.03108652877807617, 0.0309105281829834, 0.03166396713256836, 0.031188831329345704, 0.031115423202514647, 0.031230112075805665, 0.030975519180297853, 0.03184409523010254, 0.03131839942932129, 0.03202492904663086, 0.031528959274291994, 0.031405567169189456, 0.03147532844543457, 0.03138003158569336, 0.031449407577514646, 0.03115827178955078, 0.03146137619018555, 0.03133468818664551, 0.03117846488952637, 0.031309215545654294, 0.03131017684936523, 0.031080703735351562, 0.030979263305664063, 0.03113862419128418, 0.030922079086303712, 0.030884000778198244, 0.031212032318115233, 0.031192415237426756, 0.03104630470275879, 0.03101286315917969, 0.031012832641601564, 0.030946624755859374, 0.031018848419189452, 0.03134684753417969, 0.030991071701049804, 0.03099033546447754, 0.03097804832458496, 0.03092243194580078, 0.030966079711914063, 0.031270912170410156, 0.03100057601928711, 0.03099158477783203, 0.031046016693115235, 0.030983903884887695, 0.031066783905029295, 0.030967840194702147, 0.030898176193237304, 0.030877695083618165, 0.03094937515258789, 0.03097395133972168, 0.030863359451293947, 0.030935039520263673, 0.03070742416381836, 0.030798112869262696, 0.030957183837890624, 0.030812543869018556, 0.03073023986816406, 0.030914655685424806, 0.030859167098999024, 0.030830591201782227, 0.03111302375793457, 0.030947519302368165, 0.030975231170654295, 0.030836511611938476, 0.03082748794555664, 0.031804607391357424, 0.030972736358642578, 
0.031123455047607423, 0.030916608810424805, 0.030846975326538087, 0.03093062400817871, 0.030795135498046876, 0.03083145523071289, 0.030658655166625977, 0.030686431884765625, 0.030915359497070312, 0.030849023818969725, 0.03094937515258789, 0.03087295913696289, 0.030847103118896484, 0.03086934471130371, 0.03092515182495117, 0.030815776824951173, 0.031025663375854492, 0.030820512771606447, 0.030797632217407226, 0.03098355293273926, 0.030876607894897462, 0.030811168670654296, 0.030767328262329103, 0.03087379264831543, 0.031217376708984376, 0.030734783172607423, 0.030673311233520507, 0.03078937530517578, 0.03072435188293457, 0.03079110336303711, 0.03138822364807129, 0.03082854461669922, 0.03072204780578613, 0.030748544692993166, 0.03081180763244629, 0.0307891845703125, 0.030716224670410155, 0.03102908706665039, 0.030976768493652343, 0.03093881607055664, 0.03094764709472656, 0.03227852630615234, 0.031164384841918944, 0.031018688201904298, 0.0308656005859375, 0.03100595283508301, 0.03105596733093262, 0.03111404800415039, 0.03111065673828125, 0.031164608001708984, 0.031156160354614257, 0.031123071670532226, 0.031126272201538085, 0.03121561622619629, 0.03239321517944336, 0.03134259223937988, 0.031508672714233396, 0.031311168670654296, 0.031105119705200194, 0.031166879653930665, 0.03111510467529297, 0.031994911193847654, 0.031379520416259764, 0.0310645751953125, 0.031296096801757815, 0.03154105567932129, 0.03124838447570801, 0.03137564849853516, 0.03133616065979004, 0.03119923210144043, 0.031058143615722657, 0.03088787269592285, 0.031121248245239257, 0.03107148742675781, 0.031039327621459962, 0.031033727645874025, 0.031150527954101562, 0.031164512634277344, 0.030810111999511718, 0.03080771255493164, 0.03084659194946289, 0.030869760513305665, 0.031236576080322265, 0.03091766357421875, 0.030884063720703125, 0.031070976257324218, 0.031425664901733395, 0.031121728897094726, 0.031449663162231446, 0.031090688705444337, 0.031150175094604493, 0.03200400161743164, 0.031318016052246093, 0.031322111129760744, 0.03142451286315918, 0.031157760620117186, 0.031197696685791015, 0.031252159118652346, 0.03123436737060547, 0.031014911651611327, 0.03131113624572754, 0.03123062324523926, 0.03091257667541504, 0.030980064392089845, 0.030886943817138673, 0.030880672454833984, 0.031160640716552734, 0.030777120590209962, 0.03074870491027832, 0.030969823837280273, 0.03094937515258789, 0.0307906551361084, 0.03101590347290039, 0.03088387107849121, 0.03083375930786133, 0.030808895111083985, 0.0309965763092041, 0.03089148712158203, 0.031031839370727538, 0.03100262451171875, 0.030899999618530273, 0.030851295471191406, 0.030958911895751954, 0.0308897590637207, 0.03161702346801758, 0.03092889595031738, 0.03122380828857422, 0.031055871963500976, 0.030838783264160157, 0.031015039443969727, 0.03092691230773926, 0.030889568328857423, 0.0310797119140625, 0.0310947208404541, 0.031015199661254884, 0.030974687576293944, 0.030998271942138673, 0.030911935806274413, 0.03097200012207031, 0.031001087188720702, 0.031340383529663084, 0.030982528686523438, 0.030926847457885744, 0.03094870376586914, 0.030992256164550782, 0.03092131233215332, 0.03230944061279297, 0.030898176193237304, 0.030956607818603515, 0.030949663162231446, 0.030884511947631837, 0.030922752380371094, 0.031047679901123046, 0.031036767959594726, 0.030954143524169923, 0.03082854461669922, 0.030856895446777343, 0.030926816940307616, 0.03098988723754883, 0.03097884750366211, 0.031129119873046875, 0.030943391799926757, 0.03104185676574707, 0.030943231582641603, 0.03120128059387207, 
0.031076351165771485, 0.031051103591918944, 0.031088960647583007, 0.030964223861694336, 0.031, 0.03091497611999512, 0.030930944442749023, 0.031520000457763674, 0.03108940887451172, 0.03105177688598633, 0.03101286315917969, 0.031025152206420898, 0.03122380828857422, 0.031080095291137696, 0.030894432067871094, 0.030967487335205077, 0.03108691215515137, 0.031202816009521486, 0.03111737632751465, 0.031081951141357422, 0.031185888290405275, 0.031154176712036134, 0.037282337188720704, 0.03128892707824707, 0.030972063064575197, 0.0314204158782959, 0.031076351165771485, 0.03282649612426758, 0.03185910415649414, 0.031001056671142578, 0.030898176193237304, 0.03096531105041504, 0.03101932716369629, 0.031076480865478515, 0.03114156723022461, 0.031760704040527346, 0.031303680419921875, 0.031335968017578125, 0.031178655624389647, 0.03147987174987793, 0.031265024185180665, 0.031193344116210938, 0.03129548835754394, 0.031635040283203124, 0.03367772674560547, 0.03166185569763184, 0.03130745506286621, 0.031536832809448245, 0.03157251167297363, 0.031351039886474606, 0.031569984436035155, 0.033527488708496096, 0.03156563186645508, 0.03198412895202637, 0.031473663330078124, 0.03129491233825683, 0.031222335815429686, 0.031464576721191406, 0.03140431976318359, 0.03253452682495117, 0.03148246383666992, 0.03143062400817871, 0.03150441551208496, 0.031337760925292966, 0.03133308792114258, 0.0314204158782959, 0.03129548835754394, 0.03154944038391113, 0.03191324806213379, 0.031421152114868164, 0.03146342468261719, 0.03120947265625, 0.03135420799255371, 0.03126953506469726, 0.031341856002807617, 0.03131465530395508, 0.03157366371154785, 0.03117024040222168, 0.03117862319946289, 0.03111724853515625, 0.031214431762695314, 0.031030879974365235, 0.03130956840515137, 0.031299392700195314, 0.031269344329833984, 0.03180179214477539, 0.031146303176879882, 0.030993568420410157, 0.03093724822998047, 0.030968511581420898, 0.03087283134460449, 0.030977888107299803, 0.030905248641967774, 0.031387168884277346, 0.03397439956665039, 0.03126006317138672, 0.031669183731079104, 0.031007871627807618, 0.031021055221557618, 0.03104243278503418, 0.03132163238525391, 0.03133872032165527, 0.03133260726928711, 0.031399072647094725, 0.031232864379882812, 0.031023103713989256, 0.03095747184753418, 0.031126720428466797, 0.03129436874389648, 0.03139788818359375, 0.03118489646911621, 0.03198361587524414, 0.03126681518554687, 0.0312579517364502, 0.031195199966430665, 0.031023712158203126, 0.03116646385192871, 0.031164064407348632, 0.031072959899902344, 0.031272607803344725, 0.03128319931030273, 0.031018463134765625, 0.0314619197845459, 0.03121766471862793, 0.031111135482788085, 0.031040672302246095, 0.03124323272705078, 0.03528851318359375, 0.0309334716796875, 0.031074304580688477, 0.03096713638305664, 0.031085216522216796, 0.030851072311401367, 0.03103497505187988, 0.031013280868530273, 0.031032928466796873, 0.03090678405761719, 0.030956768035888673, 0.03092889595031738, 0.030877567291259764, 0.030984960556030273, 0.030941343307495116, 0.03078144073486328, 0.031654048919677734, 0.0310020809173584, 0.03089651107788086, 0.031153696060180664, 0.030957344055175782, 0.03204095840454101, 0.03098214340209961, 0.030904319763183592, 0.03077529525756836, 0.030811775207519532, 0.03291993713378906, 0.03315507125854492, 0.031049728393554688, 0.031031295776367186, 0.03100262451171875, 0.03093017578125, 0.031116031646728517, 0.03161702346801758, 0.031340543746948245, 0.03110291290283203, 0.03113376045227051, 0.031991455078125, 0.031521120071411134, 
0.031268192291259767, 0.03147996711730957, 0.03145779228210449, 0.03213107299804688, 0.03130572891235352, 0.03129334449768066, 0.03136726379394531, 0.03137740707397461, 0.03154300880432129, 0.031525152206420895, 0.032778240203857424, 0.0315043830871582, 0.031498239517211916, 0.031389696121215824, 0.03203481674194336, 0.031232032775878906, 0.0314552001953125, 0.031318016052246093, 0.03142633628845215, 0.031414495468139646, 0.033912158966064455, 0.03150908851623535, 0.031121471405029296, 0.031119359970092773, 0.031106527328491212, 0.03144076728820801, 0.031257247924804686, 0.03134819221496582, 0.031608736038208005, 0.03132428741455078, 0.031094783782958983, 0.03128995132446289, 0.03138345527648926, 0.03128070449829102, 0.03129388809204101, 0.0321055679321289, 0.03135990333557129, 0.03193401527404785, 0.03140243148803711, 0.03126681518554687, 0.031151391983032226, 0.031213407516479493, 0.03125862312316895, 0.031178720474243166, 0.03131075286865234, 0.031988639831542966, 0.031352127075195316, 0.03704492950439453, 0.03240345764160156, 0.03129548835754394, 0.03158425521850586, 0.03116217613220215, 0.031376800537109374, 0.03257334518432617, 0.03152985572814941, 0.031139839172363282, 0.03181113624572754, 0.03135654449462891, 0.031243072509765626, 0.03178291130065918, 0.03216950225830078, 0.03147315216064453, 0.03177702331542969, 0.033321441650390624, 0.031281408309936524, 0.031053823471069338, 0.03188057518005371, 0.03071401596069336, 0.031082975387573243, 0.03113532829284668, 0.031208000183105468, 0.03145916748046875, 0.03128268814086914, 0.031185407638549805, 0.03136678314208984, 0.0311628475189209, 0.03139497566223144, 0.031072160720825196, 0.03243708801269531, 0.031543296813964845, 0.0344637451171875, 0.03103539276123047, 0.03119024085998535, 0.03126761627197266, 0.031213567733764647, 0.031294687271118164, 0.031235904693603517, 0.031161312103271485, 0.03161497688293457, 0.03124224090576172, 0.031243616104125977, 0.03107209587097168, 0.03200096130371094, 0.03082566452026367, 0.03177910423278808, 0.03125494384765625, 0.030969184875488283, 0.032919872283935545, 0.031099231719970703, 0.03117670440673828, 0.03129343986511231, 0.03117670440673828, 0.03101625633239746, 0.03140473556518555, 0.03110860824584961, 0.03143116760253906, 0.03136716842651367, 0.03130131149291992]",tokens/s,31.97801515795187,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8154.529792,11251.089408,0.0,10848.567296,10616.027648,s,1,14.68530078125,14.68530078125,0.0,14.68530078125,14.68530078125,14.68530078125,14.68530078125,[14.68530078125],,kWh,0.00021833083696667468,2.4074120734856615e-05,6.566449697599508e-05,0.0003080694546775264,,MB,3835.26912,11672.61696,0.0,11255.414784,11070.470656,s,10,3.6982595825195315,0.36982595825195314,0.0016546003656661222,0.37028662109375,0.3710934844970703,0.3717191970825195,0.3722197671508789,"[0.36617440795898437, 0.3680224609375, 
0.3702292175292969, 0.37034402465820315, 0.37093032836914064, 0.37234490966796874, 0.36918389892578124, 0.37095443725585936, 0.3693240661621094, 0.3707518310546875]",tokens/s,692.2174993070487,kWh,1.082723246219145e-05,1.194031689604212e-06,7.155633296518566e-06,1.9176897448314228e-05,tokens/kWh,13349396.099654485,MB,3839.524864,11674.714112,0.0,11257.511936,11070.473216,s,10,28.627691406249998,2.8627691406249998,0.004573717331479009,2.86322998046875,2.8682132568359378,2.868764221191406,2.8692049926757814,"[2.8571474609375, 2.8563994140625, 2.856251220703125, 2.86196923828125, 2.863158203125, 2.865311767578125, 2.869315185546875, 2.8633017578125, 2.8680908203125, 2.866746337890625]",tokens/s,22.00666449347216,kWh,8.378808828239136e-05,9.240122668859486e-06,5.583166709328138e-05,0.00014885987804453218,tokens/kWh,423216.79170765704,,s,630,28.624257297515847,0.04543532904367598,0.00041357450996904496,0.0454036636352539,0.04586983299255371,0.04597235126495361,0.04726500995635987,"[0.047309024810791016, 0.045224479675292965, 0.04496588897705078, 0.04491420745849609, 0.04480819320678711, 0.04472675323486328, 0.044813472747802736, 0.045310592651367186, 0.04545558547973633, 0.04493875122070313, 0.04509900665283203, 0.04571376037597656, 0.04548828887939453, 0.04513552093505859, 0.04505753707885742, 0.0453210563659668, 0.045172737121582034, 0.045004512786865236, 0.04508700942993164, 0.04509468841552734, 0.04541241455078125, 0.04529119873046875, 0.045136417388916016, 0.045423999786376956, 0.04542307281494141, 0.04512982559204102, 0.044830398559570314, 0.04488224029541016, 0.04543075180053711, 0.04526851272583008, 0.04514409637451172, 0.045142303466796874, 0.045485343933105465, 0.04574262237548828, 0.04534255981445313, 0.045537822723388674, 0.04597555160522461, 0.04566016006469727, 0.04515430450439453, 0.045010303497314455, 0.04527286529541016, 0.04541078567504883, 0.04518320083618164, 0.045334686279296876, 0.04568678283691406, 0.045412353515625, 0.045061599731445315, 0.04527276611328125, 0.04563571166992188, 0.045650657653808595, 0.04533449554443359, 0.04523971176147461, 0.0453752326965332, 0.04546416091918945, 0.045195552825927736, 0.04550041580200195, 0.04612422561645508, 0.045876033782958986, 0.04553286361694336, 0.045455680847167966, 0.04574399948120117, 0.04548732757568359, 0.045539520263671876, 0.047341087341308596, 0.045437694549560544, 0.04511743927001953, 0.04485286331176758, 0.04464678573608399, 0.044843006134033206, 0.044900161743164066, 0.04527238464355469, 0.04525350570678711, 0.045071582794189456, 0.04491312026977539, 0.04499283218383789, 0.045287647247314454, 0.04513075256347656, 0.04475519943237305, 0.045254302978515626, 0.04518783950805664, 0.044930400848388674, 0.045133792877197265, 0.04535583877563477, 0.04543910217285156, 0.04516236877441406, 0.04584243011474609, 0.04541439819335937, 0.045400062561035154, 0.04506412887573242, 0.04502441787719726, 0.044993438720703126, 0.0451503677368164, 0.04547107315063476, 0.04520329666137695, 0.0451671028137207, 0.04547356796264648, 0.04530771255493164, 0.045332542419433595, 0.04514252853393555, 0.045195262908935545, 0.04549763107299805, 0.04532025527954102, 0.045159072875976564, 0.0452393913269043, 0.04525353622436523, 0.04563148880004883, 0.04547135925292969, 0.04536751937866211, 0.04560070419311523, 0.04538755035400391, 0.04552339172363281, 0.04521548843383789, 0.045754398345947266, 0.045699295043945314, 0.04552425765991211, 0.04573052978515625, 0.04543657684326172, 0.04517923355102539, 0.04539958572387695, 0.045935230255126955, 
0.045604705810546875, 0.045397598266601565, 0.0454285774230957, 0.045521472930908205, 0.045375038146972656, 0.04590227127075195, 0.04748892974853516, 0.04540367889404297, 0.0450683822631836, 0.044939743041992185, 0.04502511978149414, 0.04489056015014648, 0.044923648834228516, 0.044923873901367185, 0.0450621452331543, 0.044943359375, 0.044951553344726565, 0.04500889587402344, 0.04515225601196289, 0.044902400970458986, 0.04513792037963867, 0.045313312530517576, 0.045486175537109375, 0.04537142562866211, 0.045162494659423826, 0.045353569030761716, 0.045608478546142577, 0.045555809020996096, 0.045395713806152344, 0.04534540939331055, 0.04518707275390625, 0.045058048248291016, 0.045199104309082035, 0.04492108917236328, 0.045112510681152344, 0.04509980773925781, 0.04521136093139649, 0.04497235107421875, 0.045166305541992184, 0.0451927375793457, 0.045246368408203126, 0.04507036972045898, 0.04531814575195312, 0.04573062515258789, 0.045484031677246094, 0.04514963150024414, 0.045365825653076175, 0.04583628845214844, 0.04586086273193359, 0.04536832046508789, 0.045644351959228516, 0.04570505523681641, 0.04544982528686523, 0.04508262252807617, 0.04519756698608399, 0.04558822250366211, 0.04544307327270508, 0.04514166259765625, 0.045218017578125, 0.04515852737426758, 0.045707263946533204, 0.04545228958129883, 0.0454257926940918, 0.04573481750488281, 0.04588438415527344, 0.04556595230102539, 0.045484031677246094, 0.04558233642578125, 0.04554956817626953, 0.047889728546142575, 0.04538243103027344, 0.0448359375, 0.04494019317626953, 0.04494131088256836, 0.04470988845825195, 0.04497612762451172, 0.04460543823242188, 0.04507033538818359, 0.044974079132080076, 0.04526265716552735, 0.04503366470336914, 0.045000705718994144, 0.04487168121337891, 0.045262847900390625, 0.0450203857421875, 0.045514686584472656, 0.04549923324584961, 0.04558230209350586, 0.045413536071777345, 0.04552793502807617, 0.04605542373657227, 0.04551424026489258, 0.04513552093505859, 0.04508041763305664, 0.044983009338378906, 0.04535734558105469, 0.04504275131225586, 0.0449600944519043, 0.04481459045410156, 0.04547020721435547, 0.0450596809387207, 0.04504560089111328, 0.04523382568359375, 0.04556876754760742, 0.04533782577514649, 0.04537212753295899, 0.04580470275878906, 0.045644702911376955, 0.04567036819458008, 0.045797409057617186, 0.04595663833618164, 0.04597094345092773, 0.045867774963378904, 0.04591024017333984, 0.045778942108154294, 0.045592575073242186, 0.04558412933349609, 0.04575872039794922, 0.04549017715454102, 0.04534272003173828, 0.04526489639282227, 0.045400062561035154, 0.04537753677368164, 0.04542451095581055, 0.04542272186279297, 0.045552734375, 0.045775775909423826, 0.045676513671875, 0.04546559906005859, 0.045828128814697264, 0.04596886444091797, 0.04590860748291015, 0.04684598541259766, 0.04510924911499024, 0.04513177490234375, 0.04493107223510742, 0.045271041870117185, 0.045703327178955075, 0.044805023193359376, 0.044757537841796875, 0.04507075119018555, 0.04492083358764649, 0.04490348815917969, 0.045150142669677734, 0.045292545318603515, 0.045090816497802735, 0.04508371353149414, 0.04532524871826172, 0.04574617767333984, 0.04553113555908203, 0.04520140838623047, 0.045244415283203124, 0.045676544189453126, 0.045674495697021485, 0.045623294830322264, 0.045231616973876954, 0.04550297546386719, 0.04551270294189453, 0.045184608459472655, 0.045152671813964845, 0.0450682258605957, 0.0452619514465332, 0.04510201644897461, 0.04499251174926758, 0.04537753677368164, 0.04556595230102539, 0.045430015563964844, 0.04546384048461914, 
0.045653728485107424, 0.04565475082397461, 0.045583774566650394, 0.04549091339111328, 0.045485984802246096, 0.04597350311279297, 0.04565983963012695, 0.04578153610229492, 0.04602243041992188, 0.04588947296142578, 0.0456800308227539, 0.045709983825683594, 0.045699073791503904, 0.045385726928710936, 0.04540351867675781, 0.04536137771606445, 0.0452567024230957, 0.04541072082519531, 0.04539187240600586, 0.04569833755493164, 0.04564451217651367, 0.04554713439941406, 0.04571078491210938, 0.04586966323852539, 0.04591036987304688, 0.04545945739746094, 0.04558147048950195, 0.04668374252319336, 0.0451011848449707, 0.04490643310546875, 0.04488768005371094, 0.04476976013183594, 0.04498166275024414, 0.044956321716308596, 0.04470364761352539, 0.04519094467163086, 0.045128097534179686, 0.04523427200317383, 0.04538991928100586, 0.045176734924316404, 0.04477542495727539, 0.04514121627807617, 0.0458691520690918, 0.04557017517089844, 0.04498284912109375, 0.04555980682373047, 0.04592038345336914, 0.04559244918823242, 0.045203006744384766, 0.04551724624633789, 0.04554751968383789, 0.045553665161132816, 0.04527308654785156, 0.045211647033691404, 0.04511743927001953, 0.04530614471435547, 0.045163265228271486, 0.045146625518798826, 0.04553776168823242, 0.04557823944091797, 0.04544281768798828, 0.04531430435180664, 0.045186752319335936, 0.0460843505859375, 0.04573603057861328, 0.04578822326660156, 0.045884319305419925, 0.04575174331665039, 0.045711936950683596, 0.045873153686523435, 0.04586102294921875, 0.04564339065551758, 0.04547769546508789, 0.045582752227783206, 0.04554342269897461, 0.0452751350402832, 0.04532428741455078, 0.04584563064575195, 0.04556889724731445, 0.04557376098632813, 0.04528985595703125, 0.045508289337158205, 0.04571958541870117, 0.0453983039855957, 0.04581785583496094, 0.04619468688964844, 0.045932769775390625, 0.04594627380371094, 0.04585846328735352, 0.04611936187744141, 0.04737068939208984, 0.04564585494995117, 0.04515225601196289, 0.044875423431396486, 0.045088577270507815, 0.045019680023193356, 0.04529689788818359, 0.04491750335693359, 0.04486147308349609, 0.045428703308105466, 0.04528332901000977, 0.045246463775634765, 0.04517068862915039, 0.045146110534667966, 0.04540988922119141, 0.04557660675048828, 0.04560486221313476, 0.04595097732543945, 0.04553644943237305, 0.04530464172363281, 0.04519116973876953, 0.04548198318481445, 0.04582710266113281, 0.04559062576293945, 0.04513241577148437, 0.0454207992553711, 0.045328384399414064, 0.045211647033691404, 0.04494540786743164, 0.0452935676574707, 0.045434879302978515, 0.04524851226806641, 0.045530719757080076, 0.04568105697631836, 0.04576870346069336, 0.04527228927612305, 0.04539616012573242, 0.045873760223388675, 0.04605952072143555, 0.04588521575927734, 0.04567267227172851, 0.045784576416015625, 0.045623809814453124, 0.045932544708251956, 0.0461578254699707, 0.04573798370361328, 0.045721408843994144, 0.045583808898925784, 0.04552511978149414, 0.045755008697509765, 0.045649921417236325, 0.04518064117431641, 0.04545561599731445, 0.04574211120605469, 0.045625343322753906, 0.045295265197753905, 0.04560246276855469, 0.04589433670043945, 0.045760353088378905, 0.045746463775634766, 0.04591132736206055, 0.04609468841552734, 0.046029056549072266, 0.0481929931640625, 0.04533712005615234, 0.04491782379150391, 0.044882209777832034, 0.04476134490966797, 0.04469392013549805, 0.04480409622192383, 0.045131649017333984, 0.045243934631347654, 0.0451835823059082, 0.04538531112670898, 0.04519952011108398, 0.045871360778808594, 0.045049854278564457, 
0.04487097549438476, 0.04520211029052734, 0.04604927825927734, 0.04546284866333008, 0.045100894927978516, 0.04509756851196289, 0.04541462326049805, 0.04557372665405274, 0.045257152557373045, 0.045350910186767575, 0.04538729476928711, 0.04520934295654297, 0.04500316619873047, 0.044964256286621096, 0.045518753051757815, 0.04550156784057617, 0.04557503890991211, 0.04545241546630859, 0.04569174575805664, 0.04548611068725586, 0.04547174453735352, 0.04551196670532227, 0.045896415710449216, 0.045606910705566404, 0.045155616760253904, 0.045353694915771486, 0.04593868637084961, 0.045666015625, 0.04537577438354492, 0.04552864074707031, 0.04569747161865234, 0.045590465545654296, 0.045109310150146485, 0.045049854278564457, 0.045601856231689455, 0.045384449005126955, 0.04519750213623047, 0.04540364837646484, 0.045703678131103515, 0.04578684616088867, 0.04544300842285156, 0.04561955261230469, 0.04567859268188477, 0.04554956817626953, 0.045473377227783204, 0.04585718536376953, 0.04580934524536133, 0.04593900680541992, 0.04578713607788086, 0.04715724945068359, 0.04528073501586914, 0.04516304016113281, 0.0450123519897461, 0.04493747329711914, 0.04482851028442383, 0.04495004653930664, 0.04522927856445313, 0.045093151092529295, 0.04486809539794922, 0.04513792037963867, 0.04557619094848633, 0.04535049438476563, 0.044978591918945314, 0.04499577713012695, 0.04588627243041992, 0.04583200073242188, 0.0454453125, 0.045158401489257816, 0.04532223892211914, 0.045412353515625, 0.04571478271484375, 0.04534281539916992, 0.04535500717163086, 0.04529619216918945, 0.0455079345703125, 0.04522870254516602, 0.045127616882324216, 0.045045345306396485, 0.045468128204345704, 0.04521923065185547, 0.04538390350341797, 0.045450721740722656, 0.04533497619628906, 0.04508835220336914, 0.04518809509277344, 0.045578113555908205, 0.04563353729248047, 0.04540150451660156, 0.04547439956665039, 0.045699073791503904, 0.045639041900634766, 0.04554406356811523, 0.045686367034912106, 0.04603903961181641, 0.045838558197021484, 0.045803489685058596, 0.04560611343383789, 0.04590431976318359, 0.04566614532470703, 0.04567932891845703, 0.04539801788330078, 0.04595711898803711, 0.045758174896240233, 0.04585062408447266, 0.04585100936889648, 0.04572476959228516, 0.04605830383300781, 0.046216896057128906, 0.04610899353027344, 0.045862911224365234, 0.04609843063354492, 0.046243839263916016, 0.04822880172729492, 0.04569430541992187, 0.04546844863891602, 0.04509846496582031, 0.044848670959472654, 0.04521686553955078, 0.04496169662475586, 0.044953567504882816, 0.0451297607421875, 0.045284671783447264, 0.04522844696044922, 0.04520486450195312, 0.04523715209960937, 0.04517635345458984, 0.045958751678466796, 0.04552179336547851, 0.045774848937988284, 0.04552659225463867, 0.04566409683227539, 0.04521635055541992, 0.04544102478027344, 0.04584214401245117, 0.045558048248291017, 0.045483230590820316, 0.04514057540893555, 0.045373470306396484, 0.04542889785766602, 0.04524236679077148, 0.04499647903442383, 0.0450909423828125, 0.045373184204101566, 0.04518118286132813, 0.04497817611694336, 0.04539542388916016, 0.04550048065185547, 0.045265182495117184, 0.04528966522216797, 0.04564534378051758, 0.045838817596435544, 0.04553932952880859, 0.04506825637817383, 0.04534684753417969, 0.04601641464233398, 0.04570259094238281, 0.04535516738891601, 0.045617664337158206, 0.04563705444335937, 0.04569760131835938, 0.04539123153686524, 0.04563827133178711, 0.04569926452636719, 0.045674304962158206, 0.045321823120117184, 0.045353534698486325, 0.04562876892089844, 
0.04565411376953125, 0.04550044631958008, 0.04562777709960938, 0.04585062408447266, 0.045693984985351564, 0.04592947387695313, 0.045985950469970706, 0.04599971389770508]",tokens/s,22.009304676515537,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line 
repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 58057 has 14.69 GiB memory in use. Of the allocated memory 14.29 GiB is allocated by PyTorch, and 313.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4060.491776,4705.878016,0.0,4303.355904,4034.388992,s,1,10.473888671875,10.473888671875,0.0,10.473888671875,10.473888671875,10.473888671875,10.473888671875,[10.473888671875],,kWh,9.017919102497368e-05,9.9400225292148e-06,2.9026412110000344e-05,0.0001291456256641888,,MB,4148.432896,4764.598272,0.0,4347.396096,4202.566656,s,10,1.9258825225830078,0.19258825225830076,0.00019691995323813642,0.1926078186035156,0.19284299621582032,0.1928462646484375,0.19284887939453124,"[0.19284953308105468, 0.19239561462402344, 0.1922071990966797, 0.19253578186035156, 0.19275856018066406, 0.19267120361328124, 0.19262034606933592, 0.19284226989746095, 0.19240672302246092, 0.1925952911376953]",tokens/s,1329.2607259172328,kWh,5.645777938701859e-06,6.226279070460332e-07,3.739271153807673e-06,1.0007676999555565e-05,tokens/kWh,25580361.957262293,MB,4154.544128,4764.598272,0.0,4347.396096,4213.08416,s,10,16.21162060546875,1.6211620605468748,0.01643749453642429,1.6152102661132812,1.6450425048828123,1.6482319458007813,1.6507834985351562,"[1.6224825439453125, 1.65142138671875, 1.644333740234375, 1.6212415771484374, 1.6068294677734376, 1.6058837890625, 1.636740478515625, 1.6048421630859375, 1.609178955078125, 1.60866650390625]",tokens/s,38.86101305550408,kWh,4.6901585540045816e-05,5.172983285834536e-06,3.125370128499161e-05,8.332827011087197e-05,tokens/kWh,756045.9363452008,,s,630,16.20921557998657,0.025728913619026305,0.00046659421688790306,0.025638720512390138,0.026279296112060545,0.026467916393280028,0.027240028095245367,"[0.02680441665649414, 0.025929216384887696, 0.025833759307861328, 0.026040319442749024, 0.025651199340820312, 0.02554265594482422, 0.025640575408935547, 0.02564476776123047, 0.025610912322998048, 0.02599475288391113, 0.025713151931762695, 0.02557267189025879, 0.025596223831176757, 0.025571712493896483, 0.025433343887329103, 0.025619199752807617, 0.025847808837890625, 0.025824735641479492, 0.025643487930297852, 0.026457887649536133, 0.02558799934387207, 0.025685951232910155, 0.025527360916137696, 0.026515520095825196, 0.025523136138916016, 0.02569375991821289, 0.0256759033203125, 0.0257457275390625, 0.025703935623168944, 0.02549580764770508, 0.025610496520996093, 0.025751455307006836, 0.02558291244506836, 0.025503551483154297, 0.02558870315551758, 
0.025468095779418946, 0.02561311912536621, 0.025447999954223633, 0.02550601577758789, 0.02544771194458008, 0.025843807220458984, 0.025579872131347655, 0.02589244842529297, 0.025780288696289063, 0.02561257553100586, 0.025739839553833008, 0.02563248062133789, 0.025825504302978516, 0.0263701114654541, 0.026044416427612304, 0.025774080276489256, 0.025910367965698244, 0.025756576538085937, 0.02573311996459961, 0.02572287940979004, 0.025860095977783205, 0.025821184158325194, 0.025734464645385743, 0.026337120056152345, 0.025637344360351564, 0.025636608123779298, 0.02559609603881836, 0.02576335906982422, 0.026878944396972658, 0.02609916877746582, 0.02612384033203125, 0.026004447937011718, 0.02593561553955078, 0.025934080123901367, 0.025889984130859377, 0.025780992507934572, 0.025920703887939454, 0.025868703842163086, 0.02630022430419922, 0.026040992736816405, 0.02611561584472656, 0.028037504196166994, 0.026120288848876953, 0.025980928421020507, 0.026712064743041993, 0.02630451202392578, 0.026160991668701172, 0.02599033546447754, 0.025710784912109375, 0.025826047897338868, 0.025720096588134764, 0.02578451156616211, 0.02574188804626465, 0.025864160537719727, 0.02663532829284668, 0.025928672790527345, 0.02575881576538086, 0.026008480072021483, 0.026001407623291017, 0.026390527725219725, 0.025997312545776367, 0.026183679580688478, 0.02612838363647461, 0.026318559646606444, 0.025982303619384764, 0.026010271072387695, 0.026214496612548828, 0.026116287231445313, 0.02809017562866211, 0.027270912170410156, 0.02710163116455078, 0.02615449523925781, 0.025938432693481447, 0.025911296844482422, 0.025997312545776367, 0.025903104782104492, 0.026124256134033203, 0.02611203193664551, 0.026216447830200194, 0.026222591400146485, 0.026439680099487304, 0.026212352752685547, 0.026441728591918946, 0.02645792007446289, 0.026394336700439454, 0.026339199066162108, 0.026180192947387694, 0.026215967178344728, 0.0262392635345459, 0.02634566307067871, 0.026283615112304686, 0.027010271072387695, 0.0263536319732666, 0.02667398452758789, 0.026481664657592774, 0.026288223266601563, 0.02627881622314453, 0.026148639678955077, 0.02617888069152832, 0.026477056503295897, 0.02627337646484375, 0.02600124740600586, 0.026190816879272463, 0.025862144470214843, 0.02598297691345215, 0.0258702392578125, 0.026060127258300782, 0.0264869441986084, 0.026153760910034178, 0.02584761619567871, 0.025834592819213867, 0.02569251251220703, 0.025901439666748047, 0.02639686393737793, 0.025958175659179687, 0.026138368606567382, 0.0259703369140625, 0.025998111724853515, 0.02593356704711914, 0.025896640777587892, 0.02584217643737793, 0.027989343643188478, 0.026278656005859376, 0.025784320831298828, 0.02618079948425293, 0.025750335693359376, 0.02573311996459961, 0.025612287521362305, 0.02572697639465332, 0.025667583465576172, 0.025700128555297852, 0.025882240295410155, 0.026097631454467772, 0.025800607681274415, 0.02579638481140137, 0.025725887298583983, 0.025853952407836913, 0.025851680755615235, 0.026018016815185546, 0.025931776046752928, 0.025999359130859375, 0.0259102725982666, 0.02584592056274414, 0.025788415908813478, 0.02589731216430664, 0.025803104400634765, 0.025956512451171875, 0.026327039718627928, 0.0271011848449707, 0.02781564712524414, 0.025878816604614257, 0.025987071990966795, 0.02615705680847168, 0.026054784774780272, 0.02677555274963379, 0.02672764778137207, 0.026249727249145507, 0.02623014450073242, 0.026143104553222656, 0.026339872360229492, 0.025833152770996095, 0.025680192947387694, 0.025683967590332032, 0.02624118423461914, 
0.02903638458251953, 0.026050527572631835, 0.026215551376342773, 0.0258439998626709, 0.025703039169311524, 0.025483264923095703, 0.025386112213134766, 0.025445247650146486, 0.025597951889038087, 0.02548531150817871, 0.025345375061035156, 0.02553104019165039, 0.025456512451171875, 0.02550592041015625, 0.02534934425354004, 0.0254431037902832, 0.025608192443847655, 0.025628288269042968, 0.025559423446655273, 0.025914688110351563, 0.02542457580566406, 0.02550921630859375, 0.025526943206787108, 0.025432064056396485, 0.025429567337036132, 0.0254335994720459, 0.025571327209472656, 0.02545484733581543, 0.02564521598815918, 0.02556777572631836, 0.025960096359252928, 0.025379167556762696, 0.025788415908813478, 0.02581491279602051, 0.025536031723022462, 0.025469535827636718, 0.025353824615478516, 0.02538528060913086, 0.025522272109985353, 0.0256114559173584, 0.02536457633972168, 0.025764127731323243, 0.025663936614990234, 0.025548383712768553, 0.02538115119934082, 0.025436416625976562, 0.025698175430297853, 0.02551398468017578, 0.025611583709716796, 0.02559056091308594, 0.02554870414733887, 0.026038272857666016, 0.02554640007019043, 0.026290176391601562, 0.026368000030517577, 0.026713407516479493, 0.025778879165649415, 0.025661439895629884, 0.025655296325683592, 0.02539072036743164, 0.02542732810974121, 0.025348320007324218, 0.025444255828857423, 0.025391231536865233, 0.025303808212280274, 0.025446367263793946, 0.025319135665893555, 0.025352096557617186, 0.02530950355529785, 0.025294944763183592, 0.025306400299072267, 0.025332447052001952, 0.025556032180786132, 0.02527507209777832, 0.02543132781982422, 0.025485824584960938, 0.025528799057006835, 0.025456607818603514, 0.025384384155273436, 0.02538147163391113, 0.025511936187744142, 0.025454111099243164, 0.025561567306518554, 0.02550579261779785, 0.026420448303222658, 0.02562505531311035, 0.02557574462890625, 0.02557155227661133, 0.025443647384643556, 0.025473503112792967, 0.025823007583618163, 0.025467103958129882, 0.025435136795043944, 0.02541423988342285, 0.025398847579956054, 0.025498464584350587, 0.02535968017578125, 0.025353952407836913, 0.025321599960327148, 0.025283424377441407, 0.025476640701293945, 0.025532896041870118, 0.02549964714050293, 0.025364479064941405, 0.02539107131958008, 0.025423904418945313, 0.025345151901245117, 0.025367424011230467, 0.025257984161376954, 0.02551398468017578, 0.02535424041748047, 0.025398975372314454, 0.02534432029724121, 0.025362432479858397, 0.02549964714050293, 0.0253439998626709, 0.02668339157104492, 0.025701375961303712, 0.02550169563293457, 0.02552115249633789, 0.025377119064331054, 0.025353055953979492, 0.025702144622802733, 0.025364479064941405, 0.02523756790161133, 0.02533545684814453, 0.025268575668334962, 0.025221120834350585, 0.025231359481811523, 0.025478527069091796, 0.025141887664794922, 0.02524569511413574, 0.025233407974243165, 0.02535628890991211, 0.025251840591430662, 0.025216960906982423, 0.025343551635742187, 0.025309696197509765, 0.025270431518554688, 0.025216863632202147, 0.025191455841064452, 0.025414623260498048, 0.02529280090332031, 0.025229312896728515, 0.02519455909729004, 0.02521491241455078, 0.025189855575561523, 0.025227712631225585, 0.025149440765380858, 0.025241695404052734, 0.025219072341918947, 0.02558742332458496, 0.025501216888427734, 0.025620992660522462, 0.025413888931274414, 0.025601791381835937, 0.02543436813354492, 0.0255467529296875, 0.025397247314453125, 0.025403232574462892, 0.025348255157470703, 0.025812095642089843, 0.025426816940307618, 0.025462783813476563, 
0.025529407501220704, 0.025594816207885743, 0.025597951889038087, 0.025612064361572266, 0.025556320190429686, 0.025619327545166017, 0.02577155113220215, 0.025807327270507812, 0.026050559997558592, 0.02593791961669922, 0.02604627227783203, 0.025923295974731444, 0.025974336624145507, 0.025944320678710938, 0.025979007720947266, 0.026717344284057618, 0.026071039199829102, 0.025873247146606444, 0.025923583984375, 0.025852960586547853, 0.026010431289672852, 0.02615033531188965, 0.025840351104736328, 0.02567919921875, 0.025873056411743166, 0.025771392822265624, 0.02582387161254883, 0.02572195243835449, 0.02592576026916504, 0.02710403251647949, 0.025767936706542968, 0.02589695930480957, 0.025947168350219728, 0.026065887451171874, 0.02596656036376953, 0.026328096389770506, 0.02631372833251953, 0.02615500831604004, 0.02612428855895996, 0.02613043212890625, 0.02616316795349121, 0.025849855422973633, 0.02637385559082031, 0.026016319274902344, 0.02625494384765625, 0.025919551849365233, 0.025976543426513673, 0.025835775375366212, 0.02615433692932129, 0.025975584030151367, 0.026038272857666016, 0.025800703048706054, 0.02592518424987793, 0.02573151969909668, 0.02573311996459961, 0.025643007278442383, 0.025851936340332032, 0.025745376586914063, 0.025808895111083984, 0.025636863708496094, 0.02613248062133789, 0.025825279235839844, 0.025843711853027345, 0.025899007797241212, 0.02613657569885254, 0.025819135665893556, 0.026259519577026366, 0.026299936294555664, 0.025776544570922853, 0.025761791229248047, 0.025819135665893556, 0.025947647094726564, 0.02589952087402344, 0.02626464080810547, 0.025820095062255858, 0.025831424713134765, 0.025858047485351563, 0.025821184158325194, 0.026519392013549806, 0.025938655853271483, 0.025937023162841796, 0.026006399154663087, 0.026228031158447265, 0.027752992630004882, 0.026013856887817384, 0.02577961540222168, 0.025608192443847655, 0.025586271286010744, 0.02537183952331543, 0.025367359161376953, 0.02530713653564453, 0.02556723213195801, 0.025382911682128906, 0.02532147216796875, 0.02529280090332031, 0.02530713653564453, 0.025417343139648437, 0.025962623596191406, 0.0269171199798584, 0.025178016662597655, 0.02521638488769531, 0.025164512634277342, 0.02510643196105957, 0.025210271835327147, 0.02514748764038086, 0.02522889518737793, 0.025086143493652343, 0.025098976135253907, 0.025114208221435546, 0.025894943237304686, 0.025419551849365233, 0.025241472244262694, 0.025537055969238283, 0.02521232032775879, 0.025225791931152344, 0.025405664443969727, 0.025833375930786134, 0.025677503585815428, 0.02630191993713379, 0.025281471252441408, 0.025276416778564452, 0.025234655380249025, 0.025194944381713866, 0.025334112167358397, 0.025161727905273438, 0.025145343780517578, 0.02517193603515625, 0.025176095962524413, 0.02508595275878906, 0.02515100860595703, 0.025202688217163087, 0.025231840133666993, 0.025241600036621094, 0.025159135818481445, 0.025141792297363283, 0.025067327499389648, 0.02518649673461914, 0.02540336036682129, 0.02548726463317871, 0.025294815063476563, 0.025102495193481445, 0.027011072158813477, 0.02572073554992676, 0.02564009666442871, 0.025543615341186522, 0.025540479660034178, 0.02572096061706543, 0.02558768081665039, 0.025532447814941406, 0.02565456008911133, 0.02566828727722168, 0.02564214324951172, 0.025633472442626953, 0.025473215103149413, 0.02548531150817871, 0.025472000122070314, 0.025656320571899413, 0.02562188720703125, 0.025522239685058595, 0.02548588752746582, 0.02553775978088379, 0.025651327133178713, 0.025707008361816407, 0.02564838409423828, 
0.02621321678161621, 0.02647609519958496, 0.02687228775024414, 0.025540159225463866, 0.02564518356323242, 0.025647455215454102, 0.025612287521362305, 0.025697696685791017, 0.02547331237792969, 0.025296384811401368, 0.02530796813964844, 0.025163232803344728, 0.025196544647216795, 0.025081920623779296, 0.02518412780761719, 0.025066080093383788, 0.02595430374145508, 0.02513920021057129, 0.025171199798583985, 0.02558028793334961, 0.02639801597595215, 0.025422527313232423, 0.025364479064941405, 0.025255359649658204, 0.025251968383789063, 0.02541811180114746, 0.025524288177490233, 0.025415231704711914, 0.02556153678894043, 0.025411487579345703, 0.025332927703857422, 0.02518294334411621, 0.025226655960083007, 0.025160480499267578, 0.025266176223754884, 0.025155775070190428, 0.025187679290771484, 0.02534604835510254, 0.02531990432739258, 0.025280511856079102, 0.02643974494934082, 0.026018016815185546, 0.02553241539001465, 0.02524937629699707, 0.0253767032623291, 0.02522979164123535, 0.025221120834350585, 0.025251359939575196, 0.026132959365844727, 0.02534809684753418, 0.0254619197845459, 0.025646944046020508, 0.025381792068481446, 0.02535843276977539, 0.025314655303955078, 0.025240224838256838, 0.025309183120727538, 0.025370624542236327, 0.02529484748840332, 0.02529689598083496, 0.025233407974243165, 0.02530713653564453, 0.025354143142700195, 0.025337600708007814, 0.02551024055480957, 0.025475072860717773, 0.025458688735961913, 0.02534160041809082, 0.02545199966430664, 0.025698368072509765, 0.02570323181152344, 0.02556716728210449, 0.025609888076782227, 0.025631135940551757, 0.025595903396606445, 0.025462623596191405, 0.027087007522583008, 0.026380287170410157, 0.02559756851196289, 0.02643574333190918, 0.027164415359497072, 0.025688543319702148, 0.02572902488708496, 0.02548940849304199, 0.025341951370239257, 0.02535424041748047, 0.025276512145996095, 0.02543401527404785, 0.025403392791748046, 0.025377920150756836, 0.025826175689697264, 0.025370624542236327, 0.025454591751098633, 0.025487360000610353, 0.02529193687438965, 0.025212959289550783, 0.02517238426208496, 0.02522902488708496, 0.025365472793579102, 0.025269983291625976, 0.025264127731323242, 0.02531942367553711, 0.025206144332885743]",tokens/s,38.86677901784819,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in 
target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4300.57472,4676.517888,0.0,4273.995776,4273.234432,s,1,10.92390234375,10.92390234375,0.0,10.92390234375,10.92390234375,10.92390234375,10.92390234375,[10.92390234375],,kWh,9.766080204998767e-05,1.0765113677532453e-05,2.9338079025997832e-05,0.00013776399475351795,,MB,4267.23328,4789.764096,0.0,4372.56192,4345.727488,s,10,1.9558348999023436,0.1955834899902344,0.0005623967211336827,0.19550585174560547,0.19618326416015625,0.19646907348632814,0.19669772094726562,"[0.1953104705810547, 0.19491679382324217, 0.19554595947265624, 0.1954701385498047, 0.19539181518554688, 0.19554156494140626, 0.19604669189453125, 0.19473683166503905, 0.1961197509765625, 0.1967548828125]",tokens/s,1308.903936691089,kWh,5.769409732516472e-06,6.362576179991425e-07,3.815395209176265e-06,1.0221062559691879e-05,tokens/kWh,25046319.646801703,MB,4268.982272,4804.44416,0.0,4387.241984,4345.730048,s,10,22.26866162109375,2.226866162109375,0.0126298046444065,2.228846923828125,2.2398080322265628,2.241662194824219,2.2431455249023435,"[2.21790966796875, 2.2042265625, 2.214501708984375, 2.233455322265625, 2.215757080078125, 2.23939599609375, 2.224238525390625, 2.243516357421875, 2.237091796875, 2.238568603515625]",tokens/s,28.29087848742734,kWh,6.495487555123338e-05,7.164428798795252e-06,4.276575316682381e-05,0.00011488505751685241,tokens/kWh,548374.186875944,,s,630,22.26608266448976,0.035342988356332934,0.0004971713368762225,0.03531028747558594,0.03580552597045898,0.03610323543548584,0.03699232967376709,"[0.036014720916748046, 0.036464576721191404, 0.035372447967529294, 0.035560096740722656, 0.035186080932617186, 0.035113502502441406, 0.0362490234375, 0.03595737457275391, 0.03543225479125976, 0.03549993515014648, 0.035907264709472655, 0.03558256149291992, 0.03532147216796875, 0.03532828903198242, 
0.035262561798095705, 0.03505871963500977, 0.035173023223876956, 0.03487145614624024, 0.034810016632080075, 0.03529299163818359, 0.03482028961181641, 0.035074047088623043, 0.034985023498535155, 0.03506681442260742, 0.034995967864990235, 0.03518899154663086, 0.035092479705810545, 0.03518463897705078, 0.035321792602539065, 0.03517193603515625, 0.035095008850097656, 0.03522534561157226, 0.03521868896484375, 0.035318782806396484, 0.03533939361572266, 0.03533452987670899, 0.035094974517822265, 0.03527686309814453, 0.03519075012207031, 0.03538742446899414, 0.035182048797607425, 0.03488201522827149, 0.03520025634765625, 0.0351506233215332, 0.034819297790527344, 0.034698047637939454, 0.03487744140625, 0.03485081481933594, 0.034864383697509764, 0.035033214569091795, 0.03491904067993164, 0.03507555389404297, 0.03500291061401367, 0.03486105728149414, 0.03515596771240234, 0.03487744140625, 0.03487871932983398, 0.03511536026000977, 0.03511747360229492, 0.035135486602783206, 0.035118846893310546, 0.03499033737182617, 0.03501260757446289, 0.03607567977905273, 0.03526518249511719, 0.03518406295776367, 0.03526895904541016, 0.03527907180786133, 0.035333759307861326, 0.03555078506469726, 0.035618976593017576, 0.035487552642822266, 0.035557697296142575, 0.035377696990966795, 0.03527679824829102, 0.03542966461181641, 0.035490528106689456, 0.03546883010864258, 0.03538489532470703, 0.03541904067993164, 0.035489791870117186, 0.03653535842895508, 0.039287166595458986, 0.03572383880615235, 0.03523379135131836, 0.03493273544311523, 0.03504537582397461, 0.0347586555480957, 0.03458822250366211, 0.034750656127929686, 0.03472316741943359, 0.03516918563842773, 0.03490611267089844, 0.03470336151123047, 0.03451465606689453, 0.034562335968017575, 0.03435289764404297, 0.03453363037109375, 0.03433830261230469, 0.03450931167602539, 0.03447769546508789, 0.03419171142578125, 0.034277408599853516, 0.03440332794189453, 0.034165760040283204, 0.034344959259033206, 0.03452252960205078, 0.034460254669189457, 0.03444508743286133, 0.03437958526611328, 0.03490857696533203, 0.03449651336669922, 0.03442278289794922, 0.03435244750976563, 0.03445532989501953, 0.034526111602783204, 0.03461529541015625, 0.03458796691894531, 0.03469382476806641, 0.034741409301757814, 0.03491449737548828, 0.03487161636352539, 0.035006431579589846, 0.03497580718994141, 0.034941249847412106, 0.03467673492431641, 0.03579452896118164, 0.03495616149902344, 0.034885665893554685, 0.03518588638305664, 0.03515875244140625, 0.03518825531005859, 0.03516880035400391, 0.035272705078125, 0.03533574295043945, 0.035251678466796874, 0.03518767929077148, 0.03523174285888672, 0.03527475357055664, 0.0352952651977539, 0.03531977462768555, 0.035135360717773435, 0.035324031829833985, 0.035645439147949216, 0.03526780700683594, 0.03547216033935547, 0.03556147384643555, 0.03532799911499023, 0.03521331024169922, 0.03540991973876953, 0.035675136566162106, 0.03525056076049805, 0.03546585464477539, 0.035337600708007816, 0.03528076934814453, 0.03510553741455078, 0.03497884750366211, 0.03501929473876953, 0.03502944183349609, 0.03503513717651367, 0.035089534759521486, 0.035044063568115236, 0.03471318435668945, 0.034732608795166015, 0.034609153747558595, 0.03457024002075195, 0.03456735992431641, 0.03515868759155273, 0.03503510284423828, 0.03470355224609375, 0.034872447967529294, 0.035127201080322266, 0.034657249450683596, 0.03489791870117188, 0.035059711456298825, 0.03497574234008789, 0.0350140495300293, 0.035103328704833986, 0.03508956909179688, 0.0358633918762207, 0.03546112060546875, 
0.035166206359863283, 0.03514572906494141, 0.03513958358764648, 0.035133438110351564, 0.035094528198242186, 0.034977790832519534, 0.034988033294677735, 0.035194881439208986, 0.037384449005126955, 0.03515801620483398, 0.03530688095092773, 0.03536505508422851, 0.03546515274047852, 0.03563910293579101, 0.03532400131225586, 0.035281505584716794, 0.035590145111083986, 0.035659774780273434, 0.03547750473022461, 0.03547727966308594, 0.03544905471801758, 0.03559404754638672, 0.03534048080444336, 0.03539148712158203, 0.03554662322998047, 0.035366432189941406, 0.035713214874267575, 0.039596832275390625, 0.035700225830078126, 0.036565502166748046, 0.035659774780273434, 0.03551846313476562, 0.03567567825317383, 0.035215614318847656, 0.0351234245300293, 0.0351907844543457, 0.0351723518371582, 0.03501987075805664, 0.034683807373046875, 0.03471974563598633, 0.034627296447753905, 0.03469481658935547, 0.03471424102783203, 0.03584000015258789, 0.03457843017578125, 0.034548961639404296, 0.03470159912109375, 0.03495987319946289, 0.035915775299072264, 0.03547750473022461, 0.03486310577392578, 0.03584815979003906, 0.036963520050048826, 0.03532659149169922, 0.035160289764404294, 0.03530956649780274, 0.03528230285644531, 0.03531808090209961, 0.03530556869506836, 0.03549929428100586, 0.03582048034667969, 0.03566592025756836, 0.03523379135131836, 0.035520416259765625, 0.03509411239624023, 0.03550054550170898, 0.03515315246582031, 0.03516697692871094, 0.035167552947998046, 0.03521308898925781, 0.03536732864379883, 0.035590015411376956, 0.03520937728881836, 0.034947391510009765, 0.03497702407836914, 0.0348436164855957, 0.03495731353759766, 0.03503513717651367, 0.03508838272094727, 0.03509657669067383, 0.03509436798095703, 0.035036319732666014, 0.035195903778076174, 0.034947071075439456, 0.03509036636352539, 0.03524409484863281, 0.03531100845336914, 0.03486883163452149, 0.03535497665405273, 0.03519145584106445, 0.03504742431640625, 0.034936702728271485, 0.03489804840087891, 0.03508838272094727, 0.034947071075439456, 0.03496345520019531, 0.034805408477783205, 0.034701057434082035, 0.03481865692138672, 0.03509862518310547, 0.035146846771240234, 0.03525312042236328, 0.03501059341430664, 0.03492454528808594, 0.03485001754760742, 0.03502364730834961, 0.03510476684570313, 0.03500425720214844, 0.03516227340698242, 0.03521535873413086, 0.03527679824829102, 0.03510233688354492, 0.03505769729614258, 0.03511331176757813, 0.03507199859619141, 0.035093696594238284, 0.03509491348266602, 0.035149246215820315, 0.03507283020019531, 0.035178241729736326, 0.035124671936035155, 0.03566694259643555, 0.03556556701660156, 0.035480640411376954, 0.035414974212646486, 0.03521734237670898, 0.03544380950927734, 0.03625878524780273, 0.03550566482543945, 0.0358682861328125, 0.03563932800292969, 0.035396446228027345, 0.03528908920288086, 0.035321758270263674, 0.036691616058349606, 0.03531401443481445, 0.035555328369140625, 0.035738880157470704, 0.03549660873413086, 0.03569219207763672, 0.035539390563964844, 0.0366382064819336, 0.03621734237670898, 0.035418113708496096, 0.03589324951171875, 0.03579852676391602, 0.03582207870483398, 0.036104129791259765, 0.03562092971801758, 0.03541155242919922, 0.035416065216064455, 0.03561027145385742, 0.0353512954711914, 0.03532185745239258, 0.035127201080322266, 0.035170398712158206, 0.0352542724609375, 0.03533004760742187, 0.035155296325683594, 0.03518326568603516, 0.03534438323974609, 0.03749017715454102, 0.036378368377685544, 0.03623190307617188, 0.03549769592285156, 0.03537952041625977, 0.03520819091796875, 
0.035345409393310545, 0.035364864349365234, 0.035503646850585935, 0.035332096099853515, 0.0354923210144043, 0.035684032440185545, 0.03527030563354492, 0.03541263961791992, 0.035495616912841796, 0.035137855529785156, 0.035434497833251956, 0.03523993682861328, 0.03554304122924805, 0.0357498893737793, 0.035127296447753906, 0.03593199920654297, 0.03535446548461914, 0.0353526725769043, 0.03584636688232422, 0.03544473648071289, 0.03522544097900391, 0.03537526321411133, 0.03528908920288086, 0.03626598358154297, 0.03555737686157227, 0.03512319946289062, 0.03497574234008789, 0.03490816116333008, 0.03495935821533203, 0.03500646209716797, 0.03595980834960937, 0.03546860885620117, 0.0356563835144043, 0.0351907844543457, 0.03508838272094727, 0.03551641464233399, 0.03534185409545899, 0.035324256896972654, 0.035233921051025394, 0.03526607894897461, 0.035119583129882816, 0.03524607849121094, 0.03527206420898438, 0.03520358276367187, 0.03501068878173828, 0.03504742431640625, 0.034841598510742186, 0.034825214385986326, 0.03474764633178711, 0.035059967041015626, 0.03501907348632813, 0.0349370231628418, 0.034740222930908206, 0.03513958358764648, 0.034852863311767575, 0.03489791870117188, 0.035063648223876955, 0.0351110725402832, 0.035194881439208986, 0.035143680572509765, 0.03548115158081055, 0.03524857711791992, 0.03527065658569336, 0.03518668746948242, 0.03537715148925781, 0.035434497833251956, 0.03525190353393555, 0.03547987365722656, 0.035418113708496096, 0.03531980895996094, 0.035218849182128906, 0.03518320083618164, 0.03524607849121094, 0.036335521697998044, 0.0355206069946289, 0.03547340774536133, 0.03506927871704101, 0.0356808967590332, 0.035172286987304686, 0.03502614212036133, 0.03580377578735352, 0.0354859504699707, 0.03537100982666016, 0.035381153106689454, 0.03579068756103516, 0.035200672149658205, 0.03543833541870117, 0.03542512130737305, 0.035745792388916016, 0.03611027145385742, 0.03539878463745117, 0.03509942245483398, 0.035821151733398435, 0.03562982559204102, 0.034938880920410156, 0.034912254333496096, 0.03490611267089844, 0.03488153457641602, 0.03545427322387695, 0.03495100784301758, 0.03499913787841797, 0.035135486602783206, 0.036274177551269535, 0.03499622344970703, 0.03508224105834961, 0.03508335876464844, 0.035406303405761716, 0.03643423843383789, 0.03535475158691406, 0.035266559600830076, 0.035356670379638674, 0.03572531127929687, 0.03517766571044922, 0.03700409698486328, 0.0357105598449707, 0.035557472229003906, 0.035461441040039066, 0.03581491088867188, 0.0353524169921875, 0.03553142547607422, 0.03555942535400391, 0.036173824310302735, 0.03758009719848633, 0.03552531051635742, 0.03571712112426758, 0.035808609008789065, 0.035645919799804686, 0.03528297424316406, 0.0360978889465332, 0.035352863311767575, 0.03524995040893555, 0.03535212707519531, 0.03546796798706055, 0.03545292663574219, 0.035579902648925785, 0.03593830490112305, 0.035573760986328126, 0.035677375793457033, 0.035557823181152345, 0.03574208068847656, 0.03567411041259766, 0.03578633499145508, 0.0358936653137207, 0.035941566467285156, 0.035547969818115234, 0.03774195098876953, 0.03589593505859375, 0.035649280548095706, 0.035703041076660155, 0.035697792053222654, 0.03547020721435547, 0.03558982467651367, 0.03570105743408203, 0.035622913360595705, 0.035357982635498046, 0.0352421760559082, 0.03634505462646485, 0.035899295806884765, 0.03562326431274414, 0.035574302673339844, 0.0354252815246582, 0.03545737457275391, 0.03591574478149414, 0.03547820663452148, 0.03549798583984375, 0.03558108901977539, 0.03539440155029297, 
0.035487743377685545, 0.03560403060913086, 0.03565407943725586, 0.03556259155273438, 0.03627715301513672, 0.035528289794921876, 0.035463584899902346, 0.03537673568725586, 0.03528335952758789, 0.035297279357910154, 0.03522355270385742, 0.0353546257019043, 0.03551846313476562, 0.035340286254882815, 0.03532745742797851, 0.035533344268798825, 0.03545702362060547, 0.03531161499023437, 0.03571712112426758, 0.0351907844543457, 0.035151039123535156, 0.03505849456787109, 0.03505152130126953, 0.03530547332763672, 0.03506995010375977, 0.035125247955322264, 0.03580518341064453, 0.035272705078125, 0.035248126983642575, 0.03539260864257812, 0.03542108917236328, 0.03552809524536133, 0.035363422393798825, 0.035264511108398434, 0.03542134475708008, 0.035379840850830076, 0.035450111389160155, 0.03633414459228516, 0.035549598693847655, 0.03539148712158203, 0.035796993255615236, 0.03554099273681641, 0.03584553527832031, 0.03569315338134765, 0.03527884674072266, 0.03548137664794922, 0.035348705291748043, 0.03610214233398437, 0.035536544799804684, 0.03564742279052734, 0.03554300689697266, 0.0357196159362793, 0.03624604797363281, 0.03535244750976563, 0.035579200744628905, 0.035488094329833984, 0.035422687530517576, 0.03539932632446289, 0.03542201614379883, 0.03556988906860352, 0.035477344512939456, 0.035409984588623045, 0.03530115127563477, 0.03550476837158203, 0.035381248474121094, 0.035262081146240236, 0.03641535949707031, 0.03685817718505859, 0.035579551696777345, 0.03513974380493164, 0.03527622222900391, 0.035027198791503907, 0.035293888092041016, 0.03543369674682617, 0.035237728118896486, 0.03547785568237305, 0.035178207397460935, 0.035447681427001956, 0.03548972702026367, 0.03518265533447266, 0.0352911376953125, 0.035362049102783205, 0.03530342483520508, 0.035364864349365234, 0.035252193450927734, 0.03514857482910156, 0.03521529769897461, 0.035493953704833985, 0.03601408004760742, 0.0359628791809082, 0.03576355361938476, 0.03543497467041016, 0.03558816146850586, 0.03555075073242187, 0.0356478385925293, 0.035620128631591794, 0.03591465759277344, 0.03562092971801758, 0.03581942367553711, 0.03553839874267578, 0.03577715301513672, 0.03557321548461914, 0.035619361877441406, 0.035632606506347655, 0.03623123168945312, 0.03572169494628906, 0.03556665420532226, 0.035572673797607424, 0.03536076736450195, 0.035479198455810546, 0.03537267303466797, 0.03539023971557617, 0.03545075225830078, 0.03547324752807617, 0.035324127197265624]",tokens/s,28.29415526264674,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 4 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 28.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 191018 has 14.74 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 80.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1349, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1142, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 868, in forward hidden_states = self.mlp(hidden_states) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 781, in forward final_hidden_states.index_add_(0, top_x, current_hidden_states.to(hidden_states.dtype)) RuntimeError: CUDA error: invalid configuration argument CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1582.342144,1596.915712,0.0,1201.668096,1189.151232,s,1,8.6505029296875,8.6505029296875,0.0,8.6505029296875,8.6505029296875,8.6505029296875,8.6505029296875,[8.6505029296875],,kWh,3.685549087500135e-05,4.0580436646186965e-06,1.1927509542004194e-05,5.284104408162424e-05,,MB,1563.049984,1794.048,0.0,1384.12032,1351.367168,s,10,0.4121473541259766,0.04121473541259766,0.0002022758176152118,0.04114734268188476,0.04153757705688477,0.041589892578124996,0.04163174499511719,"[0.04164220809936523, 0.04098281478881836, 0.041086112976074216, 0.04125983810424805, 0.04104713439941406, 0.041223487854003905, 0.04108512115478516, 0.04152595138549805, 0.04118703842163086, 0.04110764694213867]",tokens/s,6211.370701211666,kWh,1.2486447791132654e-06,1.3770349550795996e-07,8.256404990683729e-07,2.211988773689598e-06,tokens/kWh,115732956.26315133,MB,1572.442112,1835.99104,0.0,1426.06336,1407.548416,s,10,10.61557373046875,1.061557373046875,0.005552198334380235,1.0607288208007812,1.0706404907226563,1.0711919494628908,1.071633116455078,"[1.0594141845703124, 1.0570184326171874, 1.0705179443359376, 1.06061181640625, 1.0608458251953126, 1.0575704345703125, 1.071743408203125, 1.0526453857421876, 1.062847900390625, 
1.0623583984375]",tokens/s,59.34676881304852,kWh,3.098585049172095e-05,3.4171700681130072e-06,1.5428455837531704e-05,4.983147639736567e-05,tokens/kWh,1264261.1568966173,,s,630,10.611997209548951,0.016844440015157062,0.0002947726225189542,0.01678551959991455,0.017162866401672364,0.01731639394760132,0.017957598571777342,"[0.017451072692871095, 0.017297407150268555, 0.017342464447021484, 0.01744895935058594, 0.01720684814453125, 0.017137151718139648, 0.017303680419921873, 0.017027904510498047, 0.01723353576660156, 0.0169168643951416, 0.016577983856201173, 0.016558048248291015, 0.016688800811767577, 0.016571327209472655, 0.016566272735595702, 0.01682009506225586, 0.016686399459838866, 0.016845632553100585, 0.016680959701538087, 0.016692256927490233, 0.016493024826049803, 0.016673023223876954, 0.016505088806152344, 0.01654560089111328, 0.016789695739746095, 0.016633440017700195, 0.016837024688720705, 0.016939008712768554, 0.016987327575683595, 0.016929599761962892, 0.016754079818725585, 0.016622047424316406, 0.016738431930541992, 0.0167587833404541, 0.016670719146728515, 0.016709632873535156, 0.016766592025756834, 0.016744384765625, 0.016711679458618164, 0.016721887588500975, 0.01676131248474121, 0.016676000595092774, 0.016565088272094727, 0.016502784729003905, 0.016496639251708984, 0.01643712043762207, 0.017168512344360353, 0.016590368270874022, 0.017137247085571287, 0.016565120697021485, 0.016664287567138673, 0.016777503967285157, 0.016833536148071288, 0.016686080932617187, 0.016695295333862305, 0.016498239517211914, 0.016601280212402345, 0.016535423278808595, 0.01654207992553711, 0.0186079044342041, 0.01702729606628418, 0.01662156867980957, 0.016537599563598633, 0.016503583908081054, 0.016575647354125977, 0.016554847717285156, 0.016551008224487306, 0.01651395225524902, 0.016324607849121094, 0.016479583740234376, 0.016461471557617187, 0.01647702407836914, 0.01651113510131836, 0.016631296157836914, 0.01672809600830078, 0.01696201515197754, 0.01697996711730957, 0.016798944473266603, 0.01676144027709961, 0.0167524471282959, 0.016743904113769532, 0.01680476760864258, 0.016920576095581053, 0.01657151985168457, 0.01662656021118164, 0.016687103271484375, 0.016846847534179688, 0.016939008712768554, 0.01695884895324707, 0.01686355209350586, 0.016666431427001954, 0.016719808578491212, 0.016720447540283203, 0.01716223907470703, 0.016729663848876954, 0.01656262397766113, 0.016525312423706053, 0.016654144287109374, 0.016635391235351564, 0.016847551345825194, 0.01699737548828125, 0.016933696746826172, 0.016909631729125976, 0.016720703125, 0.016834592819213866, 0.016748575210571288, 0.017229824066162108, 0.016768896102905273, 0.016633663177490234, 0.016742719650268554, 0.016685056686401366, 0.016654016494750977, 0.016572351455688476, 0.01679782485961914, 0.016810239791870116, 0.016915840148925783, 0.016792192459106445, 0.017161920547485353, 0.016748287200927733, 0.016734207153320312, 0.016648767471313476, 0.016643104553222657, 0.016561119079589844, 0.017476640701293945, 0.017296255111694334, 0.01794451141357422, 0.017136831283569336, 0.01717945671081543, 0.017006591796875, 0.017069536209106444, 0.016922399520874022, 0.016787967681884765, 0.016718080520629883, 0.016755807876586915, 0.01679657554626465, 0.016773120880126953, 0.016802976608276367, 0.01673097610473633, 0.016815935134887695, 0.01702112007141113, 0.016847936630249024, 0.01666099166870117, 0.016709407806396483, 0.016681631088256835, 0.016644096374511717, 0.018257568359375, 0.019118431091308594, 0.01719478416442871, 0.017072351455688476, 
0.017295360565185547, 0.017141632080078125, 0.016875232696533203, 0.01685696029663086, 0.01693503952026367, 0.01691484832763672, 0.016858848571777343, 0.016818464279174803, 0.01671500778198242, 0.016831167221069337, 0.01713145637512207, 0.018671743392944334, 0.01787017631530762, 0.017562271118164063, 0.017112640380859374, 0.016931264877319337, 0.01722127914428711, 0.016879968643188477, 0.01690345573425293, 0.016857376098632814, 0.016716224670410156, 0.01686028861999512, 0.016845695495605467, 0.016742399215698242, 0.016725215911865234, 0.016838464736938476, 0.016762943267822267, 0.016718751907348634, 0.01678335952758789, 0.016811391830444337, 0.016959104537963867, 0.016870399475097657, 0.016831935882568358, 0.016806463241577148, 0.016734207153320312, 0.016711679458618164, 0.016660255432128908, 0.017047775268554687, 0.016889312744140624, 0.01686787223815918, 0.017353471755981446, 0.017210752487182616, 0.01717737579345703, 0.016963136672973632, 0.016852800369262694, 0.016831104278564452, 0.01692803192138672, 0.01665865516662598, 0.01672857666015625, 0.016793344497680666, 0.01693712043762207, 0.016992223739624022, 0.016904319763183594, 0.016969280242919924, 0.017099199295043947, 0.017080320358276366, 0.016892927169799805, 0.016773183822631835, 0.016613471984863282, 0.016587615966796875, 0.016647647857666016, 0.016623584747314454, 0.016553728103637696, 0.016537919998168945, 0.016626176834106447, 0.016721920013427736, 0.016703487396240235, 0.017117183685302736, 0.016946367263793945, 0.016955839157104493, 0.016781696319580076, 0.016842111587524414, 0.01670003128051758, 0.016695295333862305, 0.01661270332336426, 0.016765600204467775, 0.01696710395812988, 0.016869951248168945, 0.016885759353637696, 0.016709632873535156, 0.0167106876373291, 0.016724960327148437, 0.01666662406921387, 0.01658399963378906, 0.016628416061401367, 0.01665433692932129, 0.01660518455505371, 0.017032224655151366, 0.0168253116607666, 0.0169671688079834, 0.016806047439575197, 0.016836959838867186, 0.01687321662902832, 0.0172359676361084, 0.016883935928344727, 0.01679145622253418, 0.01678335952758789, 0.01681011199951172, 0.016683008193969725, 0.01677516746520996, 0.016932159423828124, 0.016926496505737305, 0.016823200225830077, 0.01686387252807617, 0.016883712768554687, 0.01677052879333496, 0.016775711059570313, 0.016723968505859374, 0.01682431983947754, 0.01679974365234375, 0.016901920318603516, 0.01700864028930664, 0.01696995162963867, 0.01699827194213867, 0.017452928543090822, 0.01718092727661133, 0.016957439422607423, 0.016793279647827147, 0.016757055282592772, 0.016690303802490234, 0.01674025535583496, 0.016690143585205076, 0.016697343826293946, 0.016676864624023437, 0.017032480239868163, 0.017193695068359376, 0.01687049674987793, 0.016724063873291017, 0.016806720733642578, 0.01682431983947754, 0.01680998420715332, 0.017360895156860352, 0.017012224197387696, 0.016830976486206056, 0.016864831924438477, 0.01696988868713379, 0.01690140724182129, 0.016843679428100587, 0.017008447647094728, 0.01709699249267578, 0.017082368850708008, 0.0168407039642334, 0.016713727951049806, 0.016695295333862305, 0.016752639770507814, 0.01679897689819336, 0.0168721923828125, 0.0169881591796875, 0.01683865547180176, 0.016904191970825197, 0.016815488815307617, 0.016633823394775392, 0.016732831954956055, 0.01656831932067871, 0.01681203269958496, 0.016695295333862305, 0.01658060836791992, 0.01659596824645996, 0.016638528823852538, 0.01656262397766113, 0.016623615264892578, 0.016632863998413086, 0.016630752563476563, 0.017035039901733398, 
0.01665660858154297, 0.01661337661743164, 0.01661337661743164, 0.01660518455505371, 0.016677984237670897, 0.016710559844970704, 0.016738304138183592, 0.01687084770202637, 0.01699897575378418, 0.016915935516357422, 0.016595487594604493, 0.017386816024780274, 0.01676767921447754, 0.01663711929321289, 0.01654457664489746, 0.01667647933959961, 0.017084096908569334, 0.017472192764282225, 0.01714995193481445, 0.016780799865722656, 0.016638463973999023, 0.016817663192749025, 0.01654550361633301, 0.016712480545043946, 0.01670252799987793, 0.016652416229248047, 0.016610111236572266, 0.01677926445007324, 0.016563552856445313, 0.01661404800415039, 0.01765171241760254, 0.016590848922729492, 0.016531455993652345, 0.016547840118408205, 0.016600128173828124, 0.0165283203125, 0.016530847549438475, 0.01652387237548828, 0.01656831932067871, 0.016573728561401366, 0.016622304916381836, 0.01652236747741699, 0.01661427116394043, 0.016621183395385743, 0.016637632369995117, 0.016906335830688478, 0.017150367736816406, 0.017219776153564452, 0.01697750473022461, 0.016859136581420898, 0.01674870491027832, 0.01677337646484375, 0.016680608749389647, 0.016644447326660157, 0.016742399215698242, 0.01666662406921387, 0.017180543899536133, 0.017081663131713866, 0.016775999069213867, 0.016821504592895508, 0.016838464736938476, 0.01674336051940918, 0.01681407928466797, 0.01729030418395996, 0.01687443161010742, 0.017922048568725587, 0.017962944030761718, 0.017193023681640624, 0.01722572708129883, 0.01738137626647949, 0.016969728469848632, 0.016934911727905275, 0.018257568359375, 0.017338720321655274, 0.018175199508666993, 0.01744156837463379, 0.017542463302612304, 0.017328832626342775, 0.017270784378051757, 0.01712656021118164, 0.017015199661254882, 0.016795904159545898, 0.016736064910888672, 0.017590175628662108, 0.01686895942687988, 0.01683340835571289, 0.01686275291442871, 0.01694748878479004, 0.016906015396118163, 0.0169005126953125, 0.01682156753540039, 0.017078975677490234, 0.017127424240112304, 0.01694476890563965, 0.016777599334716797, 0.01681782341003418, 0.0167541446685791, 0.01681702423095703, 0.01677926445007324, 0.016979135513305665, 0.016814943313598632, 0.01694921684265137, 0.017285120010375975, 0.017466367721557616, 0.017293664932250978, 0.017140384674072265, 0.016879615783691407, 0.01703321647644043, 0.017006591796875, 0.017008319854736328, 0.016900415420532226, 0.016743743896484375, 0.017015487670898437, 0.016602975845336914, 0.01669264030456543, 0.016739072799682616, 0.01672985649108887, 0.016734464645385742, 0.016562175750732423, 0.01662156867980957, 0.016611328125, 0.016620800018310546, 0.016610048294067384, 0.016682815551757813, 0.016639999389648438, 0.016533695220947265, 0.016504512786865235, 0.016662847518920897, 0.01666489601135254, 0.01659516716003418, 0.016832735061645506, 0.016553567886352538, 0.01653398323059082, 0.016618623733520507, 0.016832832336425782, 0.016665151596069336, 0.01658470344543457, 0.016664575576782227, 0.01684480094909668, 0.016701440811157226, 0.01678303909301758, 0.01663167953491211, 0.016662975311279297, 0.016668479919433595, 0.0165762882232666, 0.01668943977355957, 0.016667999267578126, 0.01648259162902832, 0.01657244873046875, 0.016601343154907227, 0.01662518310546875, 0.016603839874267577, 0.016564224243164064, 0.01657651138305664, 0.016479711532592773, 0.0165949764251709, 0.016429567337036134, 0.0165150089263916, 0.016563743591308595, 0.016546239852905275, 0.01658684730529785, 0.016486400604248046, 0.016728063583374024, 0.01696476745605469, 0.016734880447387697, 
0.01664224052429199, 0.016639999389648438, 0.016734207153320312, 0.01668035125732422, 0.01697443199157715, 0.017004480361938478, 0.01674553680419922, 0.016666976928710938, 0.016740447998046876, 0.016773183822631835, 0.01672652816772461, 0.016781312942504883, 0.016699392318725585, 0.016760223388671874, 0.016706144332885742, 0.01675468826293945, 0.016752479553222656, 0.016826528549194336, 0.01679088020324707, 0.016910560607910158, 0.016717920303344725, 0.016873823165893555, 0.016855039596557618, 0.01680499267578125, 0.01682111930847168, 0.017536735534667967, 0.017137567520141603, 0.01711680030822754, 0.01692310333251953, 0.01679132843017578, 0.01684707260131836, 0.016873151779174804, 0.016877887725830078, 0.016928543090820314, 0.016996383666992188, 0.01693244743347168, 0.016800352096557617, 0.016756032943725584, 0.016765056610107423, 0.01681056022644043, 0.016893951416015626, 0.017053152084350588, 0.01720944023132324, 0.017316287994384765, 0.017229663848876954, 0.017062047958374023, 0.016963584899902344, 0.016957439422607423, 0.01697737693786621, 0.016949792861938477, 0.017065568923950194, 0.01710483169555664, 0.016903999328613282, 0.01678767967224121, 0.01681657600402832, 0.016982015609741212, 0.0170700798034668, 0.016920576095581053, 0.016910335540771485, 0.016903392791748045, 0.01680668830871582, 0.01684480094909668, 0.017215360641479494, 0.01670297622680664, 0.01661734390258789, 0.016546560287475587, 0.016672479629516603, 0.016572160720825194, 0.016679328918457033, 0.016694528579711914, 0.016683839797973634, 0.0167445125579834, 0.016752639770507814, 0.016517120361328123, 0.017156095504760743, 0.01731328010559082, 0.01665279960632324, 0.016615423202514648, 0.016730112075805666, 0.016581823348999023, 0.016572799682617188, 0.017783231735229492, 0.0166146240234375, 0.01653619194030762, 0.016703647613525392, 0.016515071868896485, 0.01661248016357422, 0.016515104293823243, 0.016530271530151366, 0.01681135940551758, 0.016821920394897463, 0.016870399475097657, 0.01670512008666992, 0.016639968872070313, 0.01674630355834961, 0.016577152252197264, 0.016578367233276367, 0.016578752517700194, 0.016664447784423827, 0.017078399658203125, 0.016770368576049806, 0.016636608123779296, 0.016602239608764648, 0.016633823394775392, 0.016505760192871095, 0.01659235191345215, 0.016730655670166017, 0.016494272232055664, 0.016560447692871093, 0.016543359756469728, 0.01651545524597168, 0.016695295333862305, 0.016926528930664063, 0.016590816497802734, 0.01664841651916504, 0.016757984161376954, 0.01670822334289551, 0.016474271774291994, 0.0167891845703125, 0.017135263442993164, 0.016675071716308595, 0.01675817680358887, 0.016991231918334963, 0.01683046340942383, 0.017059648513793945, 0.01690812873840332, 0.017029087066650392, 0.016902336120605467, 0.016955583572387696, 0.016957439422607423, 0.017147903442382813, 0.016701440811157226, 0.01692188835144043, 0.016795520782470704, 0.01686409568786621, 0.017022335052490234, 0.01731648063659668, 0.0171615047454834, 0.01712611198425293, 0.017074176788330078, 0.017657632827758788, 0.016963808059692383, 0.016977920532226562, 0.016920576095581053, 0.016924671173095703, 0.016932384490966797, 0.01713404846191406, 0.017041120529174805, 0.017170719146728516, 0.017114591598510743, 0.017281120300292968, 0.017107168197631837]",tokens/s,59.36677022804998,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1583.456256,1596.915712,0.0,1201.668096,1189.151232,s,1,8.8428955078125,8.8428955078125,0.0,8.8428955078125,8.8428955078125,8.8428955078125,8.8428955078125,[8.8428955078125],,kWh,3.590696299166514e-05,3.950185491363577e-06,1.1906676192000726e-05,5.176382467502944e-05,,MB,1568.436224,1794.048,0.0,1384.12032,1351.367168,s,10,0.4130821113586426,0.04130821113586426,0.00022286802072661455,0.0412576961517334,0.041507010269165034,0.04170185794830322,0.041857736091613765,"[0.041896705627441404, 0.04109718322753906, 0.04121372985839844, 0.04126678466796875, 0.04124860763549805, 0.04128707122802734, 0.041103679656982424, 0.04146371078491211, 0.04115500640869141, 0.0413496322631836]",tokens/s,6197.315084838856,kWh,1.2452644686701731e-06,1.3733006921265444e-07,8.294391978297931e-07,2.2120337357126205e-06,tokens/kWh,115730603.86329415,MB,1573.920768,1835.99104,0.0,1426.06336,1407.548416,s,10,10.498130493164062,1.0498130493164062,0.006266048482070716,1.0488837890625,1.0573603149414061,1.0606197937011719,1.0632273767089844,"[1.056635986328125, 1.0469267578125, 1.04680224609375, 1.0506383056640625, 1.044119140625, 1.0502557373046875, 1.0475118408203126, 1.040291748046875, 1.0510694580078126, 1.0638792724609376]",tokens/s,60.01068479861527,kWh,3.0614132719246836e-05,3.376261120950881e-06,1.530290704136997e-05,4.929330088156768e-05,tokens/kWh,1278064.1359637105,,s,630,10.494630895614618,0.01665814427875337,0.00033934837523612196,0.016566128730773927,0.016881436538696292,0.0172128849029541,0.018070274982452398,"[0.017091936111450195, 0.016477983474731447, 0.016464927673339843, 0.017317983627319337, 0.01666227149963379, 0.016607168197631837, 0.01690451240539551, 0.016965375900268555, 0.01659507179260254, 0.016795520782470704, 0.016619136810302734, 0.01656412887573242, 0.016537471771240233, 0.016523359298706054, 0.016473920822143554, 0.01665273666381836, 0.016604799270629883, 0.0167325439453125, 0.01671833610534668, 0.016717664718627928, 0.01679315185546875, 0.016848735809326172, 0.01670729637145996, 0.016689952850341798, 0.016637184143066405, 0.016526079177856444, 0.016634016036987304, 0.0166910400390625, 0.016626943588256837, 0.016745088577270507, 0.016730239868164062, 0.01675823974609375, 0.0175164794921875, 0.02031407928466797, 0.016833152770996094, 0.017135200500488282, 0.016765344619750978, 0.016703487396240235, 0.016674943923950195, 0.01668492889404297, 0.016623231887817384, 0.016543231964111327, 0.016556768417358397, 0.016605344772338868, 0.016571584701538085, 0.016638784408569335, 0.016842975616455078, 0.017002368927001955, 0.016908191680908204, 0.016742080688476563, 0.016611648559570313, 0.016763904571533202, 0.01654854393005371, 0.016836095809936523, 0.01661836814880371, 0.01681952095031738, 0.01661372756958008, 0.016633216857910155, 0.01659587287902832, 0.01660313606262207, 0.01661452865600586, 
0.01654431915283203, 0.01653094482421875, 0.016677728652954103, 0.016698400497436525, 0.016706335067749024, 0.01661894416809082, 0.016435327529907225, 0.01647385597229004, 0.016509408950805663, 0.016431583404541015, 0.016676095962524413, 0.016424959182739257, 0.016535167694091798, 0.01664080047607422, 0.016694623947143553, 0.016685600280761718, 0.016835071563720702, 0.016699392318725585, 0.016648000717163085, 0.01658060836791992, 0.01659903907775879, 0.016561216354370117, 0.01668611145019531, 0.016582784652709962, 0.01675388717651367, 0.016906816482543944, 0.016691328048706055, 0.016530912399291994, 0.01665065574645996, 0.016477695465087892, 0.016539840698242186, 0.016596895217895508, 0.01665043258666992, 0.016730112075805666, 0.01648371124267578, 0.016382816314697266, 0.016558080673217773, 0.016523263931274415, 0.0165515193939209, 0.016525728225708008, 0.016504608154296874, 0.01641878318786621, 0.01638015937805176, 0.01647830390930176, 0.01655388832092285, 0.016680959701538087, 0.016590848922729492, 0.016530527114868163, 0.016690080642700195, 0.01649228858947754, 0.01688118362426758, 0.016558847427368163, 0.0170383358001709, 0.016556480407714843, 0.01659721565246582, 0.01640790367126465, 0.01655833625793457, 0.017318592071533204, 0.01661136054992676, 0.01659699249267578, 0.01671196746826172, 0.01649839973449707, 0.01676268768310547, 0.016574655532836914, 0.0167587833404541, 0.016698400497436525, 0.01652214431762695, 0.01661756706237793, 0.016695327758789062, 0.016475839614868162, 0.016708608627319335, 0.01663692855834961, 0.016599231719970704, 0.016619327545166016, 0.01657206344604492, 0.0166376953125, 0.017625696182250978, 0.016693248748779296, 0.0167093448638916, 0.016582944869995116, 0.016862720489501954, 0.016586528778076173, 0.01645654487609863, 0.016459648132324218, 0.016478208541870116, 0.016449535369873047, 0.01668908882141113, 0.0165, 0.016587392807006836, 0.016574111938476563, 0.016785184860229493, 0.01666223907470703, 0.01662054443359375, 0.016556032180786134, 0.016719871520996094, 0.016582656860351562, 0.016476320266723632, 0.016555871963500977, 0.016547840118408205, 0.016521215438842773, 0.016586143493652342, 0.0167491512298584, 0.016582399368286132, 0.0165295352935791, 0.016610687255859374, 0.016523935317993163, 0.018104415893554687, 0.01661747169494629, 0.01643123245239258, 0.016514080047607422, 0.01648441505432129, 0.016457984924316407, 0.0164234561920166, 0.016478208541870116, 0.016489791870117187, 0.01632736015319824, 0.016474367141723633, 0.01647385597229004, 0.017044479370117188, 0.016540735244750977, 0.01654368019104004, 0.016496639251708984, 0.016483840942382814, 0.0164564151763916, 0.01658412742614746, 0.0165479679107666, 0.01646636772155762, 0.016494367599487306, 0.01680588722229004, 0.01665238380432129, 0.016529312133789064, 0.016668832778930665, 0.016694496154785157, 0.016510719299316405, 0.01649158477783203, 0.016447231292724608, 0.016648384094238283, 0.016777088165283203, 0.017125280380249023, 0.017897247314453125, 0.016877887725830078, 0.016652288436889647, 0.01661337661743164, 0.01656345558166504, 0.016556800842285155, 0.01660927963256836, 0.016559648513793945, 0.016801279067993165, 0.0179866886138916, 0.016797536849975585, 0.017172479629516603, 0.0168222713470459, 0.016748544692993163, 0.01660927963256836, 0.016494783401489257, 0.016852800369262694, 0.016554048538208008, 0.017024959564208984, 0.01648182487487793, 0.016572895050048827, 0.01644361686706543, 0.016498207092285156, 0.016445695877075197, 0.01670070457458496, 0.016646879196166992, 
0.016883712768554687, 0.016496448516845702, 0.016517311096191405, 0.016487808227539064, 0.016517536163330078, 0.016473600387573242, 0.016496864318847657, 0.016605695724487304, 0.01653286361694336, 0.016490144729614256, 0.01662393569946289, 0.01665705680847168, 0.016703487396240235, 0.016768768310546876, 0.01654537582397461, 0.016490144729614256, 0.01661849594116211, 0.01662774467468262, 0.01667068862915039, 0.016600896835327148, 0.016529024124145506, 0.01638243293762207, 0.016512287139892577, 0.016646528244018556, 0.016551488876342772, 0.016448095321655275, 0.01669647979736328, 0.016658912658691405, 0.01663363265991211, 0.01656425666809082, 0.016624191284179687, 0.016760831832885743, 0.016678848266601563, 0.016529632568359376, 0.016622623443603515, 0.016550336837768555, 0.016636287689208985, 0.01677654457092285, 0.01663657569885254, 0.016647647857666016, 0.017009279251098634, 0.01671721649169922, 0.016531103134155272, 0.016533504486083983, 0.016513919830322264, 0.016727455139160158, 0.016565984725952148, 0.016540512084960938, 0.016497695922851562, 0.0165644474029541, 0.016497407913208008, 0.016523008346557618, 0.016498943328857422, 0.016519264221191408, 0.016525087356567384, 0.0165295352935791, 0.016733407974243164, 0.016671232223510742, 0.01678531265258789, 0.01650636863708496, 0.016445728302001954, 0.016415327072143555, 0.016467967987060548, 0.01642291259765625, 0.016472063064575194, 0.01648748779296875, 0.01635568046569824, 0.01649135971069336, 0.016602880477905275, 0.016811168670654297, 0.016786272048950196, 0.016473472595214842, 0.016496992111206053, 0.016427295684814453, 0.016483583450317384, 0.01646227264404297, 0.01642527961730957, 0.01653932762145996, 0.016414751052856447, 0.016462112426757814, 0.01643519973754883, 0.016469215393066405, 0.016443487167358398, 0.016548288345336913, 0.01655628776550293, 0.016645824432373047, 0.01662393569946289, 0.01670908737182617, 0.0165053768157959, 0.016819839477539063, 0.016869184494018554, 0.016781280517578125, 0.016736799240112305, 0.016488191604614257, 0.016576927185058595, 0.016425312042236326, 0.016528032302856446, 0.016493408203125, 0.016719104766845704, 0.01693503952026367, 0.016679328918457033, 0.016483871459960938, 0.01656028747558594, 0.016478912353515625, 0.01673539161682129, 0.01669206428527832, 0.016670719146728515, 0.016525312423706053, 0.016612991333007813, 0.01650931167602539, 0.016554208755493165, 0.016469280242919923, 0.016458240509033203, 0.016526975631713868, 0.016889984130859376, 0.016519071578979493, 0.01675254440307617, 0.016612831115722655, 0.016677888870239257, 0.016588544845581053, 0.016454879760742187, 0.016422048568725586, 0.016567583084106444, 0.016529983520507812, 0.016545024871826172, 0.01655593681335449, 0.01648691177368164, 0.016471616744995116, 0.01655084800720215, 0.016504640579223632, 0.016524896621704102, 0.016412992477416993, 0.016513151168823244, 0.016455039978027344, 0.016406431198120117, 0.01655471992492676, 0.01663145637512207, 0.016683040618896486, 0.016557695388793946, 0.01654035186767578, 0.016527328491210937, 0.016754720687866213, 0.016684255599975585, 0.016522016525268555, 0.016726015090942382, 0.016720096588134767, 0.01678927993774414, 0.016739904403686525, 0.017084863662719725, 0.017033376693725587, 0.01822697639465332, 0.018415679931640627, 0.017523136138916016, 0.017217536926269532, 0.016865280151367186, 0.016986112594604492, 0.01660927963256836, 0.016615423202514648, 0.01656831932067871, 0.016570367813110352, 0.016635168075561525, 0.016456703186035156, 0.016532735824584963, 
0.016640480041503907, 0.016758687973022462, 0.016892000198364256, 0.0168175048828125, 0.016799999237060548, 0.016800159454345702, 0.01676313591003418, 0.01666227149963379, 0.016699392318725585, 0.016564224243164064, 0.016740352630615234, 0.016547359466552735, 0.016535072326660155, 0.016573375701904296, 0.016566272735595702, 0.01648454475402832, 0.01657360076904297, 0.016485023498535156, 0.016541919708251952, 0.01646335983276367, 0.016529695510864258, 0.016518367767333984, 0.01638275146484375, 0.016678720474243163, 0.016558080673217773, 0.016748191833496094, 0.016793535232543944, 0.016501344680786133, 0.016424959182739257, 0.01639833641052246, 0.01640457534790039, 0.016539072036743162, 0.016466400146484376, 0.01652889633178711, 0.01662822341918945, 0.016666656494140626, 0.016626976013183595, 0.0167425594329834, 0.01664463996887207, 0.01664521598815918, 0.016526144027709962, 0.016506975173950195, 0.016456800460815428, 0.016733152389526367, 0.016617408752441408, 0.016581920623779296, 0.016483327865600587, 0.016498559951782225, 0.016408063888549804, 0.01655228805541992, 0.016513023376464844, 0.016495967864990236, 0.016802175521850586, 0.01655193519592285, 0.01660745620727539, 0.01645955276489258, 0.016465919494628906, 0.01660927963256836, 0.016547168731689453, 0.016519392013549804, 0.01663337516784668, 0.01652934455871582, 0.016509920120239257, 0.01645484733581543, 0.016497472763061523, 0.016451583862304688, 0.016582271575927735, 0.0165230712890625, 0.016457664489746095, 0.016456319808959962, 0.016445119857788085, 0.01638387107849121, 0.016455423355102538, 0.01646771240234375, 0.01648736000061035, 0.01650499153137207, 0.016417856216430663, 0.016458528518676758, 0.016433120727539063, 0.016455135345458984, 0.016427583694458008, 0.016424959182739257, 0.016465280532836916, 0.016411264419555663, 0.01653913688659668, 0.01661939239501953, 0.0166015682220459, 0.016494752883911133, 0.01639423942565918, 0.0164270076751709, 0.01652118492126465, 0.01658064079284668, 0.016453472137451172, 0.016449695587158204, 0.016459840774536133, 0.01642255973815918, 0.01646620750427246, 0.016565568923950197, 0.016538303375244142, 0.0164454402923584, 0.016476160049438478, 0.016455455780029295, 0.016521087646484377, 0.01648089599609375, 0.016430816650390624, 0.01643734359741211, 0.016543968200683594, 0.01641212844848633, 0.016506399154663086, 0.016597312927246095, 0.016727935791015624, 0.01656268882751465, 0.016418560028076172, 0.0164552001953125, 0.017152448654174805, 0.016714784622192384, 0.01652124786376953, 0.016458688735961916, 0.016565311431884767, 0.01648736000061035, 0.01645523262023926, 0.016470464706420898, 0.016441343307495117, 0.016541696548461913, 0.016490848541259765, 0.016506528854370116, 0.016561983108520507, 0.016670143127441406, 0.016548608779907225, 0.017452959060668946, 0.019414623260498046, 0.01674678421020508, 0.01699567985534668, 0.016751487731933593, 0.016631999969482423, 0.01649203109741211, 0.01672137641906738, 0.016611743927001953, 0.01671340751647949, 0.016554752349853517, 0.016625631332397462, 0.016477407455444334, 0.016512096405029295, 0.01646972846984863, 0.016660480499267577, 0.016531455993652345, 0.016476160049438478, 0.016463872909545898, 0.016461599349975587, 0.016363616943359374, 0.01657644844055176, 0.01645996856689453, 0.016438592910766603, 0.016446144104003906, 0.016442752838134764, 0.01646860885620117, 0.01660518455505371, 0.01653555107116699, 0.016453632354736326, 0.016568191528320314, 0.017970304489135742, 0.01645804786682129, 0.016741056442260743, 0.016471647262573243, 
0.01645199966430664, 0.016377504348754884, 0.016375167846679688, 0.016376800537109375, 0.01666975975036621, 0.018975679397583007, 0.016713727951049806, 0.017374656677246095, 0.016542272567749025, 0.01646518325805664, 0.016378591537475586, 0.01664723205566406, 0.01645254325866699, 0.0164270076751709, 0.016817663192749025, 0.016674911499023438, 0.016589664459228517, 0.01655948829650879, 0.016436960220336912, 0.01651299285888672, 0.016752960205078125, 0.016716415405273438, 0.016699392318725585, 0.016584928512573243, 0.01650787162780762, 0.016466720581054688, 0.01647830390930176, 0.01650886344909668, 0.016529632568359376, 0.016445215225219727, 0.01644476890563965, 0.016406784057617186, 0.016482208251953127, 0.016546239852905275, 0.016580671310424806, 0.016555807113647462, 0.016865503311157225, 0.01696998405456543, 0.017135360717773437, 0.01738479995727539, 0.01736969566345215, 0.017295295715332032, 0.01719718360900879, 0.01723391914367676, 0.017320032119750976, 0.017207199096679688, 0.01739366340637207, 0.017253503799438477, 0.017158687591552733, 0.017285472869873048, 0.01745305633544922, 0.017662208557128908, 0.017401599884033205, 0.01873823928833008, 0.016734336853027342, 0.01657734489440918, 0.01651456069946289, 0.016600576400756836, 0.016484640121459962, 0.017732320785522462, 0.01759052848815918, 0.017706111907958986, 0.016631935119628908, 0.01669990348815918, 0.016725887298583986, 0.016681087493896483, 0.016531455993652345, 0.01656012725830078, 0.01653539276123047, 0.016534879684448243, 0.01654662322998047, 0.01658060836791992, 0.016697343826293946, 0.01696713638305664, 0.016904735565185548, 0.01698406410217285, 0.01694633674621582]",tokens/s,60.03069629283077,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 21490 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,6768.418816,7762.542592,0.0,7367.294976,7351.94368,s,1,12.4044169921875,12.4044169921875,0.0,12.4044169921875,12.4044169921875,12.4044169921875,12.4044169921875,[12.4044169921875],,kWh,0.00015872177171666712,1.7501032018332932e-05,5.042504033999848e-05,0.00022664784407499852,,MB,1417.826304,8404.271104,0.0,7994.343424,7863.794176,s,10,2.982906372070313,0.29829063720703125,0.00042369488986336985,0.29844981384277347,0.2986946716308594,0.298739990234375,0.2987762451171875,"[0.29835421752929686, 0.298471435546875, 0.29781222534179685, 0.2973498840332031, 0.2985828857421875, 0.2979638671875, 0.2987853088378906, 0.2984737548828125, 0.29868460083007814, 0.2984281921386719]",tokens/s,858.2233837340357,kWh,8.727604025857624e-06,9.621517366482535e-07,5.779473577823583e-06,1.546922934032946e-05,tokens/kWh,16548982.135301886,MB,1438.72,8676.900864,0.0,8266.973184,8120.408064,s,10,24.584218749999998,2.458421875,0.0029808773235690616,2.459900634765625,2.4606286865234375,2.4616724975585935,2.462507546386719,"[2.452980712890625, 2.454088134765625, 2.455698974609375, 2.45816455078125, 2.460396728515625, 2.45989404296875, 2.4599072265625, 2.460341064453125, 2.460031005859375, 2.46271630859375]",tokens/s,25.626195666681497,kWh,7.195771981330942e-05,7.937285538448352e-06,4.791437493277619e-05,0.00012780938028453392,tokens/kWh,492921.56694404664,,s,630,24.580845401763934,0.03901721492343479,0.000344952758432959,0.03898783874511719,0.0394716007232666,0.03956559257507324,0.03973260978698731,"[0.03861248016357422, 0.03829539108276367, 0.03821443176269531, 0.03829779052734375, 0.03813715362548828, 0.03831273651123047, 0.03855257415771484, 0.03852585601806641, 0.038358047485351564, 0.03833673477172852, 0.03850726318359375, 0.03876169586181641, 0.038717952728271485, 0.03879759979248047, 0.03879935836791992, 0.03861094284057617, 0.03860070419311523, 0.03877068710327149, 0.03867407989501953, 0.03870896148681641, 0.038890113830566404, 0.038834175109863284, 0.038780033111572264, 0.038701313018798825, 0.038986366271972654, 0.0389918098449707, 0.03897932815551758, 0.03889593505859375, 0.0387665901184082, 0.0386448974609375, 0.038695743560791016, 0.03877020645141602, 0.03877289581298828, 0.038809951782226564, 0.038801185607910155, 0.0389073600769043, 0.03910294342041016, 0.03914115142822266, 0.03913356781005859, 0.039319679260253905, 0.03939328002929687, 0.039329792022705076, 0.0392674560546875, 0.039203712463378906, 0.0392086067199707, 0.039346527099609375, 0.03924790573120117, 0.03914070510864258, 0.03931523132324219, 0.03948531341552734, 0.0394024658203125, 0.039255840301513675, 0.03922723388671875, 0.03942643356323242, 
0.03933171081542969, 0.03926796722412109, 0.039246337890625, 0.0392171516418457, 0.039272449493408204, 0.03922323226928711, 0.03929884719848633, 0.03952041625976563, 0.03955513763427734, 0.038827072143554686, 0.03851753616333008, 0.03855091094970703, 0.038583072662353515, 0.03852492904663086, 0.03839299011230469, 0.038617759704589846, 0.038627231597900394, 0.038451297760009766, 0.03856150436401367, 0.03861516952514649, 0.03841878509521485, 0.0384532470703125, 0.03838908767700195, 0.03837020874023438, 0.038631168365478516, 0.03880550384521484, 0.03878870391845703, 0.03872604751586914, 0.038714752197265626, 0.03894745635986328, 0.0390041618347168, 0.03880361557006836, 0.03894255828857422, 0.03891404724121094, 0.0388403205871582, 0.0389119987487793, 0.038801406860351564, 0.03885055923461914, 0.03902582550048828, 0.039082817077636715, 0.03902211380004883, 0.03895347213745117, 0.038891166687011716, 0.03887926483154297, 0.03924720001220703, 0.03915673446655273, 0.038903423309326175, 0.03881961441040039, 0.038906143188476565, 0.039137569427490235, 0.03899987030029297, 0.039051456451416014, 0.039292160034179686, 0.03930393600463867, 0.03919257736206055, 0.03913667297363281, 0.03911945724487305, 0.039049217224121094, 0.03936870574951172, 0.03941904067993164, 0.03942627334594727, 0.03941798400878906, 0.039391742706298825, 0.039346176147460936, 0.03925196838378906, 0.03952435302734375, 0.03949318313598633, 0.039307712554931644, 0.03919462585449219, 0.03927859115600586, 0.039299072265625, 0.03924905776977539, 0.038626625061035154, 0.038763198852539066, 0.03858163070678711, 0.03854985427856445, 0.03858233642578125, 0.038663585662841796, 0.03869779205322266, 0.03858169555664062, 0.038343231201171876, 0.03857779312133789, 0.038680320739746095, 0.03871603012084961, 0.03865190505981445, 0.03864371109008789, 0.03881132888793945, 0.03872998428344727, 0.03859462356567383, 0.0385986557006836, 0.03855155181884766, 0.03850649642944336, 0.03859443283081055, 0.038905982971191404, 0.038817790985107424, 0.038778881072998046, 0.038978912353515624, 0.038981441497802735, 0.0388043212890625, 0.038760448455810545, 0.03880755233764648, 0.03896713638305664, 0.038965408325195315, 0.03894668960571289, 0.03908607864379883, 0.03936249542236328, 0.03899571228027344, 0.03882438278198242, 0.038895584106445315, 0.03898076629638672, 0.03914012908935547, 0.03916400146484375, 0.039182464599609376, 0.0390695686340332, 0.0390203857421875, 0.03896950531005859, 0.03929292678833008, 0.03948086547851563, 0.03934163284301758, 0.03939420700073242, 0.03938508987426758, 0.039346176147460936, 0.039354366302490236, 0.039376449584960935, 0.039239425659179684, 0.039158462524414066, 0.03902668762207031, 0.039139102935791016, 0.03956694412231445, 0.03963881683349609, 0.039430335998535154, 0.0395015983581543, 0.03937750244140625, 0.03935043334960937, 0.03955904006958008, 0.03853311920166016, 0.03847577667236328, 0.03842863845825195, 0.03855791854858399, 0.03859151840209961, 0.03849929428100586, 0.03844281768798828, 0.03857619094848633, 0.03845113754272461, 0.038703102111816406, 0.038694911956787106, 0.03857161712646484, 0.0385577278137207, 0.038689216613769534, 0.038739902496337894, 0.03869900894165039, 0.03885465621948242, 0.03862720108032226, 0.038645313262939456, 0.03885113525390625, 0.03873532867431641, 0.038713886260986326, 0.03894796752929688, 0.03891811370849609, 0.03890473556518555, 0.03889152145385742, 0.03890991973876953, 0.03883625411987305, 0.039034881591796876, 0.03887513732910156, 0.03882393646240234, 0.03878092956542969, 
0.03893657684326172, 0.039204864501953124, 0.039041023254394534, 0.039051136016845706, 0.03925619125366211, 0.03938051223754883, 0.039223041534423825, 0.03907043075561523, 0.03951001739501953, 0.03932950210571289, 0.03937516784667969, 0.039411582946777345, 0.039241825103759766, 0.03940156936645508, 0.039444385528564455, 0.039241825103759766, 0.03925187301635742, 0.03925763320922852, 0.0390865592956543, 0.03913318252563477, 0.03952230453491211, 0.039354366302490236, 0.03937257766723633, 0.039276737213134766, 0.03941584014892578, 0.039534591674804685, 0.03957356643676758, 0.03943008041381836, 0.039632190704345704, 0.0396479377746582, 0.03960543823242187, 0.039279361724853516, 0.038668449401855466, 0.038520862579345706, 0.03839740753173828, 0.03834726333618164, 0.038634719848632815, 0.0387305908203125, 0.03861004638671875, 0.038561729431152346, 0.03854415893554688, 0.038735904693603516, 0.0391550407409668, 0.03881817626953125, 0.03862563323974609, 0.03878092956542969, 0.039002113342285157, 0.03884236907958984, 0.03881564712524414, 0.03890528106689453, 0.03897529602050781, 0.03894768142700195, 0.03896121597290039, 0.03897235107421875, 0.03884134292602539, 0.03871705627441406, 0.038954910278320314, 0.03894102478027344, 0.038885505676269534, 0.038852222442626955, 0.038873374938964846, 0.0389571533203125, 0.03891167831420898, 0.039134719848632815, 0.039058238983154296, 0.03891814422607422, 0.039099681854248045, 0.03903948974609375, 0.03905110549926758, 0.039325759887695315, 0.03927219009399414, 0.03918700790405273, 0.03935612869262695, 0.03938742446899414, 0.03935356903076172, 0.03924614334106445, 0.0391602897644043, 0.03917168045043945, 0.03911721420288086, 0.03921100616455078, 0.03916595077514649, 0.0393994255065918, 0.0394439697265625, 0.03930777740478516, 0.03929235076904297, 0.03925459289550781, 0.03939123153686523, 0.039462913513183595, 0.039376895904541014, 0.03956531143188476, 0.039702529907226565, 0.03959807968139648, 0.039669761657714846, 0.03957715225219727, 0.03886896133422851, 0.03856921768188477, 0.03834969711303711, 0.03845951843261719, 0.03861830520629883, 0.03855817413330078, 0.03854275131225586, 0.03844112014770508, 0.038855457305908205, 0.03879267120361328, 0.0388265266418457, 0.03873791885375977, 0.03864950561523438, 0.03856118392944336, 0.03865103912353516, 0.03884009552001953, 0.03874816131591797, 0.03859008026123047, 0.038805343627929687, 0.03889539337158203, 0.038808319091796876, 0.03893779373168945, 0.03915449523925781, 0.03909222412109375, 0.038950912475585936, 0.03888742446899414, 0.0389172477722168, 0.03879328155517578, 0.038770561218261716, 0.03885782241821289, 0.03895280075073242, 0.0388485107421875, 0.03905507278442383, 0.03899820709228516, 0.03895036697387695, 0.03907551956176758, 0.039140289306640624, 0.03908198547363281, 0.039139102935791016, 0.039233470916748045, 0.03922358322143555, 0.0392171516418457, 0.03928799819946289, 0.03935702514648438, 0.039321823120117186, 0.03929087829589844, 0.03918745422363281, 0.039330814361572264, 0.039327743530273435, 0.039317089080810545, 0.03942031860351562, 0.03946905517578125, 0.03938889694213867, 0.03929116821289062, 0.03955712127685547, 0.03947510528564453, 0.039497825622558595, 0.03927449417114258, 0.03978035354614258, 0.039702529907226565, 0.03962166213989258, 0.03956582260131836, 0.03964156723022461, 0.0387740478515625, 0.03865468978881836, 0.03848515319824219, 0.0384903678894043, 0.03848259353637695, 0.03833139038085937, 0.03831084823608399, 0.03867238235473633, 0.038963359832763673, 0.03888470458984375, 
0.03877529525756836, 0.03855974578857422, 0.03846259307861328, 0.03853612899780273, 0.03865593719482422, 0.03863532638549805, 0.03879539108276367, 0.03892025756835937, 0.038899711608886715, 0.03879935836791992, 0.03883827209472656, 0.03911884689331055, 0.03889583969116211, 0.038849857330322264, 0.03883055877685547, 0.039013504028320316, 0.03883712005615234, 0.038983680725097655, 0.03895296096801758, 0.038829345703125, 0.038793952941894534, 0.03890176010131836, 0.03916799926757813, 0.0391044807434082, 0.03905235290527344, 0.03948992156982422, 0.039219806671142575, 0.03920444869995117, 0.03913532638549805, 0.03897993469238281, 0.039061473846435546, 0.039256065368652344, 0.03911065673828125, 0.03908403015136719, 0.03925571060180664, 0.03928940963745117, 0.03920054244995117, 0.03917737579345703, 0.03952931213378906, 0.039411712646484375, 0.03940505599975586, 0.03939718246459961, 0.03936735916137695, 0.03938304138183594, 0.039396991729736326, 0.039857887268066404, 0.03961718368530273, 0.03955913543701172, 0.039511199951171874, 0.03949606323242188, 0.039352832794189455, 0.03987251281738281, 0.039744895935058595, 0.03879305648803711, 0.038383838653564456, 0.03863929748535156, 0.038570240020751954, 0.03851446533203125, 0.03897366333007812, 0.038870849609375, 0.038674625396728515, 0.03870851135253906, 0.038645633697509764, 0.03857084655761719, 0.038662143707275394, 0.038746208190917966, 0.03861222457885742, 0.038547199249267576, 0.0387694091796875, 0.03891238403320312, 0.03880527877807617, 0.03866419219970703, 0.03882572937011719, 0.03885696029663086, 0.03892351913452148, 0.03892844772338867, 0.03887142562866211, 0.038959423065185544, 0.03874745559692383, 0.03873452758789062, 0.03881369781494141, 0.039008255004882815, 0.03901030349731445, 0.03892428970336914, 0.03927859115600586, 0.039142494201660154, 0.03909856033325195, 0.03904608154296875, 0.03909609603881836, 0.03922681427001953, 0.03950678253173828, 0.039323360443115234, 0.03938508987426758, 0.039090175628662106, 0.03908198547363281, 0.03912499237060547, 0.03924991989135742, 0.03923164749145508, 0.039182079315185546, 0.03915164947509766, 0.03938515090942383, 0.039362560272216796, 0.039479297637939455, 0.039446529388427735, 0.039384735107421874, 0.039368030548095706, 0.03925484848022461, 0.03967830276489258, 0.039626590728759764, 0.03948339080810547, 0.03938620758056641, 0.03933638381958008, 0.03926883316040039, 0.03943971252441406, 0.03948716735839844, 0.03966432189941406, 0.038912448883056644, 0.03842047882080078, 0.03872742462158203, 0.03859072113037109, 0.038569854736328124, 0.03854355239868164, 0.03854713439941406, 0.03867212677001953, 0.03849615859985352, 0.038480480194091796, 0.03863740921020508, 0.03857424163818359, 0.038563838958740236, 0.03868057632446289, 0.03870230484008789, 0.038683425903320315, 0.038965248107910154, 0.038860321044921875, 0.03895548629760742, 0.038983680725097655, 0.03871088027954102, 0.03858451080322266, 0.038713569641113284, 0.03868880081176758, 0.03864476776123047, 0.03899488067626953, 0.03907788848876953, 0.038948863983154294, 0.03892838287353516, 0.03907788848876953, 0.03898931121826172, 0.03939583969116211, 0.039403518676757815, 0.03932364654541016, 0.03915776062011719, 0.039203937530517576, 0.03899417495727539, 0.038847137451171875, 0.038978816986083985, 0.03901708984375, 0.039118785858154294, 0.039233409881591796, 0.03934828948974609, 0.03916825485229492, 0.03950732803344727, 0.0393570556640625, 0.03930316925048828, 0.039634944915771485, 0.03928473663330078, 0.039137279510498044, 0.03946080017089844, 
0.039446590423583984, 0.03945209503173828, 0.03939395141601563, 0.039411617279052735, 0.03942399978637695, 0.039577598571777346, 0.039585792541503906, 0.03958169555664062, 0.03951599884033203, 0.03954643249511719, 0.03947148895263672, 0.039472606658935545, 0.038924415588378905, 0.03868239974975586, 0.03857827377319336, 0.038459007263183596, 0.03850243377685547, 0.03862972640991211, 0.03869510269165039, 0.038467391967773434, 0.038682174682617185, 0.03880799865722656, 0.038703102111816406, 0.0388403205871582, 0.03902668762207031, 0.0388485107421875, 0.03883404922485351, 0.038856510162353516, 0.03890950393676758, 0.038855422973632814, 0.03883747100830078, 0.038722335815429686, 0.03912086486816406, 0.03922332763671875, 0.0388935661315918, 0.038982719421386716, 0.0388474235534668, 0.038808895111083985, 0.03883078384399414, 0.038787071228027346, 0.03878297424316406, 0.038973438262939454, 0.03902054214477539, 0.03886899185180664, 0.039419902801513675, 0.039411712646484375, 0.03919257736206055, 0.03927014541625977, 0.03912054443359375, 0.03913584136962891, 0.0389911994934082, 0.03899868774414063, 0.03917391967773438, 0.03915711975097656, 0.03905593490600586, 0.03911231994628906, 0.03919529724121094, 0.03926220703125, 0.03926806259155274, 0.039200958251953126, 0.03953468704223633, 0.0394048957824707, 0.039430816650390624, 0.039419902801513675, 0.039282047271728515, 0.0397973747253418, 0.03960319900512695, 0.03955542373657227, 0.03957827377319336, 0.03954035186767578, 0.03997119903564453, 0.039751201629638674, 0.03964483261108399, 0.03944736099243164, 0.039436286926269534]",tokens/s,25.629712473387563,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,8219.242496,11243.814912,0.0,10848.567296,10616.027648,s,1,14.3545263671875,14.3545263671875,0.0,14.3545263671875,14.3545263671875,14.3545263671875,14.3545263671875,[14.3545263671875],,kWh,0.0002154615354791531,2.3759469206828034e-05,6.835061023599542e-05,0.00030757161492197654,,MB,3966.40256,11665.342464,0.0,11255.414784,11070.470656,s,10,3.729839172363282,0.3729839172363281,0.0009508679458567862,0.3731391906738281,0.3740673522949219,0.3741419219970703,0.37420157775878904,"[0.37110870361328124, 0.37287899780273437, 0.37146514892578125, 0.37303924560546875, 0.3732391357421875, 0.37294482421875, 0.3742164916992187, 0.37360546875, 0.37329037475585936, 0.37405078125]",tokens/s,686.3566716143276,kWh,1.0907724441358089e-05,1.2028850010034015e-06,7.236518134889388e-06,1.9347127577250877e-05,tokens/kWh,13231938.38350531,MB,3970.727936,11667.439616,0.0,11257.511936,11070.473216,s,10,28.761244873046877,2.876124487304687,0.005087828976131349,2.875763671875,2.882142236328125,2.8829097412109377,2.8835237451171873,"[2.86629638671875, 2.871433349609375, 2.873718994140625, 2.872728271484375, 2.87521826171875, 2.87630908203125, 2.879038330078125, 2.880853271484375, 2.8819716796875, 
2.88367724609375]",tokens/s,21.90447606773774,kWh,8.428156574822338e-05,9.29480960613096e-06,5.6135504784910435e-05,0.00014971188013926482,tokens/kWh,420808.2881692235,,s,630,28.75854958343503,0.04564849140227787,0.0004311421835084933,0.0456529598236084,0.04601856193542481,0.04610665893554687,0.04772453594207764,"[0.04760335922241211, 0.045615966796875, 0.04535631942749024, 0.044825313568115234, 0.04515020751953125, 0.044977664947509766, 0.04483955383300781, 0.045158206939697264, 0.04504336166381836, 0.04497411346435547, 0.04489379119873047, 0.04549302291870117, 0.045245983123779296, 0.04515862274169922, 0.04527471923828125, 0.04521769714355469, 0.04527763366699219, 0.04541062545776367, 0.04560076904296875, 0.04552703857421875, 0.04550630569458008, 0.04552524948120117, 0.04588339233398438, 0.04562041473388672, 0.045222721099853515, 0.045174785614013675, 0.04522761535644531, 0.04529916763305664, 0.045203487396240236, 0.04520166397094726, 0.04525923156738281, 0.045241985321044925, 0.04537936019897461, 0.04537942504882812, 0.045644737243652346, 0.045486080169677735, 0.04538127899169922, 0.04561955261230469, 0.04570851135253906, 0.04561548614501953, 0.045870849609375, 0.0460681266784668, 0.04603110504150391, 0.04592844772338867, 0.045848320007324216, 0.04579734420776367, 0.04549871826171875, 0.045389759063720704, 0.045774848937988284, 0.04569462585449219, 0.0454447021484375, 0.045333248138427734, 0.045505569458007815, 0.04549731063842773, 0.045658111572265625, 0.04544921493530273, 0.04547923278808594, 0.0457383041381836, 0.045658462524414065, 0.04564585494995117, 0.04575020980834961, 0.0458770866394043, 0.04589385604858399, 0.04733542251586914, 0.04528742218017578, 0.045034656524658205, 0.044890975952148436, 0.0447498893737793, 0.04465964889526367, 0.04481568145751953, 0.04517055892944336, 0.045396800994873046, 0.045139968872070314, 0.04511948776245117, 0.045401313781738284, 0.04517977523803711, 0.04521084976196289, 0.04517561721801758, 0.045480865478515625, 0.04585980987548828, 0.045565185546875, 0.045166912078857424, 0.04543532943725586, 0.04574031829833984, 0.04568239974975586, 0.045330432891845705, 0.04571043014526367, 0.04566438293457031, 0.04535507202148437, 0.045283649444580076, 0.045344287872314454, 0.04534912109375, 0.04536380767822266, 0.04549407958984375, 0.04563516616821289, 0.045507198333740236, 0.04547379302978516, 0.04568841552734375, 0.04569708633422852, 0.04581568145751953, 0.04599350357055664, 0.04608710479736328, 0.04589507293701172, 0.04575718307495117, 0.04588937759399414, 0.046018558502197264, 0.04591772842407227, 0.04580969619750976, 0.04578323364257812, 0.045875167846679686, 0.045506847381591796, 0.045461505889892576, 0.045641281127929687, 0.045736480712890625, 0.04570102310180664, 0.04552294540405273, 0.0454769287109375, 0.04580819320678711, 0.045717281341552736, 0.04570377731323242, 0.045827167510986325, 0.045964351654052736, 0.04587628936767578, 0.04589648056030273, 0.045975006103515625, 0.04611747360229492, 0.047839168548583985, 0.04606982421875, 0.045096958160400394, 0.04497200012207031, 0.04487734222412109, 0.04507644653320313, 0.04497615814208984, 0.04492134475708008, 0.04503731155395508, 0.045309505462646484, 0.045294559478759766, 0.04520489501953125, 0.045459712982177734, 0.04526496124267578, 0.045225311279296875, 0.04523686218261719, 0.045578559875488284, 0.04570697784423828, 0.04534067153930664, 0.045256671905517576, 0.04546073532104492, 0.04593494415283203, 0.045695423126220706, 0.04553318405151367, 0.045553665161132816, 0.04539542388916016, 
0.04547174453735352, 0.04521014404296875, 0.04517817687988281, 0.045480640411376956, 0.0454205436706543, 0.04539587020874023, 0.045486175537109375, 0.04557561492919922, 0.045752864837646484, 0.04561103820800781, 0.04580707168579102, 0.04586918258666992, 0.04581008148193359, 0.04590387344360351, 0.045676673889160156, 0.046161441802978515, 0.045807968139648436, 0.04590591812133789, 0.04604313659667969, 0.04586905670166016, 0.04562739181518555, 0.04578713607788086, 0.04570924758911133, 0.045588542938232425, 0.04560809707641601, 0.045837150573730466, 0.04568681716918945, 0.04555977630615234, 0.04554912185668945, 0.04594473648071289, 0.04610710525512695, 0.04574816131591797, 0.04585075378417969, 0.04595507049560547, 0.045794654846191406, 0.04628473663330078, 0.046069793701171875, 0.0483836784362793, 0.04588102340698242, 0.045115230560302734, 0.044816287994384765, 0.044901313781738283, 0.04505388641357422, 0.045227710723876956, 0.045107391357421874, 0.04504800033569336, 0.045421695709228514, 0.04532912063598633, 0.045236385345458985, 0.04526694488525391, 0.045502079010009765, 0.04525094223022461, 0.04525590515136719, 0.04548303985595703, 0.04568857574462891, 0.045692928314208986, 0.04550841522216797, 0.04584262466430664, 0.0462224006652832, 0.045939777374267576, 0.045524417877197264, 0.04530598449707031, 0.0452426872253418, 0.045338558197021483, 0.04514575958251953, 0.04518953704833984, 0.04542464065551758, 0.04552908706665039, 0.045246463775634765, 0.04531814575195312, 0.04545232009887695, 0.045378528594970706, 0.04560076904296875, 0.04572323226928711, 0.045609375, 0.04568438339233399, 0.045480480194091795, 0.04571855926513672, 0.046007072448730466, 0.04592025756835937, 0.04605132675170898, 0.045948928833007815, 0.04579942321777344, 0.045471839904785157, 0.04552080154418945, 0.04568819046020508, 0.04560550308227539, 0.04549148941040039, 0.04568534469604492, 0.0455865592956543, 0.0455551986694336, 0.04589328002929687, 0.04581568145751953, 0.045616096496582034, 0.04569497680664063, 0.04605747222900391, 0.046104576110839846, 0.046036991119384765, 0.04572918319702148, 0.04608060836791992, 0.047745471954345704, 0.04548198318481445, 0.04513382339477539, 0.044799999237060545, 0.04491775894165039, 0.04507340621948242, 0.04506131362915039, 0.045090911865234375, 0.045160255432128905, 0.04551897430419922, 0.045443550109863284, 0.04543315124511719, 0.04522137451171875, 0.045400577545166014, 0.04558643341064453, 0.045461505889892576, 0.045418014526367186, 0.04550403213500977, 0.04581372833251953, 0.0455463981628418, 0.04578656005859375, 0.04580825424194336, 0.045742080688476565, 0.04555084609985351, 0.04518169784545899, 0.04542611312866211, 0.04529926300048828, 0.04504886245727539, 0.045305824279785155, 0.04558233642578125, 0.04563558578491211, 0.04558812713623047, 0.04554991912841797, 0.04550239944458008, 0.045776958465576174, 0.04573583984375, 0.04575446319580078, 0.045963264465332034, 0.04575641632080078, 0.045445121765136716, 0.04604108810424805, 0.0458158073425293, 0.045721408843994144, 0.04583033752441406, 0.045995105743408204, 0.045846912384033205, 0.04574019241333008, 0.04579366302490234, 0.04557574462890625, 0.045792766571044925, 0.04588035202026367, 0.04589052963256836, 0.04584339141845703, 0.04555980682373047, 0.0456888313293457, 0.04586918258666992, 0.04571942520141602, 0.045666305541992185, 0.04597711944580078, 0.04592601776123047, 0.04611568069458008, 0.04613119888305664, 0.046290111541748044, 0.04823040008544922, 0.04608787155151367, 0.04502764892578125, 0.04483686447143555, 
0.04501251220703125, 0.04497884750366211, 0.0447916145324707, 0.04482252883911133, 0.04498636627197266, 0.04543203353881836, 0.045555648803710935, 0.04541641616821289, 0.0454071044921875, 0.04536524963378906, 0.04528057479858399, 0.04525897598266602, 0.04538748931884766, 0.04570608139038086, 0.04566006469726563, 0.0453851203918457, 0.04580822372436524, 0.04593407821655274, 0.045928958892822266, 0.04550361633300781, 0.045347297668457034, 0.045528961181640626, 0.04536348724365234, 0.04529507064819336, 0.04534348678588867, 0.045373470306396484, 0.04544211196899414, 0.04528838348388672, 0.04566425704956055, 0.04571955108642578, 0.04566233444213867, 0.04564476776123047, 0.04593961715698242, 0.045846080780029295, 0.04582233428955078, 0.04570880126953125, 0.04592697525024414, 0.045849727630615233, 0.045992832183837894, 0.04610611343383789, 0.04597148895263672, 0.04557257461547851, 0.04592156982421875, 0.04586979293823242, 0.0456328010559082, 0.045607646942138674, 0.04578227233886719, 0.04577497482299805, 0.045652286529541015, 0.04588521575927734, 0.04578937530517578, 0.045809761047363284, 0.045961471557617185, 0.045907936096191405, 0.04603692626953125, 0.04594195175170898, 0.04586761474609375, 0.04618172836303711, 0.04620387268066406, 0.04806150436401367, 0.045575103759765624, 0.04521152114868164, 0.04496192169189453, 0.04497532653808594, 0.04480694580078125, 0.044918495178222655, 0.04500905609130859, 0.04532396697998047, 0.04518713760375977, 0.04530601501464844, 0.045811168670654295, 0.04565068817138672, 0.04555286407470703, 0.04525955200195313, 0.04559462356567383, 0.04587724685668945, 0.045617057800292966, 0.04535919952392578, 0.04550204849243164, 0.0461082878112793, 0.04602140808105469, 0.04570025634765625, 0.04557043075561523, 0.04552479934692383, 0.045369598388671876, 0.04519180679321289, 0.04520460891723633, 0.045546142578125, 0.045559585571289064, 0.0454268798828125, 0.04563766479492187, 0.04558774566650391, 0.04547369766235351, 0.04564051055908203, 0.04596464157104492, 0.04585539245605469, 0.04554060745239258, 0.045742561340332034, 0.046026878356933594, 0.045913761138916015, 0.04588784027099609, 0.04623807907104492, 0.045960544586181644, 0.0459554557800293, 0.04597884750366211, 0.0460849609375, 0.04577212905883789, 0.04560863876342774, 0.0455055046081543, 0.04562739181518555, 0.04568617630004883, 0.045647808074951175, 0.045892257690429684, 0.046007999420166014, 0.04592876815795898, 0.0458271369934082, 0.04597856140136719, 0.045882785797119144, 0.04601712036132812, 0.04672716903686523, 0.046170112609863284, 0.04615068817138672, 0.04795024108886719, 0.0455392951965332, 0.04531817626953125, 0.04518707275390625, 0.045013057708740235, 0.04490598297119141, 0.045264640808105466, 0.04542329788208008, 0.0454389762878418, 0.04513177490234375, 0.04543078231811523, 0.04558956909179687, 0.045523582458496095, 0.045313793182373045, 0.04563183975219726, 0.045850112915039064, 0.045772705078125, 0.04546438217163086, 0.04550796890258789, 0.04568332672119141, 0.04575436782836914, 0.04568848037719726, 0.04580124664306641, 0.04567097473144531, 0.0457154541015625, 0.045561569213867184, 0.04549660873413086, 0.045328254699707034, 0.045492351531982424, 0.045696575164794924, 0.04549004745483398, 0.04570966339111328, 0.04568652725219727, 0.04573769760131836, 0.04578531265258789, 0.0457564811706543, 0.04587158584594726, 0.046020606994628906, 0.04590172958374023, 0.04573964691162109, 0.04609641647338867, 0.04605305480957031, 0.04588399887084961, 0.04551286315917969, 0.04581990432739258, 0.04595225524902344, 
0.04578937530517578, 0.04565049743652344, 0.04605132675170898, 0.04588339233398438, 0.045553665161132816, 0.045701087951660155, 0.046018592834472655, 0.04592844772338867, 0.04563353729248047, 0.04583388900756836, 0.0460926399230957, 0.046129150390625, 0.04593664169311523, 0.04595916748046875, 0.046086143493652344, 0.045929759979248044, 0.04623977661132812, 0.04767327880859375, 0.04568502426147461, 0.04534889602661133, 0.04512697601318359, 0.04500547027587891, 0.04538982391357422, 0.04541439819335937, 0.04526489639282227, 0.045412353515625, 0.045618335723876954, 0.045484897613525394, 0.045238273620605465, 0.045592575073242186, 0.045502464294433595, 0.04568473434448242, 0.04558393478393555, 0.04560140609741211, 0.045967166900634765, 0.04570425415039062, 0.04545836639404297, 0.045932544708251956, 0.045835777282714846, 0.04581631851196289, 0.04565343856811523, 0.04571807861328125, 0.04562636947631836, 0.04571043014526367, 0.045617057800292966, 0.045879295349121094, 0.04570316696166992, 0.045682689666748044, 0.045676544189453126, 0.04567244720458984, 0.04562684631347656, 0.045529407501220705, 0.0456317138671875, 0.04590739059448242, 0.04571503829956055, 0.045783870697021486, 0.04570326232910156, 0.045793342590332034, 0.045752513885498045, 0.045755233764648434, 0.045977760314941406, 0.045880126953125, 0.045809120178222654, 0.045860542297363284, 0.0460184326171875, 0.045954017639160155, 0.04563497543334961, 0.04581846237182617, 0.045795169830322266, 0.045969566345214846, 0.04601241683959961, 0.04604108810424805, 0.0458620491027832, 0.045820766448974606, 0.045840385437011716, 0.046080001831054686, 0.04590703964233398, 0.04588864135742188, 0.046058624267578126, 0.04602521514892578, 0.04845353698730469, 0.04565804672241211, 0.04511385726928711, 0.04543932723999024, 0.04522390365600586, 0.0451297607421875, 0.04523209762573242, 0.045426239013671876, 0.04569955062866211, 0.04552291107177735, 0.04518300628662109, 0.04547113418579102, 0.045492351531982424, 0.04529404830932617, 0.04549856185913086, 0.04547564697265625, 0.04580556869506836, 0.045690879821777344, 0.04538163375854492, 0.04548710250854492, 0.0457237434387207, 0.04587971115112305, 0.045697536468505856, 0.04582918548583984, 0.045760929107666014, 0.04557619094848633, 0.045518367767333985, 0.045456382751464845, 0.045817119598388675, 0.04554620742797852, 0.04563148880004883, 0.045803680419921874, 0.045803329467773435, 0.04577196884155273, 0.04571017456054687, 0.045795326232910154, 0.04589878463745117, 0.04565248107910156, 0.045697502136230465, 0.04589478302001953, 0.04597849655151367, 0.04588544082641602, 0.04610867309570312, 0.04597555160522461, 0.04595507049560547, 0.045954334259033204, 0.04607577514648437, 0.04598806381225586, 0.04584307098388672, 0.045873153686523435, 0.04605952072143555, 0.045725696563720705, 0.04570521545410156, 0.04614963150024414, 0.04597455978393555, 0.0457287368774414, 0.04589068984985351, 0.0462611198425293, 0.045939777374267576, 0.04581049728393555, 0.0462583999633789, 0.046153343200683594, 0.045943073272705075]",tokens/s,21.906528984440868,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code 
{isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights 
self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1174.069248,987.62752,0.0,585.105408,557.135872,s,1,8.1814892578125,8.1814892578125,0.0,8.1814892578125,8.1814892578125,8.1814892578125,8.1814892578125,[8.1814892578125],,kWh,2.9423575533360234e-05,3.238060160255963e-06,8.725840314000033e-06,4.138747600761623e-05,,MB,1375.076352,1147.011072,0.0,729.808896,689.092096,s,10,0.30642246437072757,0.030642246437072758,0.00013190258608197062,0.030630080223083497,0.0307716007232666,0.030850728034973143,0.03091402988433838,"[0.030929855346679688, 0.03045359992980957, 0.030556032180786133, 0.03067024040222168, 0.030480831146240235, 0.030641759872436523, 0.030754016876220702, 0.03061840057373047, 0.03059644889831543, 0.03072127914428711]",tokens/s,8354.478857342405,kWh,9.139025930599112e-07,1.0078674311577176e-07,6.033294757187315e-07,1.6180188118944147e-06,tokens/kWh,158218185.17688873,MB,1389.142016,1293.811712,0.0,876.609536,689.094656,s,10,10.849275756835937,1.0849275756835937,0.02074983080585636,1.078933837890625,1.1120206176757812,1.1142853942871094,1.116097215576172,"[1.07029833984375, 1.0584957275390625, 1.108294677734375, 1.0651680908203125, 1.08034619140625, 1.111517333984375, 1.0983621826171874, 1.1165501708984376, 1.077521484375, 
1.0627215576171876]",tokens/s,58.06839222452689,kWh,3.1304545994439036e-05,3.4524201525863438e-06,1.338768171708143e-05,4.81446478641068e-05,tokens/kWh,1308556.66818509,,s,630,10.842747695922842,0.017210710628448967,0.0005346828702832428,0.017195311546325684,0.017828045463562012,0.018000730895996093,0.01855224822998047,"[0.0174653434753418, 0.0182857608795166, 0.017646400451660157, 0.01785856056213379, 0.01803273582458496, 0.017987360000610353, 0.017865055084228514, 0.017710880279541017, 0.017530879974365234, 0.018053279876708985, 0.017581439971923827, 0.0179399356842041, 0.017310560226440428, 0.01738972854614258, 0.01705779266357422, 0.01697107124328613, 0.01694585609436035, 0.01682022476196289, 0.016695295333862305, 0.017039552688598632, 0.016731967926025392, 0.016589120864868166, 0.016584352493286134, 0.016885791778564453, 0.016660703659057616, 0.01660495948791504, 0.016567392349243162, 0.01652214431762695, 0.016467967987060548, 0.01668515205383301, 0.016682592391967774, 0.016629056930541994, 0.01662259292602539, 0.016593183517456055, 0.016642847061157227, 0.016473119735717773, 0.016522815704345703, 0.016582815170288086, 0.016518911361694335, 0.016787328720092774, 0.016599615097045897, 0.016504831314086914, 0.016465951919555664, 0.01679155158996582, 0.016799711227416993, 0.01701273536682129, 0.016840127944946288, 0.016625696182250977, 0.016555744171142577, 0.01699907112121582, 0.016554143905639647, 0.01761052894592285, 0.017847711563110352, 0.017571903228759764, 0.016642816543579103, 0.016537599563598633, 0.016568063735961914, 0.016640256881713868, 0.016879039764404295, 0.01686300849914551, 0.017214239120483397, 0.01663088035583496, 0.01651910400390625, 0.016357215881347656, 0.01640617561340332, 0.01653196716308594, 0.01646556854248047, 0.016470367431640626, 0.01663593673706055, 0.01671369552612305, 0.01661667251586914, 0.016888416290283204, 0.016576704025268556, 0.01655193519592285, 0.01660710334777832, 0.01646972846984863, 0.01655628776550293, 0.016627872467041015, 0.016664575576782227, 0.01657241630554199, 0.01677926445007324, 0.016809120178222656, 0.016748767852783203, 0.016456319808959962, 0.018998720169067382, 0.01668908882141113, 0.01655062484741211, 0.016438655853271485, 0.016593151092529297, 0.016775392532348634, 0.016762943267822267, 0.016511999130249023, 0.01683308792114258, 0.016339391708374024, 0.01641881561279297, 0.016469696044921874, 0.016861503601074218, 0.01683865547180176, 0.017331584930419922, 0.0168536319732666, 0.016859136581420898, 0.016602304458618163, 0.01677302360534668, 0.016837087631225586, 0.017197216033935547, 0.01705603218078613, 0.01738956832885742, 0.016895999908447267, 0.01673200035095215, 0.01650908851623535, 0.0165515193939209, 0.01667523193359375, 0.016733280181884767, 0.01657744026184082, 0.017127103805541992, 0.016868896484375, 0.016871583938598632, 0.017172607421875, 0.017881248474121095, 0.01721379280090332, 0.016898048400878905, 0.016580352783203123, 0.016914688110351562, 0.01678745651245117, 0.016877376556396484, 0.017449087142944335, 0.016799072265625, 0.01753558349609375, 0.017384672164916994, 0.017421024322509766, 0.018126976013183593, 0.018327552795410155, 0.017285120010375975, 0.017569759368896484, 0.017117216110229493, 0.017233983993530273, 0.017327392578125, 0.017308319091796875, 0.017115135192871094, 0.016977920532226562, 0.017053152084350588, 0.017386079788208008, 0.016953279495239258, 0.017006399154663086, 0.017096895217895508, 0.017026880264282226, 0.01697100830078125, 0.017280128479003905, 0.017530271530151367, 0.018389408111572265, 
0.01784419250488281, 0.017653888702392578, 0.017530784606933594, 0.017551359176635743, 0.01779097557067871, 0.017735551834106446, 0.01779248046875, 0.018045600891113282, 0.0175861759185791, 0.017690624237060547, 0.017505632400512696, 0.017629823684692382, 0.017678272247314452, 0.0176296329498291, 0.017921920776367187, 0.017706783294677734, 0.018245344161987306, 0.018178335189819338, 0.017618656158447266, 0.01724675178527832, 0.0172541446685791, 0.01779654312133789, 0.017827840805053712, 0.017730112075805663, 0.01775987243652344, 0.017703168869018553, 0.0177706241607666, 0.017579103469848634, 0.01794755172729492, 0.017440576553344727, 0.0174289608001709, 0.017509855270385743, 0.01749836730957031, 0.0174552001953125, 0.018890207290649413, 0.018112512588500978, 0.017646047592163087, 0.017838111877441408, 0.017582239151000975, 0.01726464080810547, 0.017551359176635743, 0.01787446403503418, 0.0177873592376709, 0.017278976440429687, 0.017432256698608397, 0.018085504531860353, 0.017470144271850587, 0.017342079162597657, 0.017113439559936522, 0.016971807479858398, 0.016883712768554687, 0.017059839248657227, 0.01721343994140625, 0.016982015609741212, 0.01683046340942383, 0.0166297607421875, 0.016629247665405272, 0.016533792495727537, 0.01643289566040039, 0.01660371208190918, 0.016412576675415038, 0.01660438346862793, 0.016539487838745117, 0.01672492790222168, 0.01648361587524414, 0.016646879196166992, 0.01672105598449707, 0.016816160202026368, 0.01678214454650879, 0.01677884864807129, 0.016603551864624023, 0.016732160568237304, 0.01664204788208008, 0.016564224243164064, 0.016533504486083983, 0.01656012725830078, 0.016486400604248046, 0.016639999389648438, 0.016558080673217773, 0.01687459182739258, 0.017205408096313476, 0.016796319961547852, 0.016498336791992186, 0.0168022403717041, 0.016885055541992187, 0.017180479049682618, 0.0167903995513916, 0.016644096374511717, 0.016777215957641603, 0.01756159973144531, 0.0170883846282959, 0.017105024337768556, 0.016722112655639648, 0.016844608306884765, 0.016873472213745116, 0.017047552108764647, 0.016678176879882812, 0.016595327377319335, 0.01675503921508789, 0.01701478385925293, 0.01698406410217285, 0.017088512420654296, 0.016496192932128905, 0.016531871795654296, 0.016615423202514648, 0.016588512420654296, 0.01647439956665039, 0.016683263778686522, 0.01643903923034668, 0.016438592910766603, 0.016457727432250976, 0.016640544891357422, 0.016586912155151366, 0.016973600387573243, 0.016736736297607423, 0.017084447860717774, 0.01703264045715332, 0.0169638729095459, 0.016850624084472656, 0.016610719680786132, 0.0165262393951416, 0.016748544692993163, 0.01701683235168457, 0.016883712768554687, 0.017015871047973634, 0.016818975448608397, 0.01674870491027832, 0.016969728469848632, 0.017002080917358397, 0.017355167388916015, 0.017079839706420897, 0.017437088012695313, 0.01747974395751953, 0.01741926383972168, 0.0172926082611084, 0.017059167861938476, 0.016859743118286134, 0.016815488815307617, 0.01683030319213867, 0.01699590492248535, 0.016906335830688478, 0.017191808700561525, 0.017317888259887695, 0.01737126350402832, 0.017573375701904297, 0.0174780158996582, 0.017634592056274413, 0.01751318359375, 0.017685792922973634, 0.01773417663574219, 0.017518688201904296, 0.017641311645507814, 0.017566144943237303, 0.01751612854003906, 0.0177741756439209, 0.017535615921020507, 0.01760870361328125, 0.017711231231689453, 0.017632575988769533, 0.017836383819580078, 0.017510623931884767, 0.017612960815429686, 0.01757689666748047, 0.017653791427612305, 0.017953664779663085, 
0.017816255569458008, 0.017706880569458006, 0.01768876838684082, 0.01770639991760254, 0.017674655914306642, 0.017666048049926757, 0.017630752563476564, 0.017746623992919923, 0.019815488815307616, 0.01790025520324707, 0.017829887390136717, 0.01794047927856445, 0.017719295501708983, 0.01771878433227539, 0.017696735382080078, 0.018768255233764648, 0.01893120002746582, 0.018555551528930663, 0.017791200637817382, 0.017855680465698243, 0.017737855911254884, 0.017463775634765626, 0.017270143508911134, 0.017267072677612304, 0.01762124824523926, 0.01741209602355957, 0.01728102493286133, 0.017328128814697266, 0.01726780891418457, 0.01711510467529297, 0.017005504608154295, 0.017137664794921875, 0.01727894401550293, 0.017678367614746095, 0.01751030349731445, 0.017380992889404298, 0.017938911437988283, 0.017164287567138673, 0.01731331253051758, 0.01682259178161621, 0.01692483139038086, 0.01683456039428711, 0.01682784080505371, 0.016900672912597656, 0.016863231658935548, 0.016764928817749023, 0.016754240036010743, 0.017041088104248047, 0.017977504730224608, 0.01753353691101074, 0.017731584548950196, 0.01771628761291504, 0.017660863876342775, 0.017725439071655275, 0.017641471862792968, 0.01800169563293457, 0.017808895111083984, 0.017668767929077147, 0.01835935974121094, 0.019612863540649415, 0.017736576080322267, 0.01781158447265625, 0.0177324161529541, 0.017539039611816406, 0.017508384704589843, 0.017526784896850587, 0.017543167114257813, 0.01823744010925293, 0.017656991958618164, 0.017812320709228516, 0.01764761543273926, 0.017412351608276366, 0.018173696517944336, 0.017537023544311522, 0.017630847930908203, 0.017464736938476562, 0.017988576889038085, 0.017756160736083985, 0.017537151336669922, 0.017534719467163087, 0.017678464889526367, 0.01764169692993164, 0.01777862358093262, 0.017979232788085938, 0.017817407608032226, 0.017620159149169923, 0.017676448822021483, 0.017236032485961915, 0.0173450870513916, 0.017152223587036133, 0.017460800170898436, 0.01719340705871582, 0.017122720718383787, 0.01722835159301758, 0.017350336074829102, 0.01692095947265625, 0.01681814384460449, 0.017086463928222655, 0.017121280670166016, 0.01721958351135254, 0.01709004783630371, 0.016775455474853516, 0.016732383728027343, 0.01678745651245117, 0.016993663787841798, 0.01701728057861328, 0.016863264083862305, 0.01676873588562012, 0.016992704391479492, 0.01706188774108887, 0.016955392837524414, 0.01706598472595215, 0.017059904098510742, 0.018250944137573243, 0.01764249610900879, 0.01762892723083496, 0.017417503356933595, 0.017637344360351564, 0.017289312362670898, 0.017377952575683593, 0.017625408172607424, 0.01780294418334961, 0.017604608535766602, 0.017795072555541993, 0.01773151969909668, 0.017700063705444337, 0.017305471420288085, 0.01764339256286621, 0.01770982360839844, 0.01755366325378418, 0.017729087829589842, 0.018544160842895507, 0.01772332763671875, 0.017558496475219728, 0.017588224411010742, 0.017797311782836913, 0.017823583602905275, 0.017623008728027342, 0.017283071517944337, 0.017285120010375975, 0.017452224731445313, 0.017796960830688477, 0.017949663162231445, 0.017933792114257812, 0.017752511978149414, 0.017526111602783202, 0.017759008407592772, 0.018040159225463866, 0.01779964828491211, 0.01834409523010254, 0.01780940818786621, 0.018155519485473632, 0.017747007369995117, 0.01750259208679199, 0.017464128494262696, 0.017822559356689454, 0.017449888229370117, 0.017874303817749022, 0.017630975723266603, 0.01770822334289551, 0.01799955177307129, 0.018316511154174805, 0.017677087783813477, 0.017692607879638673, 
0.0177675838470459, 0.017518815994262697, 0.01747385597229004, 0.01750864028930664, 0.017450143814086914, 0.017602815628051757, 0.01757360076904297, 0.017482719421386718, 0.018110464096069336, 0.017541120529174805, 0.01776361656188965, 0.017591232299804686, 0.01774393653869629, 0.017830911636352538, 0.017457408905029295, 0.017414623260498047, 0.017550399780273437, 0.01794963264465332, 0.0178606071472168, 0.01756924819946289, 0.0176092472076416, 0.017483776092529296, 0.01760665512084961, 0.01809119987487793, 0.017898303985595703, 0.017475584030151366, 0.01759846305847168, 0.017665504455566406, 0.01762067222595215, 0.01754710388183594, 0.017437280654907225, 0.01709712028503418, 0.01698406410217285, 0.017631135940551757, 0.017406303405761717, 0.01733827209472656, 0.017121120452880858, 0.017127424240112304, 0.01716646385192871, 0.017041215896606444, 0.017233983993530273, 0.016924671173095703, 0.017315839767456053, 0.01705379295349121, 0.017153951644897462, 0.01670479965209961, 0.01653548812866211, 0.01647648048400879, 0.016498336791992186, 0.016506879806518555, 0.01665020751953125, 0.01643401527404785, 0.01643929672241211, 0.016695295333862305, 0.016688224792480468, 0.016664608001708985, 0.01656921577453613, 0.016629087448120118, 0.016777631759643554, 0.01699862480163574, 0.016740383148193358, 0.01659903907775879, 0.016564224243164064, 0.016472063064575194, 0.016529279708862303, 0.016526912689208983, 0.016746335983276368, 0.01657244873046875, 0.016522079467773437, 0.01662345504760742, 0.016953344345092772, 0.01731203269958496, 0.017413856506347657, 0.017960735321044922, 0.01822332763671875, 0.01755094337463379, 0.017538848876953124, 0.017455711364746093, 0.017444896697998046, 0.017536832809448243, 0.017549503326416017, 0.017550975799560546, 0.017858816146850587, 0.01748726463317871, 0.017650016784667967, 0.017387615203857423, 0.017397727966308594, 0.017430496215820313, 0.0169597110748291, 0.017233760833740234, 0.017413856506347657, 0.017434463500976563, 0.017347007751464843, 0.017239871978759765, 0.01716387176513672, 0.017279584884643553, 0.016821376800537108, 0.01724835205078125, 0.0169134407043457, 0.01689068794250488, 0.016869951248168945, 0.01711142349243164, 0.018208480834960936, 0.017135904312133788, 0.016701440811157226, 0.016473535537719727, 0.016498367309570314, 0.016534400939941407, 0.01655193519592285, 0.0168089599609375, 0.017312063217163085, 0.01675468826293945, 0.016552640914916993, 0.01656831932067871, 0.016564224243164064, 0.016676864624023437, 0.017188863754272463, 0.017522687911987304, 0.017049472808837892, 0.016790655136108397, 0.016522239685058594, 0.016660480499267577, 0.016685056686401366, 0.01678335952758789, 0.016863231658935548, 0.016614879608154297, 0.016582527160644532, 0.016458400726318358, 0.016627712249755858, 0.016898048400878905, 0.0165928955078125, 0.01680384063720703, 0.016524896621704102, 0.016506656646728516, 0.01649715232849121, 0.016496768951416017, 0.017821695327758787, 0.016623615264892578, 0.017561023712158202, 0.017644096374511718, 0.016689407348632813, 0.016807680130004884, 0.01659427261352539, 0.016540319442749023, 0.016495872497558593, 0.01659760093688965, 0.016543487548828124, 0.016754240036010743, 0.016590976715087892, 0.016532192230224608, 0.016547264099121092]",tokens/s,58.103353289028036,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 58.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 46.12 MiB is free. Process 129149 has 14.69 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 193.68 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,7183.450112,7954.366464,0.0,7551.844352,7485.12768,s,1,13.5118994140625,13.5118994140625,0.0,13.5118994140625,13.5118994140625,13.5118994140625,13.5118994140625,[13.5118994140625],,kWh,0.00017229993727917568,1.899824784741853e-05,5.2585042067995746e-05,0.00024388322719458996,,MB,2861.71136,8243.77344,0.0,7826.571264,7735.225344,s,10,3.4504120483398437,0.34504120483398437,0.0003829093104681222,0.3449942321777344,0.3454532775878906,0.3456072235107422,0.3457303802490234,"[0.3444007263183594, 0.3454190673828125, 0.3452509460449219, 0.34490609741210937, 0.3453109436035156, 0.34494451904296874, 0.3450439453125, 0.34576116943359375, 0.34469049072265623, 0.34468414306640627]",tokens/s,741.940372377188,kWh,1.0088179598563003e-05,1.112337049763456e-06,6.662227551999944e-06,1.7862744200326404e-05,tokens/kWh,14331504.562178198,MB,2871.0912,8285.71648,0.0,7868.514304,7758.594048,s,10,25.6630771484375,2.56630771484375,0.004253445084780657,2.5648686523437503,2.5713380126953123,2.572952453613281,2.574244006347656,"[2.560034423828125, 2.562760986328125, 2.564687255859375, 2.56933203125, 2.563849609375, 2.57456689453125, 2.56900927734375, 2.562807373046875, 2.570979248046875, 2.565050048828125]",tokens/s,24.54888774078122,kWh,7.501972643435452e-05,8.272747194896979e-06,4.991853993479969e-05,0.00013321101356405114,tokens/kWh,472933.86871280015,,s,630,25.659303691864054,0.04072905347914922,0.00028134266652981384,0.04069736099243164,0.04111484298706055,0.04128495121002197,0.0415263228225708,"[0.04111376190185547, 0.04060348892211914, 0.04032147216796875, 0.04037836837768555, 0.04021247863769531, 0.04046438217163086, 0.04046847915649414, 0.04033548736572266, 0.04014012908935547, 0.040113792419433594, 0.04033219146728516, 0.04037564849853516, 0.04022544097900391, 0.0403900146484375, 0.040577438354492186, 0.04043523025512695, 0.04048665618896485, 0.04058771133422852, 0.04049151992797852, 0.04040499114990234, 0.040400894165039065, 0.040597503662109374, 0.040542209625244144, 0.04036812973022461, 0.04048646545410156, 0.04051808166503906, 0.04061190414428711, 0.040468448638916014, 0.040494911193847655, 0.040399009704589844, 0.04107059097290039, 0.040576416015625, 0.040606304168701174, 0.040513534545898434, 0.04045414352416992, 0.04044595336914063, 0.04085174560546875, 0.040855262756347654, 0.04079964828491211, 0.040769569396972655, 0.04066966247558594, 0.04062627029418946, 0.04063212966918945, 0.04074515151977539, 0.04079167938232422, 0.04090003204345703, 0.040797119140625, 0.04071014404296875, 0.040639999389648435, 0.040645408630371097, 0.0407405776977539, 0.040860862731933595, 0.04075715255737305, 
0.040882720947265624, 0.04084323120117188, 0.04080467224121094, 0.04095772933959961, 0.04099103927612305, 0.04079411315917969, 0.04105625534057617, 0.04104995346069336, 0.04102979278564453, 0.041472000122070314, 0.04115135955810547, 0.040704254150390626, 0.040540767669677735, 0.04073062515258789, 0.040597503662109374, 0.0404310073852539, 0.04031343841552734, 0.040304641723632816, 0.040509441375732425, 0.040425472259521485, 0.04038358306884766, 0.040608158111572264, 0.0403807373046875, 0.040613792419433595, 0.040802593231201174, 0.04061196899414062, 0.040523551940917966, 0.04083244705200195, 0.04034761428833008, 0.04025539016723633, 0.04030748748779297, 0.040619873046875, 0.04063881683349609, 0.04059878540039062, 0.04063875198364258, 0.04062236785888672, 0.04077772903442383, 0.04097795104980469, 0.0407652473449707, 0.040817310333251956, 0.04079110336303711, 0.04079014587402344, 0.040786174774169924, 0.04083148956298828, 0.04074710464477539, 0.04054832077026367, 0.04060774230957031, 0.04084735870361328, 0.040853504180908204, 0.0407632942199707, 0.040664993286132815, 0.04072671890258789, 0.040516639709472654, 0.040473567962646485, 0.04057404708862305, 0.040541088104248044, 0.04062131118774414, 0.040575199127197266, 0.04058911895751953, 0.04065267181396484, 0.040711006164550784, 0.040722080230712894, 0.04065327835083008, 0.04064176177978516, 0.0407374382019043, 0.040941566467285154, 0.040908798217773434, 0.04099679946899414, 0.04076755142211914, 0.04084473419189453, 0.04104240036010742, 0.04096828842163086, 0.041115646362304685, 0.04096819305419922, 0.04052300643920898, 0.04065289688110352, 0.04066371154785156, 0.040478527069091795, 0.040528064727783204, 0.040716289520263675, 0.0405667839050293, 0.04055654525756836, 0.04073052978515625, 0.04042966461181641, 0.04069375991821289, 0.04058713531494141, 0.04062025451660156, 0.04057692718505859, 0.04124787139892578, 0.04099935913085938, 0.04052422332763672, 0.04055244827270508, 0.04055855941772461, 0.04062521743774414, 0.04062307357788086, 0.04053811264038086, 0.04025753784179688, 0.040474014282226564, 0.04078579330444336, 0.040618846893310544, 0.04046211242675781, 0.0407267837524414, 0.04071315383911133, 0.04067523193359375, 0.0407922248840332, 0.04072739028930664, 0.04065894317626953, 0.04078960037231445, 0.040517375946044924, 0.04075779342651367, 0.04093759918212891, 0.041114753723144534, 0.040809345245361325, 0.040615936279296876, 0.04083520126342773, 0.0407567024230957, 0.04076761627197266, 0.040710430145263675, 0.040602657318115236, 0.04051657485961914, 0.0405852165222168, 0.040548606872558596, 0.04059286499023437, 0.04059164810180664, 0.04072447967529297, 0.04064211273193359, 0.04067596817016601, 0.040824638366699216, 0.04063846588134765, 0.040842369079589845, 0.04093017578125, 0.0407789421081543, 0.04084729766845703, 0.04094655990600586, 0.041281185150146484, 0.04128803253173828, 0.04113628768920898, 0.040673118591308594, 0.04060774230957031, 0.040521728515625, 0.04070137786865234, 0.040502880096435545, 0.040389598846435545, 0.040460289001464846, 0.04038787078857422, 0.040375007629394534, 0.04065436935424805, 0.04048944091796875, 0.04053305435180664, 0.04077033615112305, 0.04106630325317383, 0.04108937454223633, 0.04101529693603516, 0.040718143463134765, 0.04059769439697265, 0.04050124740600586, 0.040441856384277344, 0.040562400817871096, 0.04032329559326172, 0.040624000549316405, 0.04077484893798828, 0.04068998336791992, 0.04066976165771485, 0.040556671142578125, 0.0407815055847168, 0.04089212799072266, 0.040483425140380856, 
0.04068864059448242, 0.040412158966064454, 0.040742496490478515, 0.040548095703125, 0.040741535186767576, 0.04110883331298828, 0.040804351806640625, 0.04094192123413086, 0.04096614456176758, 0.040806720733642575, 0.040738815307617186, 0.040752223968505856, 0.04081081771850586, 0.04074147033691406, 0.040855648040771485, 0.04081619262695312, 0.0409686393737793, 0.04081983947753906, 0.04095049667358398, 0.040702014923095706, 0.040718048095703126, 0.04086185455322266, 0.04101686477661133, 0.04097052764892578, 0.04080192184448242, 0.040957889556884765, 0.04087827301025391, 0.041404544830322264, 0.04141852951049805, 0.041204383850097656, 0.041457664489746096, 0.0413573112487793, 0.041444927215576174, 0.040710784912109374, 0.040376319885253906, 0.040515583038330076, 0.04072243118286133, 0.04049926376342773, 0.04046425628662109, 0.04045612716674805, 0.04045836639404297, 0.040525825500488284, 0.04041932678222656, 0.04057907104492187, 0.04063436889648438, 0.040678497314453124, 0.04091791915893555, 0.04086783981323242, 0.040796192169189456, 0.04110742568969727, 0.04055654525756836, 0.04030476760864258, 0.040226688385009764, 0.040451488494873046, 0.04064112091064453, 0.04053401565551758, 0.04037200164794922, 0.0405808334350586, 0.04058777618408203, 0.04084697723388672, 0.04061187362670898, 0.04067094421386719, 0.04053670501708984, 0.040767040252685544, 0.04084307098388672, 0.040753089904785156, 0.04081692886352539, 0.04070851135253906, 0.04098179244995117, 0.040702945709228514, 0.04072627258300781, 0.04074067306518555, 0.040951904296875, 0.040887680053710934, 0.040764129638671875, 0.04053401565551758, 0.04051507186889648, 0.04057769775390625, 0.04057884979248047, 0.040528064727783204, 0.04055830383300781, 0.04063248062133789, 0.04076047897338867, 0.04069686508178711, 0.04079596710205078, 0.04079561614990234, 0.040667678833007814, 0.04067737579345703, 0.04066099166870117, 0.04080559921264648, 0.04090755081176758, 0.04097014236450195, 0.04099663925170898, 0.04089382553100586, 0.04118214416503906, 0.04122140884399414, 0.040699745178222654, 0.04081343841552734, 0.04075929641723633, 0.04069491195678711, 0.04129235076904297, 0.04096566390991211, 0.04084796905517578, 0.04055209732055664, 0.040480670928955076, 0.040430206298828125, 0.04082102584838867, 0.040851264953613284, 0.040669086456298825, 0.04109721755981445, 0.04087302398681641, 0.0408642578125, 0.040517856597900394, 0.04055039978027344, 0.04060710525512695, 0.0406286735534668, 0.04073260879516601, 0.0406409912109375, 0.04043366241455078, 0.040525825500488284, 0.04068966293334961, 0.04056883239746094, 0.0408265266418457, 0.040969982147216796, 0.04096646499633789, 0.04081078338623047, 0.040687614440917966, 0.040645950317382815, 0.040880832672119144, 0.04085289764404297, 0.04076604843139649, 0.040782848358154294, 0.04065792083740234, 0.04073455810546875, 0.04068783950805664, 0.04083248138427734, 0.04081097412109375, 0.04077363204956055, 0.040703998565673825, 0.0408616943359375, 0.04089632034301758, 0.04085984039306641, 0.040818145751953125, 0.04082668685913086, 0.040839710235595704, 0.040857406616210935, 0.041662559509277344, 0.04088860702514648, 0.04136284637451172, 0.041443870544433596, 0.041178592681884764, 0.04115875244140625, 0.041297824859619144, 0.04146031951904297, 0.041346942901611325, 0.04103184127807617, 0.041086944580078125, 0.04103094482421875, 0.04125942230224609, 0.040340225219726564, 0.04038143920898438, 0.04021241760253906, 0.040470462799072265, 0.04053580856323242, 0.04067929458618164, 0.04044844818115234, 0.04066620635986328, 
0.040799137115478515, 0.040734432220458985, 0.04049046325683594, 0.040648670196533204, 0.041027713775634765, 0.04089904022216797, 0.040839359283447264, 0.04084735870361328, 0.04069792175292969, 0.04044800186157226, 0.040583168029785156, 0.040504543304443356, 0.04055311965942383, 0.04045427322387695, 0.04048227310180664, 0.040421920776367186, 0.040414657592773434, 0.04049299240112305, 0.04079884719848633, 0.04070588684082031, 0.04046249771118164, 0.04062419128417969, 0.0408023681640625, 0.04094348907470703, 0.040812545776367185, 0.040643936157226564, 0.040729248046875, 0.04079446411132812, 0.04067907333374023, 0.04072447967529297, 0.04084940719604492, 0.04094527816772461, 0.04074355316162109, 0.04070150375366211, 0.04089977645874023, 0.04066611099243164, 0.04069375991821289, 0.04059545516967773, 0.0405852165222168, 0.04062822341918945, 0.04059699249267578, 0.04072454452514648, 0.04075155258178711, 0.04096409606933594, 0.04107059097290039, 0.041266334533691405, 0.04133359909057617, 0.04125286483764649, 0.0411420783996582, 0.041558208465576174, 0.04148223876953125, 0.04176863861083984, 0.0411794548034668, 0.04118544006347656, 0.041017471313476564, 0.04033795166015625, 0.04028396987915039, 0.04018972778320312, 0.040253791809082035, 0.04024121475219727, 0.04032921600341797, 0.04030668640136719, 0.040218624114990234, 0.040425472259521485, 0.04071615982055664, 0.040564865112304685, 0.04043161773681641, 0.04053811264038086, 0.04065484619140625, 0.04039820861816406, 0.04052220916748047, 0.04041535949707031, 0.04043779373168945, 0.04043270492553711, 0.040616897583007815, 0.04041113662719727, 0.0404354248046875, 0.04037865447998047, 0.040601089477539064, 0.04043644714355469, 0.040406753540039066, 0.040355903625488285, 0.04050739288330078, 0.04062966537475586, 0.04042607879638672, 0.04049123382568359, 0.040554271697998044, 0.040697856903076174, 0.04057219314575195, 0.04048070526123047, 0.04079254531860352, 0.040880001068115235, 0.04060409545898438, 0.04072447967529297, 0.04079795074462891, 0.0406379508972168, 0.04064742279052734, 0.04080963134765625, 0.04073353576660156, 0.04086566543579102, 0.040939006805419925, 0.04083062362670899, 0.04077020645141602, 0.04089916610717773, 0.0408757438659668, 0.04107059097290039, 0.041323745727539066, 0.0407591667175293, 0.04148332977294922, 0.04145248031616211, 0.04116092681884766, 0.04084128189086914, 0.04078656005859375, 0.041218048095703126, 0.041217758178710935, 0.041076126098632815, 0.04153843307495117, 0.04103577423095703, 0.04024860763549805, 0.04068425750732422, 0.04053104019165039, 0.04038873672485352, 0.04039759826660156, 0.040482463836669924, 0.040667137145996096, 0.040508865356445316, 0.04056496047973633, 0.040508094787597655, 0.04054995346069336, 0.04061228942871094, 0.04063436889648438, 0.04087782287597656, 0.04104422378540039, 0.04058217620849609, 0.04044883346557617, 0.040347488403320315, 0.04058143997192383, 0.040476673126220705, 0.040621505737304685, 0.04062217712402344, 0.04046412658691406, 0.04047536087036133, 0.04050028610229492, 0.04048083114624024, 0.04049190521240234, 0.04058313751220703, 0.04072220611572266, 0.040786174774169924, 0.04068966293334961, 0.04084326553344726, 0.040673152923583984, 0.040738945007324216, 0.04072582244873047, 0.04057158279418945, 0.04084035110473633, 0.04122671890258789, 0.0407283821105957, 0.04066156768798828, 0.040766624450683596, 0.04080300903320312, 0.04137590408325195, 0.041199615478515625, 0.04129587173461914, 0.04100505447387695, 0.04102054214477539, 0.04114223861694336, 0.04102406311035156, 
0.04092745590209961, 0.04105023956298828, 0.04113011169433594, 0.04101884841918945, 0.04087587356567383, 0.040855457305908206, 0.04091743850708008, 0.041686721801757816, 0.04157433700561523, 0.041299713134765624, 0.04130656051635742, 0.041342529296875, 0.041347232818603516, 0.041113536834716795, 0.04050956726074219, 0.04047209548950195, 0.04053631973266601, 0.04034572982788086, 0.04030403137207031, 0.04041996765136719, 0.040103935241699216, 0.040142494201660155, 0.04045001602172851, 0.04061222457885742, 0.0406479377746582, 0.0403054084777832, 0.04063436889648438, 0.040630271911621094, 0.040419422149658206, 0.04031689453125, 0.040398784637451175, 0.040365856170654295, 0.04045391845703125, 0.04034809494018555, 0.04051792144775391, 0.0405233268737793, 0.04044611358642578, 0.040439006805419925, 0.04041603088378906, 0.04041865539550781, 0.0404911994934082, 0.040468929290771484, 0.04045171356201172, 0.04042956924438477, 0.040468929290771484, 0.04052169418334961, 0.040674465179443356, 0.04082361602783203, 0.040556671142578125, 0.040709728240966796, 0.04070003128051758, 0.040976383209228515, 0.040898624420166015, 0.04089622497558594, 0.040673118591308594, 0.0407946891784668, 0.040699905395507815, 0.04063846588134765, 0.04071788787841797, 0.04096454238891602, 0.04124262237548828, 0.04126518249511719, 0.0411627197265625, 0.04114332962036133, 0.04174127960205078, 0.04149667358398437, 0.04123788833618164, 0.04102998352050781, 0.041025825500488285, 0.04113340759277344, 0.04117721557617188, 0.0409502067565918, 0.0408616943359375, 0.04121308898925781, 0.041081695556640624, 0.04105615997314453]",tokens/s,24.552497899612096,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4967.366656,7338.917888,0.0,6943.670272,6539.1744,s,1,11.2944716796875,11.2944716796875,0.0,11.2944716796875,11.2944716796875,11.2944716796875,11.2944716796875,[11.2944716796875],,kWh,0.00012866641170834327,1.4180290562249682e-05,4.057586579400696e-05,0.0001834225680645999,,MB,5049.356288,7634.61632,0.0,7224.68864,6917.39904,s,10,1.9967895507812499,0.199678955078125,0.0007747060688834615,0.19981849670410157,0.20028613281250002,0.20068047332763672,0.20099594573974608,"[0.19812553405761718, 0.19894613647460938, 0.19987551879882812, 0.19898886108398436, 0.19967721557617188, 0.199761474609375, 0.200023193359375, 0.20019850158691407, 0.20011830139160156, 0.20107481384277343]",tokens/s,1282.05799103786,kWh,5.856164737500043e-06,6.455097372657867e-07,3.867208649319942e-06,1.036888312408577e-05,tokens/kWh,24689255.046702214,MB,5053.059072,7636.713472,0.0,7226.785792,6917.4016,s,10,19.06018884277344,1.9060188842773438,0.010296499028534297,1.9040180053710938,1.9154139160156252,1.922641162109375,1.928422958984375,"[1.929868408203125, 1.9029803466796875, 1.89453857421875, 1.9039404296875, 1.906664306640625, 1.913807861328125, 1.892323974609375, 1.8991402587890625, 1.9040955810546876, 
1.9128291015625]",tokens/s,33.05318773055394,kWh,5.549881273124834e-05,6.121207348230365e-06,3.697640735888019e-05,9.859642743835888e-05,tokens/kWh,638968.3849284167,,s,630,19.05779961586,0.03025047558073014,0.00047673951892125124,0.03013876724243164,0.03063316173553467,0.03107121591567993,0.03180391098022461,"[0.03129859161376953, 0.030673088073730467, 0.031146303176879882, 0.03107683181762695, 0.03062326431274414, 0.030982624053955077, 0.030506240844726563, 0.030615903854370116, 0.030566816329956056, 0.030770944595336913, 0.031137439727783205, 0.030509599685668944, 0.030431295394897463, 0.030519296646118164, 0.03037798309326172, 0.03042918395996094, 0.030146080017089842, 0.03040239906311035, 0.03081484794616699, 0.03061759948730469, 0.030752288818359376, 0.030439456939697265, 0.030501312255859375, 0.030496768951416016, 0.030338495254516602, 0.03172614479064941, 0.03254886245727539, 0.031055231094360352, 0.030732927322387697, 0.0305798397064209, 0.03080691146850586, 0.0314202880859375, 0.030592575073242187, 0.030406879425048827, 0.03052783966064453, 0.030463199615478515, 0.030421791076660157, 0.030592063903808593, 0.030530496597290038, 0.030561344146728515, 0.0306426887512207, 0.03071996879577637, 0.031687135696411135, 0.030390272140502928, 0.030363071441650392, 0.03035100746154785, 0.030586944580078126, 0.030501535415649414, 0.030496959686279298, 0.030238399505615233, 0.03043177604675293, 0.0306296329498291, 0.030708959579467773, 0.030554815292358397, 0.03055628776550293, 0.030447616577148437, 0.03038822364807129, 0.030095359802246095, 0.03027519989013672, 0.03030393600463867, 0.030103872299194336, 0.030042112350463866, 0.029989248275756837, 0.0314582405090332, 0.030508384704589844, 0.03055174446105957, 0.03043414306640625, 0.03021536064147949, 0.03033580780029297, 0.030102943420410155, 0.03015331268310547, 0.030193727493286134, 0.030291904449462892, 0.031477407455444334, 0.03134499168395996, 0.03028700828552246, 0.03023548889160156, 0.030345216751098632, 0.03040460777282715, 0.03014067268371582, 0.030095104217529298, 0.029926431655883788, 0.03021308708190918, 0.030006464004516602, 0.030372575759887697, 0.032732513427734374, 0.030339359283447265, 0.030513023376464842, 0.030187328338623046, 0.030294815063476564, 0.030454912185668946, 0.030013952255249023, 0.02992086410522461, 0.029970495223999024, 0.029944543838500978, 0.02987238311767578, 0.030137216567993164, 0.02994166374206543, 0.029778911590576173, 0.02973695945739746, 0.02992313575744629, 0.030015296936035156, 0.030003583908081055, 0.029841535568237303, 0.029795679092407226, 0.029800832748413084, 0.02985795211791992, 0.029811872482299804, 0.02978895950317383, 0.02984457588195801, 0.02999385643005371, 0.03011996841430664, 0.030484575271606446, 0.030021440505981444, 0.030058080673217774, 0.029975103378295898, 0.030066688537597655, 0.030068288803100585, 0.030136159896850586, 0.029958751678466795, 0.029927167892456055, 0.029927200317382812, 0.029923807144165038, 0.03038003158569336, 0.02999465560913086, 0.030079328536987304, 0.03134668731689453, 0.03058073616027832, 0.03002787208557129, 0.029870336532592773, 0.029870880126953124, 0.02986892890930176, 0.02982659149169922, 0.030126560211181642, 0.03019164848327637, 0.030375904083251953, 0.03038934326171875, 0.0300677433013916, 0.030160512924194336, 0.029890815734863282, 0.030056671142578126, 0.02994767951965332, 0.029908992767333983, 0.030062591552734375, 0.030203903198242187, 0.030042335510253905, 0.030232351303100587, 0.029921152114868163, 0.02990662384033203, 0.03006892776489258, 
0.030767168045043945, 0.02984940719604492, 0.0299237117767334, 0.030117664337158203, 0.03027292823791504, 0.030085952758789062, 0.030187231063842773, 0.03385782241821289, 0.030023679733276368, 0.02983443260192871, 0.02987273597717285, 0.029943552017211914, 0.030114336013793944, 0.030416831970214844, 0.02996227264404297, 0.029786079406738282, 0.029722335815429688, 0.02996169662475586, 0.02999193572998047, 0.029967327117919922, 0.029780832290649414, 0.02987606430053711, 0.029664640426635743, 0.02976028823852539, 0.02977177619934082, 0.02975334358215332, 0.029771520614624025, 0.030120288848876953, 0.029984031677246094, 0.0298604793548584, 0.02973695945739746, 0.02977382469177246, 0.02976972770690918, 0.030029247283935547, 0.02978054428100586, 0.02981622314453125, 0.02970844841003418, 0.029781919479370117, 0.029979167938232423, 0.03127779197692871, 0.030313440322875976, 0.030029951095581056, 0.029896575927734374, 0.02984934425354004, 0.029720191955566407, 0.02984204864501953, 0.029945375442504883, 0.030240671157836914, 0.030122592926025392, 0.029882144927978516, 0.029956287384033203, 0.029824384689331053, 0.02993404769897461, 0.029838911056518556, 0.030179935455322264, 0.02980611228942871, 0.02988902473449707, 0.029867040634155274, 0.02986899185180664, 0.029949535369873048, 0.03181609535217285, 0.03151615905761719, 0.030721920013427734, 0.030300256729125976, 0.030118431091308594, 0.030154624938964845, 0.029976703643798827, 0.029916608810424804, 0.030039871215820312, 0.030098175048828123, 0.029851648330688478, 0.02980463981628418, 0.029986976623535156, 0.03002956771850586, 0.03019161605834961, 0.029995008468627928, 0.02992153549194336, 0.02994268798828125, 0.030077791213989256, 0.030488576889038086, 0.03014041519165039, 0.030037311553955077, 0.030073055267333983, 0.030243295669555664, 0.03009292793273926, 0.02993814468383789, 0.030149951934814453, 0.031111135482788085, 0.03076995277404785, 0.03083353614807129, 0.030483327865600585, 0.03050931167602539, 0.030560096740722655, 0.03033497619628906, 0.030422304153442385, 0.03006057548522949, 0.030173120498657228, 0.030089984893798827, 0.03158835220336914, 0.030263296127319338, 0.030294015884399415, 0.03035116767883301, 0.030283615112304686, 0.0302127685546875, 0.030044063568115235, 0.029954271316528322, 0.029872127532958984, 0.030061792373657227, 0.030417695999145507, 0.030330368041992187, 0.030175552368164063, 0.030232736587524414, 0.02997865676879883, 0.03003331184387207, 0.030079296112060547, 0.029929344177246093, 0.030191776275634765, 0.0303819522857666, 0.030412767410278322, 0.03064463996887207, 0.030516416549682616, 0.03091244888305664, 0.03106435203552246, 0.030654144287109376, 0.030440128326416016, 0.03063216018676758, 0.03038003158569336, 0.030208000183105467, 0.030146495819091797, 0.030076992034912108, 0.029937664031982423, 0.02993561553955078, 0.02998624038696289, 0.030069311141967772, 0.029997055053710937, 0.03018454360961914, 0.030229408264160155, 0.030243839263916016, 0.030075904846191406, 0.02996156883239746, 0.03003664016723633, 0.029925376892089843, 0.029923295974731444, 0.030095392227172852, 0.030107648849487304, 0.03008892822265625, 0.030141984939575196, 0.030161376953125, 0.030243104934692382, 0.03025222396850586, 0.03034547233581543, 0.03033964729309082, 0.030948736190795897, 0.030835264205932616, 0.030393951416015624, 0.030439327239990235, 0.030407136917114257, 0.03040880012512207, 0.030279680252075194, 0.03032268714904785, 0.03037295913696289, 0.030321535110473634, 0.03030633544921875, 0.03055014419555664, 
0.030279552459716797, 0.031188671112060546, 0.030441791534423827, 0.030107135772705077, 0.030621856689453126, 0.03038857650756836, 0.030642175674438478, 0.030537248611450196, 0.031631839752197265, 0.03040460777282715, 0.03137091255187988, 0.03005251121520996, 0.02997228813171387, 0.030007680892944335, 0.03055615997314453, 0.03082057571411133, 0.0305611515045166, 0.030538623809814452, 0.03056630325317383, 0.030550079345703127, 0.030353471755981444, 0.03060940742492676, 0.030556095123291015, 0.030343231201171876, 0.030060415267944337, 0.030159231185913085, 0.030228416442871095, 0.029913087844848633, 0.03017046356201172, 0.030073312759399413, 0.03034316825866699, 0.030430816650390626, 0.03053401565551758, 0.030408544540405272, 0.03039174461364746, 0.030246912002563478, 0.030294015884399415, 0.029952127456665038, 0.029970655441284178, 0.029894880294799805, 0.029890016555786134, 0.029890399932861328, 0.029889408111572265, 0.0299683837890625, 0.030014816284179686, 0.030849472045898437, 0.03034339141845703, 0.03051683235168457, 0.030314048767089843, 0.03025200080871582, 0.030181343078613282, 0.030070688247680662, 0.03010291290283203, 0.0309736328125, 0.030395263671875, 0.03004800033569336, 0.030218559265136717, 0.030455135345458986, 0.03018169593811035, 0.030140127182006836, 0.03191849517822266, 0.03042460823059082, 0.030165119171142576, 0.03045609664916992, 0.03101750373840332, 0.030507232666015623, 0.03056025505065918, 0.03038585662841797, 0.03057695960998535, 0.030148479461669923, 0.030130271911621095, 0.03000476837158203, 0.029847488403320313, 0.029998975753784178, 0.030148479461669923, 0.029969215393066406, 0.02988822364807129, 0.03003343963623047, 0.02985241508483887, 0.030066015243530274, 0.029794975280761717, 0.02985958480834961, 0.029802240371704102, 0.029825056076049804, 0.029839839935302735, 0.02975446319580078, 0.02974812889099121, 0.0297524471282959, 0.029846399307250977, 0.029747200012207032, 0.029717824935913087, 0.02983919906616211, 0.029868896484375, 0.02977996826171875, 0.030078975677490235, 0.030448896408081055, 0.030288576126098633, 0.029879871368408202, 0.02991360092163086, 0.02977177619934082, 0.029781536102294923, 0.030060800552368164, 0.02974332809448242, 0.02977961540222168, 0.02971683120727539, 0.029886463165283202, 0.02974211120605469, 0.03030729675292969, 0.029982336044311525, 0.02989913558959961, 0.029829248428344727, 0.029757312774658203, 0.02993152046203613, 0.030043743133544923, 0.029858207702636717, 0.030466047286987305, 0.030365695953369142, 0.030257152557373046, 0.030093311309814453, 0.030107648849487304, 0.03097599983215332, 0.03033497619628906, 0.03039232063293457, 0.030115840911865234, 0.02995199966430664, 0.030086847305297853, 0.029913408279418945, 0.030811840057373047, 0.030533632278442382, 0.030552831649780274, 0.030233983993530274, 0.030122623443603516, 0.030205215454101562, 0.030100191116333008, 0.03040412712097168, 0.029970912933349608, 0.03018067169189453, 0.029921663284301757, 0.030062816619873048, 0.030090335845947266, 0.03045084762573242, 0.030275423049926756, 0.030496351242065428, 0.030402816772460938, 0.03033103942871094, 0.03009903907775879, 0.030301952362060548, 0.030081695556640625, 0.030116064071655273, 0.030000064849853517, 0.029823423385620117, 0.029866399765014647, 0.029917184829711913, 0.029921279907226563, 0.03038617515563965, 0.030154367446899415, 0.030320608139038085, 0.029987232208251953, 0.030007295608520508, 0.02989206314086914, 0.029903392791748046, 0.03001753616333008, 0.029808000564575197, 0.02987071990966797, 0.0299204158782959, 
0.02983612823486328, 0.02981068801879883, 0.03013222312927246, 0.03017932891845703, 0.03006163215637207, 0.03017375946044922, 0.03005404853820801, 0.029890335083007813, 0.02989148712158203, 0.029914655685424806, 0.02988287925720215, 0.029867040634155274, 0.030338016510009766, 0.029924543380737304, 0.03156870460510254, 0.03017728042602539, 0.030584640502929687, 0.03043756866455078, 0.03020595169067383, 0.030066688537597655, 0.03054182434082031, 0.029912128448486328, 0.030067007064819337, 0.0298338565826416, 0.030023679733276368, 0.030578655242919923, 0.030278591156005858, 0.030046207427978516, 0.029929471969604493, 0.0317071361541748, 0.0305248966217041, 0.029912704467773436, 0.03014672088623047, 0.02995916748046875, 0.03166921615600586, 0.029852447509765626, 0.02991321563720703, 0.0305511360168457, 0.030367679595947265, 0.030507871627807617, 0.030484672546386718, 0.031165855407714844, 0.030007711410522463, 0.0299718074798584, 0.029878944396972657, 0.03018137550354004, 0.03014860725402832, 0.029990976333618163, 0.02982700729370117, 0.029738624572753905, 0.03020841598510742, 0.03008665657043457, 0.030452192306518553, 0.03043328094482422, 0.03030624008178711, 0.03030431938171387, 0.030345216751098632, 0.030031871795654298, 0.029886463165283202, 0.02999513626098633, 0.02978332710266113, 0.030052959442138674, 0.029742143630981446, 0.029961151123046877, 0.030121984481811522, 0.030188831329345703, 0.030193471908569337, 0.030485408782958984, 0.030726144790649414, 0.03041663932800293, 0.029877504348754882, 0.02998284721374512, 0.029931711196899413, 0.030446271896362304, 0.029997055053710937, 0.030027584075927736, 0.03006892776489258, 0.030292064666748046, 0.03055779266357422, 0.030587200164794923, 0.030398176193237304, 0.0304597110748291, 0.030101984024047852, 0.03023052787780762, 0.029999359130859375, 0.029894399642944335, 0.030021631240844726, 0.02993152046203613, 0.03174355125427246, 0.030837568283081054, 0.030349311828613282, 0.030137407302856446, 0.030045120239257813, 0.029968671798706055, 0.030154464721679687, 0.030324735641479493, 0.029997055053710937, 0.03034716796875, 0.030263391494750977, 0.03030793571472168, 0.030294240951538084, 0.03177408027648926, 0.03032761573791504, 0.030089151382446288, 0.03006591987609863, 0.030472959518432617, 0.029880096435546875, 0.030283199310302735, 0.029752159118652345, 0.02980454444885254, 0.029884416580200194, 0.03009328079223633, 0.029855775833129882, 0.030046207427978516, 0.029988000869750977, 0.030093631744384765, 0.029975072860717773, 0.030078592300415038, 0.030038335800170898, 0.029921344757080078, 0.03014566421508789, 0.0304169921875, 0.030487039566040038, 0.030277824401855467, 0.030306400299072264, 0.030508287429809572, 0.030558687210083008, 0.033962272644042966, 0.030544992446899413, 0.030407583236694336, 0.03018288040161133, 0.03015292739868164, 0.030736576080322264, 0.030195775985717772, 0.030109888076782228, 0.029957952499389647, 0.029958208084106444, 0.030044160842895507, 0.02999087905883789, 0.030283456802368165, 0.0304268798828125, 0.03010982322692871, 0.02992313575744629, 0.029978624343872072, 0.029932191848754883, 0.02995167922973633, 0.030418495178222656, 0.02991961669921875, 0.03160623931884766, 0.033751617431640624, 0.03011961555480957]",tokens/s,33.057331522979766,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,2682.626048,3346.92352,0.0,2944.401408,2910.225408,s,1,9.938994140625,9.938994140625,0.0,9.938994140625,9.938994140625,9.938994140625,9.938994140625,[9.938994140625],,kWh,6.905283939166414e-05,7.609626959293189e-06,2.032529403800387e-05,9.69877603889612e-05,,MB,1606.08256,3565.027328,0.0,3147.825152,3105.082368,s,12,1.0891100845336914,0.09075917371114095,0.00022011345168456773,0.09074781036376953,0.09092230606079102,0.0910980453491211,0.0912656234741211,"[0.0913075180053711, 0.09088313293457032, 0.09042540740966797, 0.09067254638671875, 0.09072886657714843, 0.09053392028808593, 0.0909266586303711, 0.09080470275878906, 0.09058201599121093, 0.09076675415039062, 0.09086662292480469, 0.0906119384765625]",tokens/s,2820.6515058716896,kWh,2.6805505149081837e-06,2.9561680905673873e-07,1.77283780970646e-06,4.7490051336713825e-06,tokens/kWh,53906027.219239995,MB,1614.336,3565.027328,0.0,3147.825152,3105.084928,s,12,9.693273376464845,0.8077727813720704,0.003943744771756468,0.8059530639648438,0.8139446166992188,0.814626919555664,0.8149202081298828,"[0.8149935302734375, 0.8105034790039063, 0.8143269653320313, 0.809943115234375, 0.8034149780273437, 0.8058055419921875, 0.81023291015625, 0.8027545166015625, 0.804509765625, 0.8052086181640625, 0.8061005859375, 0.8054793701171875]",tokens/s,77.99222931600787,kWh,2.3707136658356505e-05,2.6128488350404907e-06,1.5832592941293623e-05,4.215257843469062e-05,tokens/kWh,1494570.4946047245,,s,756,9.690600998878466,0.0128182552895218,0.0002839240358972027,0.012785408020019531,0.012916895866394043,0.012958520174026489,0.013504964590072634,"[0.013310976028442383, 0.012850048065185547, 0.012836864471435547, 0.012748895645141601, 0.012763168334960938, 0.012728192329406739, 0.012717472076416016, 0.012750752449035644, 0.012801759719848633, 0.01279190444946289, 0.012735103607177735, 0.012810496330261231, 0.012767231941223145, 0.01271110439300537, 0.012751359939575196, 0.012666399955749513, 0.012682016372680665, 0.012724224090576173, 0.012750847816467285, 0.012742527961730957, 0.012772512435913087, 0.01272316837310791, 0.012760512351989746, 0.01282089614868164, 0.012826080322265626, 0.012798784255981445, 0.012814208030700684, 0.01321340847015381, 0.01866166305541992, 0.013531135559082032, 0.012636159896850586, 0.012586112022399903, 0.012692511558532715, 0.012705632209777832, 0.012742655754089355, 0.012820672035217286, 0.012859295845031739, 0.012885055541992187, 0.012893024444580079, 0.012918463706970215, 0.012917056083679199, 0.012869088172912598, 0.012781536102294922, 0.012786239624023437, 0.012828672409057617, 0.012813983917236328, 0.012861663818359376, 0.012848704338073731, 0.012845631599426269, 0.012922240257263183, 0.012927616119384765, 0.012912639617919922, 0.012946911811828613, 0.012909088134765625, 0.012914752006530762, 0.013162367820739746, 
0.012798015594482421, 0.012857215881347656, 0.012900287628173829, 0.012867775917053223, 0.0129202241897583, 0.012916704177856446, 0.012931296348571777, 0.013386303901672364, 0.012873855590820313, 0.01281993579864502, 0.012816800117492675, 0.01281158447265625, 0.01284124755859375, 0.012748384475708007, 0.01271072006225586, 0.012678720474243165, 0.01280454444885254, 0.01275699234008789, 0.012660736083984376, 0.012826623916625977, 0.012661919593811036, 0.012648736000061035, 0.012693920135498048, 0.012676480293273926, 0.012655391693115234, 0.012642304420471191, 0.012729344367980957, 0.012753184318542481, 0.012908831596374511, 0.01278320026397705, 0.012703743934631348, 0.012734687805175782, 0.012759679794311524, 0.012793408393859864, 0.012833312034606933, 0.012863391876220704, 0.013021183967590331, 0.012851360321044922, 0.012880736351013184, 0.01285427188873291, 0.012826239585876465, 0.012845439910888672, 0.012904224395751953, 0.01283033561706543, 0.012776032447814941, 0.012781567573547363, 0.012836864471435547, 0.012836864471435547, 0.012795040130615234, 0.012833632469177247, 0.012851200103759765, 0.013823007583618164, 0.014273504257202148, 0.012906496047973632, 0.012838912010192872, 0.012851200103759765, 0.01287712001800537, 0.012862144470214843, 0.012887359619140625, 0.012821184158325196, 0.012818143844604493, 0.012839200019836426, 0.01284438419342041, 0.012944031715393066, 0.01293619155883789, 0.012880576133728028, 0.0128755521774292, 0.012900896072387695, 0.012931072235107421, 0.01287168025970459, 0.013469887733459472, 0.012906399726867676, 0.012892160415649414, 0.01317632007598877, 0.01474131202697754, 0.01486508846282959, 0.012726271629333496, 0.012726495742797852, 0.012660351753234864, 0.012715488433837891, 0.012728351593017578, 0.012718111991882323, 0.012718720436096191, 0.01272329616546631, 0.012751775741577149, 0.012742655754089355, 0.012757023811340333, 0.012786911964416505, 0.01282534408569336, 0.012828672409057617, 0.012789152145385741, 0.012798111915588379, 0.012784064292907715, 0.012804096221923827, 0.012795743942260742, 0.012838432312011719, 0.012825023651123047, 0.012811967849731445, 0.012882431983947755, 0.01288304042816162, 0.01285212802886963, 0.012772576332092285, 0.0127926082611084, 0.012816384315490722, 0.012904447555541992, 0.01283795166015625, 0.012880736351013184, 0.012881248474121094, 0.012890624046325684, 0.012873984336853028, 0.012918784141540527, 0.012937215805053711, 0.012939264297485351, 0.012838624000549316, 0.013402688026428222, 0.012851936340332031, 0.012790783882141114, 0.012869248390197754, 0.012861248016357421, 0.01283948802947998, 0.012889568328857421, 0.012932640075683594, 0.012891136169433593, 0.012865535736083985, 0.012857343673706055, 0.012928895950317383, 0.01291481590270996, 0.01287763214111328, 0.012864831924438477, 0.012900704383850098, 0.012905216217041016, 0.012920607566833496, 0.012930560111999511, 0.013226719856262207, 0.012777503967285156, 0.012821599960327149, 0.01278656005859375, 0.01274880027770996, 0.012768704414367676, 0.012796480178833008, 0.012804096221923827, 0.012781567573547363, 0.012738080024719238, 0.01271833610534668, 0.012734687805175782, 0.012760224342346192, 0.012767647743225098, 0.012757375717163085, 0.012763199806213378, 0.012806143760681152, 0.012835871696472169, 0.012829216003417968, 0.012800031661987305, 0.012771360397338867, 0.012760928153991699, 0.012800543785095216, 0.012799072265625, 0.01280297565460205, 0.01281999969482422, 0.012814592361450196, 0.012879520416259766, 0.01292140769958496, 0.012885055541992187, 
0.012835071563720704, 0.012839615821838379, 0.012855392456054687, 0.012838879585266113, 0.012800959587097168, 0.012861536026000977, 0.012989343643188477, 0.012856800079345703, 0.01286143970489502, 0.012880512237548828, 0.012877951622009278, 0.012908320426940918, 0.012833984375, 0.01287228775024414, 0.012892383575439453, 0.012867584228515624, 0.012907808303833007, 0.012888064384460449, 0.012896032333374023, 0.012956000328063965, 0.012966079711914063, 0.01294921588897705, 0.012888959884643555, 0.012896063804626465, 0.01287987232208252, 0.012896224021911621, 0.012871071815490723, 0.012911231994628906, 0.012945183753967285, 0.012904671669006348, 0.012922368049621581, 0.01297049617767334, 0.012916735649108887, 0.013375519752502441, 0.012898303985595704, 0.012822239875793458, 0.012715840339660644, 0.012697216033935546, 0.012731231689453126, 0.012761088371276855, 0.012696703910827637, 0.012663423538208008, 0.012676383972167969, 0.012650464057922363, 0.01266153621673584, 0.01273423957824707, 0.012800448417663575, 0.01266431999206543, 0.012626144409179687, 0.012595487594604491, 0.012642304420471191, 0.012679167747497559, 0.012658687591552734, 0.012684736251831055, 0.012694080352783204, 0.01269279956817627, 0.012677151679992676, 0.012669055938720704, 0.012677663803100587, 0.01265446376800537, 0.012652671813964844, 0.01267302417755127, 0.012719200134277344, 0.012722271919250489, 0.012751680374145508, 0.012777471542358398, 0.012730400085449218, 0.012715519905090332, 0.012720128059387208, 0.012732095718383789, 0.01272707176208496, 0.012724224090576173, 0.012713120460510255, 0.012770048141479492, 0.0128635835647583, 0.012792096138000489, 0.012744031906127929, 0.012738752365112304, 0.012763327598571777, 0.012843008041381837, 0.012805343627929688, 0.012783840179443359, 0.012743231773376464, 0.012776991844177247, 0.012826144218444824, 0.012790719985961914, 0.012769280433654785, 0.01278771209716797, 0.01274675178527832, 0.012777471542358398, 0.012725600242614746, 0.012800671577453614, 0.012848575592041016, 0.012882495880126953, 0.012906111717224122, 0.012853631973266602, 0.013221887588500977, 0.012711935997009278, 0.012636159896850586, 0.012658687591552734, 0.012681056022644042, 0.012672575950622558, 0.012612031936645509, 0.012633952140808105, 0.012605759620666504, 0.012627967834472656, 0.012633088111877442, 0.012706656455993653, 0.012664735794067383, 0.012666463851928711, 0.012626591682434083, 0.012703295707702636, 0.012745152473449707, 0.012785152435302734, 0.012739071846008301, 0.01274060821533203, 0.012701696395874023, 0.012690431594848632, 0.012624992370605468, 0.012681119918823243, 0.012693504333496093, 0.012701696395874023, 0.012737536430358886, 0.012771360397338867, 0.012733280181884766, 0.012688575744628906, 0.012643232345581054, 0.012691391944885253, 0.0127008638381958, 0.012705951690673829, 0.012745471954345703, 0.01276313591003418, 0.012768735885620117, 0.013441568374633788, 0.012806143760681152, 0.01278700828552246, 0.012771871566772461, 0.01274630355834961, 0.012761856079101562, 0.012910431861877441, 0.012885439872741699, 0.012872320175170898, 0.012824511528015137, 0.012867584228515624, 0.012920831680297852, 0.012922752380371093, 0.012926848411560058, 0.012915136337280274, 0.012891519546508788, 0.01289260768890381, 0.012871808052062988, 0.012818400382995605, 0.012910592079162597, 0.012979616165161132, 0.012914560317993163, 0.012934816360473634, 0.012944000244140625, 0.012956000328063965, 0.012979968070983887, 0.01423203182220459, 0.012903807640075683, 0.01436838436126709, 0.012741151809692383, 
0.012766976356506348, 0.012716768264770507, 0.012704928398132325, 0.012718943595886231, 0.012738240242004395, 0.012753215789794922, 0.012777471542358398, 0.012732416152954102, 0.012727999687194824, 0.012679360389709472, 0.012748928070068359, 0.012813759803771972, 0.012830623626708984, 0.01284598445892334, 0.012850943565368653, 0.012866559982299805, 0.012868032455444336, 0.01284547233581543, 0.012948800086975097, 0.012897120475769044, 0.012777152061462403, 0.01284108829498291, 0.012820672035217286, 0.012804096221923827, 0.012769184112548827, 0.013254464149475098, 0.012863776206970216, 0.012825823783874512, 0.012773632049560547, 0.0128187837600708, 0.01276540756225586, 0.012717632293701172, 0.012775839805603028, 0.012777471542358398, 0.0128057279586792, 0.01285161590576172, 0.012761247634887696, 0.012777312278747559, 0.01277132797241211, 0.012697600364685058, 0.01277337646484375, 0.012816384315490722, 0.012781567573547363, 0.012779392242431641, 0.012787615776062012, 0.012873951911926269, 0.012865216255187988, 0.01290272045135498, 0.01275494384765625, 0.01273363208770752, 0.012923711776733399, 0.0127324800491333, 0.012713024139404297, 0.012733311653137206, 0.012833824157714844, 0.012900575637817383, 0.013164511680603027, 0.01283289623260498, 0.012796575546264649, 0.013394911766052247, 0.012920831680297852, 0.01285324764251709, 0.012849151611328125, 0.012802047729492188, 0.012802047729492188, 0.012758463859558105, 0.012724800109863281, 0.012649920463562012, 0.012595775604248047, 0.012590463638305665, 0.01263270378112793, 0.012674336433410644, 0.01262985610961914, 0.012570624351501464, 0.012581760406494141, 0.012581119537353516, 0.012590880393981934, 0.012678784370422363, 0.012710271835327148, 0.012670495986938476, 0.012681440353393554, 0.012641983985900879, 0.012763775825500489, 0.01277295970916748, 0.01270201587677002, 0.012670016288757325, 0.01268627166748047, 0.012666943550109864, 0.012682559967041016, 0.012693887710571288, 0.012728575706481934, 0.012811871528625488, 0.012689087867736816, 0.012716768264770507, 0.01276313591003418, 0.01276921558380127, 0.012772576332092285, 0.012796671867370605, 0.012793951988220215, 0.01274675178527832, 0.012728447914123536, 0.012705663681030273, 0.012696831703186036, 0.012667648315429688, 0.012697600364685058, 0.01273363208770752, 0.012739295959472657, 0.012724320411682128, 0.012741632461547851, 0.012727295875549317, 0.01273855972290039, 0.01276518440246582, 0.012715231895446778, 0.012691391944885253, 0.012791999816894531, 0.012832863807678223, 0.012888192176818847, 0.012822976112365722, 0.012820480346679687, 0.012803520202636719, 0.012868096351623535, 0.012844960212707519, 0.01308291244506836, 0.012694016456604004, 0.012765312194824218, 0.01271510410308838, 0.012718079566955566, 0.012763808250427246, 0.012724384307861328, 0.012699616432189942, 0.012739808082580566, 0.012697792053222657, 0.012651103973388672, 0.012656767845153809, 0.01261350440979004, 0.012633695602416992, 0.012634528160095216, 0.012637824058532714, 0.012646783828735351, 0.012681183815002441, 0.0126976318359375, 0.012625920295715331, 0.012631263732910156, 0.01268607997894287, 0.012730400085449218, 0.012820480346679687, 0.012734463691711426, 0.012693504333496093, 0.012686816215515137, 0.012683808326721191, 0.012689408302307128, 0.012656384468078614, 0.01272976016998291, 0.012640735626220704, 0.012773759841918945, 0.012892255783081055, 0.012894111633300781, 0.012875776290893554, 0.012873311996459962, 0.012775839805603028, 0.012820480346679687, 0.012806143760681152, 0.012736831665039063, 
0.012742303848266601, 0.012732255935668945, 0.012819744110107422, 0.012841664314270019, 0.01292304039001465, 0.012759103775024414, 0.012753984451293946, 0.012798687934875489, 0.012808287620544433, 0.012769408226013183, 0.012785663604736328, 0.012805184364318848, 0.012810496330261231, 0.012849856376647949, 0.012845120429992675, 0.012883744239807128, 0.01289846420288086, 0.013025376319885254, 0.012911999702453613, 0.012853152275085449, 0.012822719573974609, 0.012914336204528809, 0.013483551979064942, 0.01285372829437256, 0.012791808128356934, 0.012740480422973632, 0.012712127685546875, 0.012709823608398438, 0.012716032028198243, 0.012721343994140625, 0.01272655963897705, 0.012771967887878419, 0.01283017635345459, 0.012908991813659667, 0.012797056198120117, 0.012779871940612792, 0.012760928153991699, 0.012743391990661622, 0.012692480087280274, 0.012739520072937011, 0.012771424293518066, 0.012773311614990235, 0.012800000190734863, 0.012806015968322754, 0.012761280059814454, 0.012728256225585937, 0.012666879653930664, 0.012745823860168457, 0.01279196834564209, 0.01279257583618164, 0.012767231941223145, 0.012769280433654785, 0.012756863594055175, 0.012725503921508789, 0.012703840255737304, 0.012720800399780274, 0.012739999771118164, 0.012750880241394043, 0.012743040084838867, 0.012754847526550293, 0.012800415992736817, 0.012744799613952636, 0.012759039878845215, 0.012756896018981934, 0.01272815990447998, 0.01273846435546875, 0.01270809555053711, 0.012729663848876954, 0.01273305606842041, 0.01276524829864502, 0.01280355167388916, 0.012801759719848633, 0.01279417610168457, 0.01279958438873291, 0.012796192169189454, 0.012839551925659179, 0.01277132797241211, 0.012781503677368165, 0.012767295837402343, 0.012777695655822754, 0.012822303771972656, 0.01285097599029541, 0.012817952156066894, 0.012798175811767579, 0.01276966381072998, 0.013193183898925782, 0.012815936088562012, 0.012717791557312012, 0.012703807830810546, 0.012640255928039551, 0.012656607627868653, 0.012653056144714356, 0.012624064445495605, 0.01260108757019043, 0.01261184024810791, 0.012652544021606446, 0.012688575744628906, 0.012796480178833008, 0.012685791969299316, 0.012617504119873047, 0.012652544021606446, 0.012646400451660156, 0.012670975685119629, 0.01273036766052246, 0.012756223678588868, 0.012745439529418945, 0.01270361614227295, 0.012691264152526855, 0.012717439651489257, 0.01271884822845459, 0.01266204833984375, 0.012673983573913574, 0.012687359809875488, 0.012670623779296874, 0.012751071929931641, 0.012755071640014649, 0.012735872268676758, 0.012710687637329101, 0.01273635196685791, 0.012822527885437012, 0.012840959548950195, 0.01278771209716797, 0.01285529613494873, 0.012831999778747558, 0.012845439910888672, 0.012848735809326172, 0.012810303688049316, 0.012846847534179688, 0.012839903831481933, 0.012832768440246582, 0.012935104370117188, 0.012922687530517578, 0.012865344047546386, 0.01289401626586914, 0.012915136337280274, 0.012871871948242188, 0.012848352432250977, 0.01281721591949463, 0.012931039810180664, 0.012929023742675782, 0.012892160415649414, 0.012888064384460449, 0.012919008255004883, 0.013016799926757812, 0.012986432075500489, 0.012974080085754394, 0.013013055801391601, 0.012994303703308105, 0.013435327529907226, 0.01293222427368164, 0.012882880210876465, 0.012881855964660644, 0.012906240463256837, 0.012830975532531739, 0.012737664222717285, 0.01269644832611084, 0.012703743934631348, 0.012700960159301758, 0.01266966438293457, 0.012673312187194825, 0.01266153621673584, 0.012647359848022462, 0.012678784370422363, 
0.012732799530029298, 0.012713279724121094, 0.012702400207519532, 0.01270364761352539, 0.012684639930725097, 0.012684032440185547, 0.012694815635681152, 0.012739295959472657, 0.012748096466064453, 0.012767616271972657, 0.012728575706481934, 0.012716095924377441, 0.01277952003479004, 0.01273036766052246, 0.012691455841064453, 0.01267689609527588, 0.012714207649230957, 0.012818431854248047, 0.012759039878845215, 0.012724224090576173, 0.012705792427062988, 0.012725279808044434, 0.012893216133117676, 0.012851136207580566, 0.01275049591064453, 0.01276358413696289, 0.012756896018981934, 0.012759039878845215, 0.012838175773620605, 0.0127741117477417, 0.012763263702392579, 0.012807519912719726, 0.012837056159973145, 0.012795616149902343, 0.012786304473876953, 0.012788864135742187, 0.012807040214538575, 0.012870752334594726, 0.012862367630004883, 0.01284931182861328, 0.012885855674743653, 0.012818240165710449, 0.012857536315917968, 0.012840352058410644, 0.012822751998901366, 0.01282688045501709, 0.01282470417022705, 0.01286348819732666]",tokens/s,78.01373723750405,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11136.79872,12227.3792,0.0,11848.9088,11814.752256,s,1,16.58408203125,16.58408203125,0.0,16.58408203125,16.58408203125,16.58408203125,16.58408203125,[16.58408203125],,kWh,0.00027038696720004126,2.9818376568283732e-05,8.865979315003658e-05,0.0003888651369183616,,MB,2071.98208,14033.027072,0.0,13625.196544,13297.870848,s,10,22.904132568359376,2.2904132568359374,0.0012868454753217756,2.2905863037109375,2.2922088623046877,2.292209411621094,2.292209851074219,"[2.292208740234375, 2.2922099609375, 2.29136376953125, 2.291201171875, 2.29030224609375, 2.288611328125, 2.28920458984375, 2.290870361328125, 2.288856201171875, 2.28930419921875]",tokens/s,111.77022279099448,kWh,6.670982737708072e-05,7.357757209983291e-06,4.435417437220202e-05,0.00011842175895926602,tokens/kWh,2161764.8838340365,MB,2079.465472,14335.01696,0.0,13927.186432,13689.870848,s,10,1355.52684375,135.55268437499998,0.057629119548384934,135.5256875,135.6300109375,135.65389609375,135.67300421875,"[135.67778125, 135.624703125, 135.585671875, 135.569046875, 135.532953125, 135.497828125, 135.507578125, 135.518421875, 135.51459375, 135.498265625]",tokens/s,0.4647639424514348,kWh,0.003951603366722094,0.0004358923515490394,0.0026285220194825986,0.0070160177377537325,tokens/kWh,8979.452782878832,,s,630,1355.5206533203132,2.151620084635417,0.0010979530220220387,2.1514691162109374,2.153191381835937,2.1536974487304685,2.15430625,"[2.15374072265625, 2.152707763671875, 2.152853271484375, 2.152304443359375, 2.15255126953125, 2.15317822265625, 2.15230126953125, 2.15374462890625, 2.152818603515625, 2.153167724609375, 2.15337890625, 2.1532265625, 2.153262939453125, 2.153539794921875, 2.153477294921875, 2.152826904296875, 2.15331640625, 2.153724609375, 2.15341259765625, 
2.153799560546875, 2.153301513671875, 2.15342529296875, 2.152824951171875, 2.1533818359375, 2.1532939453125, 2.153029052734375, 2.153637939453125, 2.15375732421875, 2.153396240234375, 2.154084228515625, 2.15372607421875, 2.154080322265625, 2.153596923828125, 2.15418798828125, 2.153638671875, 2.153900146484375, 2.15345556640625, 2.1538623046875, 2.1537548828125, 2.153672607421875, 2.15450830078125, 2.153498779296875, 2.15490625, 2.1540546875, 2.154407958984375, 2.154281982421875, 2.153531005859375, 2.1537177734375, 2.153956787109375, 2.15450927734375, 2.153934814453125, 2.15406591796875, 2.15435888671875, 2.1535478515625, 2.154036376953125, 2.1539912109375, 2.1544140625, 2.1534453125, 2.153170166015625, 2.154316162109375, 2.154031494140625, 2.1533955078125, 2.15377197265625, 2.153185302734375, 2.15213427734375, 2.152380126953125, 2.152184326171875, 2.1523232421875, 2.152303955078125, 2.151925537109375, 2.152924072265625, 2.15286083984375, 2.152084228515625, 2.1526240234375, 2.151804931640625, 2.15241943359375, 2.1530869140625, 2.152595458984375, 2.15252392578125, 2.152662841796875, 2.152728759765625, 2.152658935546875, 2.15320361328125, 2.1518603515625, 2.152887451171875, 2.152725341796875, 2.15288427734375, 2.15361767578125, 2.1521552734375, 2.152619873046875, 2.153019287109375, 2.15226171875, 2.1537685546875, 2.152908935546875, 2.152489990234375, 2.152644775390625, 2.15259326171875, 2.152498046875, 2.152840576171875, 2.152521728515625, 2.152732666015625, 2.153362060546875, 2.153743896484375, 2.152993408203125, 2.152506591796875, 2.153020263671875, 2.154104736328125, 2.152980712890625, 2.152964111328125, 2.152489990234375, 2.15295458984375, 2.152462646484375, 2.153101318359375, 2.15288818359375, 2.152739990234375, 2.153175537109375, 2.152466796875, 2.153329833984375, 2.152919921875, 2.152263671875, 2.15319091796875, 2.15310595703125, 2.153195556640625, 2.152091552734375, 2.15292919921875, 2.153471923828125, 2.152052734375, 2.15093359375, 2.151443603515625, 2.151208740234375, 2.151665771484375, 2.15092431640625, 2.151182373046875, 2.151501953125, 2.151745361328125, 2.151505859375, 2.151245849609375, 2.15233251953125, 2.151381103515625, 2.1523515625, 2.1514228515625, 2.151843017578125, 2.15215576171875, 2.15196484375, 2.151256103515625, 2.15206494140625, 2.1517880859375, 2.1520634765625, 2.1513359375, 2.152428466796875, 2.15237939453125, 2.152940673828125, 2.151484375, 2.15225537109375, 2.1527265625, 2.152265625, 2.153132080078125, 2.15225244140625, 2.152393798828125, 2.152676513671875, 2.152208984375, 2.152499267578125, 2.15231884765625, 2.151991455078125, 2.152485107421875, 2.152724365234375, 2.152110107421875, 2.1523330078125, 2.15228466796875, 2.15233544921875, 2.15267919921875, 2.152990966796875, 2.1522646484375, 2.15244921875, 2.152156982421875, 2.153568359375, 2.15230419921875, 2.152249755859375, 2.151930908203125, 2.15242138671875, 2.152317138671875, 2.153120361328125, 2.152474609375, 2.152212646484375, 2.152177001953125, 2.152445556640625, 2.152672119140625, 2.1532548828125, 2.15179052734375, 2.150914794921875, 2.1508173828125, 2.151113037109375, 2.15136474609375, 2.151604248046875, 2.151395263671875, 2.15161767578125, 2.15109716796875, 2.151607421875, 2.150910888671875, 2.15164306640625, 2.151921630859375, 2.15119775390625, 2.151859130859375, 2.1525478515625, 2.1510517578125, 2.15236376953125, 2.151751220703125, 2.151856689453125, 2.15140966796875, 2.1511865234375, 2.153145751953125, 2.151104736328125, 2.151987548828125, 2.15233251953125, 2.1515537109375, 2.150688720703125, 
2.150803466796875, 2.15117822265625, 2.152405029296875, 2.152108154296875, 2.151814697265625, 2.1515673828125, 2.15140625, 2.15215478515625, 2.151510009765625, 2.15209521484375, 2.152355712890625, 2.152592041015625, 2.152163330078125, 2.15205859375, 2.15209375, 2.15196484375, 2.152599853515625, 2.151763427734375, 2.151579345703125, 2.152060791015625, 2.15278662109375, 2.1517724609375, 2.153413818359375, 2.151795654296875, 2.152584716796875, 2.152, 2.15275537109375, 2.152795654296875, 2.151948486328125, 2.152388671875, 2.15271533203125, 2.151862060546875, 2.1522646484375, 2.152122314453125, 2.152468505859375, 2.152443359375, 2.15089306640625, 2.150482177734375, 2.15049267578125, 2.15028125, 2.150667236328125, 2.151060546875, 2.15079052734375, 2.15144873046875, 2.150859130859375, 2.1505966796875, 2.15177001953125, 2.15014404296875, 2.151184326171875, 2.1510185546875, 2.1508076171875, 2.151145751953125, 2.15081103515625, 2.1512626953125, 2.150821044921875, 2.1516171875, 2.150609130859375, 2.15122900390625, 2.15109326171875, 2.1516748046875, 2.15079931640625, 2.151235595703125, 2.150504150390625, 2.151947998046875, 2.15126904296875, 2.15168408203125, 2.15206103515625, 2.151129150390625, 2.15129248046875, 2.15198046875, 2.151365478515625, 2.151548583984375, 2.15148291015625, 2.1516533203125, 2.1513544921875, 2.151612548828125, 2.150863525390625, 2.15126123046875, 2.15179052734375, 2.1515458984375, 2.151701904296875, 2.151739990234375, 2.15112109375, 2.15173876953125, 2.151557861328125, 2.1519248046875, 2.1510234375, 2.15153662109375, 2.151571533203125, 2.15143212890625, 2.152637939453125, 2.151794677734375, 2.1523251953125, 2.15185400390625, 2.151250244140625, 2.1512763671875, 2.151165771484375, 2.15204638671875, 2.15148388671875, 2.150702392578125, 2.14983154296875, 2.15018896484375, 2.149712158203125, 2.150517578125, 2.149858154296875, 2.150455322265625, 2.14996484375, 2.149432373046875, 2.150497802734375, 2.149911865234375, 2.150378662109375, 2.14996923828125, 2.150060302734375, 2.14969140625, 2.150126708984375, 2.150521728515625, 2.15058837890625, 2.149822265625, 2.149945556640625, 2.150328369140625, 2.15119677734375, 2.149961669921875, 2.15074609375, 2.15079931640625, 2.15058642578125, 2.150466796875, 2.15084619140625, 2.150916748046875, 2.15092041015625, 2.151034912109375, 2.150868896484375, 2.151144775390625, 2.151258544921875, 2.15111474609375, 2.15045556640625, 2.150823974609375, 2.150533203125, 2.1515546875, 2.1513095703125, 2.151034912109375, 2.15058837890625, 2.15149560546875, 2.151301025390625, 2.150927978515625, 2.151443115234375, 2.15052294921875, 2.15045654296875, 2.15022265625, 2.152161376953125, 2.1504658203125, 2.15101416015625, 2.15065185546875, 2.151403076171875, 2.1523974609375, 2.151395263671875, 2.150956787109375, 2.152105224609375, 2.150964111328125, 2.152171630859375, 2.151329833984375, 2.151540771484375, 2.151630615234375, 2.14989013671875, 2.14916650390625, 2.149771728515625, 2.14993310546875, 2.149523681640625, 2.15041357421875, 2.149125, 2.150731689453125, 2.15006982421875, 2.150005126953125, 2.149689453125, 2.15046728515625, 2.150459228515625, 2.150856689453125, 2.150032958984375, 2.151127197265625, 2.150333251953125, 2.150701171875, 2.14982568359375, 2.150210205078125, 2.15022412109375, 2.1509560546875, 2.15025439453125, 2.151168701171875, 2.15073583984375, 2.15011328125, 2.15008203125, 2.1519951171875, 2.150269775390625, 2.1510556640625, 2.150285400390625, 2.151147216796875, 2.150312255859375, 2.151153564453125, 2.151604248046875, 2.151036865234375, 
2.15113330078125, 2.151172119140625, 2.15152978515625, 2.15193408203125, 2.151918212890625, 2.150645751953125, 2.15139111328125, 2.1517958984375, 2.1515087890625, 2.151620849609375, 2.151088134765625, 2.151255126953125, 2.150869873046875, 2.152009765625, 2.151751708984375, 2.151941650390625, 2.15211962890625, 2.152193115234375, 2.15191748046875, 2.151174072265625, 2.151095458984375, 2.15147412109375, 2.1513125, 2.1514921875, 2.152072998046875, 2.152081787109375, 2.151716796875, 2.15088427734375, 2.149719482421875, 2.150300048828125, 2.150454833984375, 2.15006640625, 2.151052734375, 2.149905029296875, 2.15017822265625, 2.1502412109375, 2.15087451171875, 2.15023193359375, 2.15002783203125, 2.150787109375, 2.150412353515625, 2.150989501953125, 2.150674560546875, 2.15045947265625, 2.15073974609375, 2.150287109375, 2.150994140625, 2.151113037109375, 2.15100830078125, 2.15099365234375, 2.150598876953125, 2.151919677734375, 2.151110595703125, 2.150681884765625, 2.15150634765625, 2.15130908203125, 2.1512744140625, 2.152123046875, 2.151403564453125, 2.15124609375, 2.15190087890625, 2.15092626953125, 2.151630126953125, 2.15122412109375, 2.151163818359375, 2.150731201171875, 2.151129150390625, 2.151237548828125, 2.15116259765625, 2.151464111328125, 2.150979248046875, 2.1516767578125, 2.15100830078125, 2.151354248046875, 2.151202880859375, 2.15159716796875, 2.152220703125, 2.151686279296875, 2.1517109375, 2.151391845703125, 2.151088134765625, 2.151636962890625, 2.15196875, 2.151333984375, 2.151794677734375, 2.151206787109375, 2.151443359375, 2.151432373046875, 2.151592529296875, 2.151311279296875, 2.1513984375, 2.150262939453125, 2.1506396484375, 2.149322021484375, 2.15015283203125, 2.150506103515625, 2.149909912109375, 2.15073681640625, 2.150552734375, 2.150275146484375, 2.15019189453125, 2.150582275390625, 2.1506826171875, 2.15127978515625, 2.150322998046875, 2.150883544921875, 2.150467529296875, 2.151138916015625, 2.15074658203125, 2.150599853515625, 2.149941650390625, 2.151123291015625, 2.151288818359375, 2.151434326171875, 2.1514072265625, 2.15140380859375, 2.15039794921875, 2.1509931640625, 2.150472412109375, 2.15136669921875, 2.150916015625, 2.151755859375, 2.15052685546875, 2.150698974609375, 2.151047119140625, 2.150866943359375, 2.15036474609375, 2.150756591796875, 2.1507783203125, 2.1514296875, 2.151457275390625, 2.15137451171875, 2.1515224609375, 2.151106201171875, 2.151357421875, 2.15172509765625, 2.150919189453125, 2.151572509765625, 2.151522216796875, 2.152494140625, 2.15128125, 2.1516044921875, 2.151712646484375, 2.151427978515625, 2.151321044921875, 2.151582275390625, 2.1514423828125, 2.15172412109375, 2.15140625, 2.152030517578125, 2.1506416015625, 2.1513359375, 2.151771484375, 2.150775390625, 2.149087158203125, 2.149661865234375, 2.149651123046875, 2.14935888671875, 2.149644287109375, 2.1498623046875, 2.15006103515625, 2.14955615234375, 2.150403076171875, 2.1497666015625, 2.149818359375, 2.14995556640625, 2.150998291015625, 2.150135986328125, 2.150268310546875, 2.150007080078125, 2.149869873046875, 2.151012451171875, 2.150572021484375, 2.149473876953125, 2.150074462890625, 2.15087353515625, 2.15081884765625, 2.149972900390625, 2.1505576171875, 2.150240234375, 2.15158984375, 2.150319580078125, 2.150472412109375, 2.1510146484375, 2.1510009765625, 2.151279541015625, 2.15128857421875, 2.150300048828125, 2.15108203125, 2.150709228515625, 2.15055322265625, 2.151600341796875, 2.1515654296875, 2.151026611328125, 2.151, 2.151370849609375, 2.151364501953125, 2.1511015625, 2.15116259765625, 
2.1517353515625, 2.1517578125, 2.1517373046875, 2.15187451171875, 2.15133349609375, 2.151178466796875, 2.151184326171875, 2.1522880859375, 2.152058349609375, 2.151327880859375, 2.1508779296875, 2.151151611328125, 2.151152587890625, 2.15120166015625, 2.151810791015625, 2.15129736328125, 2.15137255859375]",tokens/s,0.464766064948425,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3553.804288,4495.179776,0.0,4116.709376,3980.386816,s,1,10.698505859375,10.698505859375,0.0,10.698505859375,10.698505859375,10.698505859375,10.698505859375,[10.698505859375],,kWh,9.446468462082861e-05,1.0412533916205323e-05,3.08761358120066e-05,0.00013575335434904052,,MB,3569.58208,4826.529792,0.0,4418.699264,4245.764608,s,10,6.540985412597656,0.6540985412597655,0.0014622330901031934,0.6539962463378907,0.6554122009277343,0.6563269500732422,0.6570587493896484,"[0.65141357421875, 0.6533139038085938, 0.6548538208007812, 0.6540926513671875, 0.652806884765625, 0.6543641967773437, 0.65724169921875, 0.6552089233398437, 0.6538998413085938, 0.6537899169921875]",tokens/s,391.37833805125916,kWh,1.9062787973176872e-05,2.1022905774729127e-06,1.263961080612535e-05,3.380468935677513e-05,tokens/kWh,7572913.843347965,MB,3576.631296,4837.015552,0.0,4429.185024,4245.767168,s,10,385.14350781249993,38.514350781249995,0.015261880598067006,38.517564453125004,38.528111328125,38.5308974609375,38.5331263671875,"[38.48181640625, 38.49543359375, 38.50627734375, 38.51169140625, 38.51723046875, 38.5178984375, 38.52492578125, 38.52705859375, 38.53368359375, 
38.5274921875]",tokens/s,1.6357539130757046,kWh,0.0011235635662026559,0.0001239374439319332,0.0007470703580724743,0.0019945713682070635,tokens/kWh,31585.73365897216,,s,630,385.13932885742145,0.6113322680276537,0.0005062537275485813,0.6113420104980469,0.6119678344726562,0.6121722381591798,0.6125492669677735,"[0.6109921264648438, 0.6100316162109375, 0.611051513671875, 0.6100374145507812, 0.6104026489257812, 0.61068017578125, 0.6101295776367187, 0.6103638305664062, 0.61080810546875, 0.6105416870117187, 0.6103138427734375, 0.6109783325195313, 0.6102640991210937, 0.61072216796875, 0.610486572265625, 0.6104078979492188, 0.6116115112304688, 0.6101176147460937, 0.6113382568359375, 0.6105477294921875, 0.6103302001953125, 0.6110244140625, 0.6103285522460937, 0.6104708251953125, 0.61085693359375, 0.6103161010742187, 0.6109573364257812, 0.6105985107421875, 0.610902587890625, 0.6105989379882812, 0.6109214477539062, 0.61062451171875, 0.610783203125, 0.6112945556640625, 0.6108182983398438, 0.6106873779296875, 0.6106516723632812, 0.6108145141601562, 0.6111149291992187, 0.6111642456054688, 0.6110392456054687, 0.6110167236328125, 0.6107685546875, 0.6111071166992188, 0.6108098754882813, 0.6105782470703125, 0.611965087890625, 0.6104343872070312, 0.6109550170898438, 0.6109105224609375, 0.6109763793945312, 0.6111295776367187, 0.611034912109375, 0.6109859619140625, 0.6112991943359375, 0.6115060424804688, 0.6106196899414063, 0.611409423828125, 0.6109086303710938, 0.6111492309570312, 0.6111985473632813, 0.611876953125, 0.6106612548828125, 0.6112717895507812, 0.6104872436523437, 0.610522705078125, 0.610563720703125, 0.6107140502929688, 0.6106519775390625, 0.6105010375976563, 0.6112579956054688, 0.6113301391601562, 0.6104927368164063, 0.6113546142578125, 0.6107176513671875, 0.6111146240234375, 0.6111682739257812, 0.6106771240234375, 0.61136279296875, 0.610326416015625, 0.6114895629882813, 0.6102262573242188, 0.6108101196289063, 0.6111925048828125, 0.6111926879882813, 0.6110531005859375, 0.610546630859375, 0.6112174072265625, 0.6107294921875, 0.6116746826171875, 0.6105430297851563, 0.6113797607421875, 0.6106009521484375, 0.611178466796875, 0.6106419067382812, 0.6110119018554687, 0.6121970825195312, 0.6102078247070313, 0.6113463745117188, 0.611293212890625, 0.6107523193359375, 0.611599609375, 0.6104749755859376, 0.611045166015625, 0.610963623046875, 0.610951171875, 0.61168212890625, 0.6109185791015626, 0.6111758422851562, 0.6113695068359375, 0.6109900512695312, 0.6115401611328125, 0.610949951171875, 0.6108753662109375, 0.6111744384765625, 0.6108746948242187, 0.6114636840820312, 0.611250244140625, 0.610959228515625, 0.6115820922851563, 0.61092041015625, 0.6112987060546875, 0.6110767822265625, 0.611411865234375, 0.6119174194335938, 0.610774658203125, 0.611037353515625, 0.610566162109375, 0.611219482421875, 0.6104780883789063, 0.6116825561523438, 0.6103765869140625, 0.6105445556640625, 0.6110844116210937, 0.6112127075195313, 0.6108016357421875, 0.6114279174804688, 0.610241455078125, 0.6113211669921875, 0.6111033935546875, 0.6113541870117187, 0.611105224609375, 0.6106083374023438, 0.611092529296875, 0.611721435546875, 0.6106875610351562, 0.6114488525390624, 0.6113054809570313, 0.6107791137695312, 0.6112071533203125, 0.6111641845703125, 0.6112214965820313, 0.6112781372070313, 0.6113916015625, 0.610779296875, 0.6114451904296875, 0.6112849731445312, 0.6113416748046875, 0.6112028198242188, 0.6110396728515625, 0.6113751220703125, 0.6110286254882813, 0.6112623291015625, 0.6121746826171875, 0.610402587890625, 0.6124005737304687, 
0.6108922119140625, 0.611768310546875, 0.6109204711914062, 0.6111437377929687, 0.6113484497070313, 0.6110239868164062, 0.6111959838867187, 0.6112222900390625, 0.6117939453125, 0.611330078125, 0.611583984375, 0.6110422973632812, 0.6113079223632812, 0.6110150756835937, 0.6115774536132812, 0.6112814331054688, 0.61088525390625, 0.610998779296875, 0.6116351928710938, 0.6121427001953125, 0.6112935791015625, 0.611651611328125, 0.6115852661132812, 0.61158984375, 0.6109527587890625, 0.610888427734375, 0.6115921630859374, 0.6106536254882813, 0.6108473510742187, 0.6118167114257812, 0.6111113891601563, 0.6113118896484375, 0.6108585205078125, 0.6109044189453126, 0.6109605712890624, 0.6112481689453125, 0.6114147338867187, 0.610752685546875, 0.6111846313476562, 0.6110863647460938, 0.6110222778320312, 0.6114710083007813, 0.6109985961914063, 0.6114678344726563, 0.6113565673828125, 0.6111417846679688, 0.6116593627929687, 0.6111826782226563, 0.6116703491210937, 0.6109490966796876, 0.6115655517578125, 0.610693115234375, 0.6110773315429687, 0.6111281127929687, 0.6118500366210937, 0.6110473022460937, 0.6114962768554687, 0.6114857177734375, 0.6112329711914063, 0.61167822265625, 0.6107901611328125, 0.6116265869140625, 0.6112479248046875, 0.61144873046875, 0.61187109375, 0.6102879638671875, 0.6122516479492187, 0.6111025390625, 0.6116984252929687, 0.6117682495117187, 0.610472412109375, 0.6118174438476562, 0.6105640869140625, 0.611926025390625, 0.611567138671875, 0.6107017822265625, 0.611706298828125, 0.6110521240234374, 0.6117572021484375, 0.612184814453125, 0.6106842041015625, 0.6116370849609375, 0.6110784301757812, 0.6120189208984375, 0.611293212890625, 0.6113382568359375, 0.6117147216796875, 0.6109002075195312, 0.6115035400390625, 0.610646728515625, 0.61136279296875, 0.6111211547851563, 0.6111058959960938, 0.6116148071289063, 0.6106111450195313, 0.611332275390625, 0.6105990600585938, 0.6115392456054688, 0.6115084838867187, 0.6101905517578124, 0.6114943237304687, 0.6112234497070312, 0.611407470703125, 0.6116044921875, 0.61050146484375, 0.6120794067382812, 0.6108378295898438, 0.6114108276367187, 0.611237548828125, 0.6108038330078125, 0.611182861328125, 0.612284423828125, 0.6106126098632813, 0.612206298828125, 0.6105542602539062, 0.6119552612304687, 0.6112861328125, 0.6112113037109375, 0.6112544555664062, 0.6114638061523437, 0.611952392578125, 0.6113582763671875, 0.610810546875, 0.6126079711914062, 0.6107545776367187, 0.6121326904296875, 0.61099365234375, 0.61170556640625, 0.6126713256835937, 0.6104024658203125, 0.6125906982421875, 0.6105870971679688, 0.6122266845703125, 0.6111973876953125, 0.6116929321289063, 0.610832275390625, 0.6116475219726563, 0.6121077880859375, 0.6109210205078125, 0.6118068237304688, 0.6112506103515625, 0.6116597900390625, 0.6113929443359375, 0.61121142578125, 0.6117564697265625, 0.6117857666015625, 0.6112423095703124, 0.6110398559570313, 0.6121041870117188, 0.6117105712890625, 0.6112811279296875, 0.611064208984375, 0.6111959838867187, 0.6108968505859375, 0.6108692626953125, 0.611430419921875, 0.6108098754882813, 0.6111761474609375, 0.6110431518554688, 0.6113449096679687, 0.6107310180664063, 0.6115316772460937, 0.6114692993164063, 0.6104757080078125, 0.6113712158203125, 0.6112704467773438, 0.6113427124023437, 0.6115717163085937, 0.6105310668945313, 0.61163330078125, 0.6108427734375, 0.6118502197265625, 0.6115921630859374, 0.6103900146484375, 0.6117611694335937, 0.6107698974609375, 0.611978271484375, 0.6117611083984374, 0.6107955322265625, 0.6119133911132812, 0.6106209716796875, 
0.6123458862304687, 0.611243896484375, 0.611052734375, 0.6119125366210938, 0.6110679931640625, 0.6112162475585937, 0.6125850830078124, 0.6105450439453125, 0.6122443237304688, 0.6114215698242188, 0.6118607788085938, 0.6113755493164063, 0.6115061645507812, 0.611826904296875, 0.6110618286132813, 0.6113306274414062, 0.6119398803710937, 0.6121388549804687, 0.611496826171875, 0.6111968383789063, 0.61158154296875, 0.6117154541015625, 0.611382568359375, 0.611432861328125, 0.6111724243164063, 0.6114041748046875, 0.61209521484375, 0.6118342895507812, 0.6113487548828125, 0.612005859375, 0.6110945434570313, 0.6110287475585937, 0.6114531860351563, 0.6117652587890625, 0.610704345703125, 0.6109407348632813, 0.6114449462890625, 0.6114954223632812, 0.6116541748046875, 0.611900390625, 0.6107484741210938, 0.611822021484375, 0.6110479125976562, 0.611915771484375, 0.61170068359375, 0.6108094482421875, 0.6110839233398437, 0.6109970703125, 0.611715087890625, 0.6115327758789062, 0.61136279296875, 0.6114140014648437, 0.611293212890625, 0.61115185546875, 0.6119301147460937, 0.6108995361328124, 0.6124560546875, 0.6110175170898438, 0.6112965087890625, 0.6114058837890625, 0.6116432495117188, 0.6114476318359375, 0.6111863403320312, 0.6121980590820313, 0.6117874145507812, 0.6110812377929687, 0.611695068359375, 0.6110883178710937, 0.61243603515625, 0.6119996337890625, 0.61095556640625, 0.6116661987304688, 0.610967529296875, 0.6117539672851563, 0.61153271484375, 0.61158203125, 0.6116188354492188, 0.6116188354492188, 0.6107908935546875, 0.6125614624023438, 0.611567626953125, 0.6112808837890625, 0.6116290283203125, 0.6113873901367187, 0.612421630859375, 0.611565185546875, 0.6117789306640625, 0.6108671875, 0.61222216796875, 0.6117747192382812, 0.6116050415039063, 0.6115977172851562, 0.6112446899414062, 0.6119505615234375, 0.6115143432617187, 0.6112133178710938, 0.6109389038085937, 0.6114498291015625, 0.6111221923828125, 0.6121287841796875, 0.6100809326171875, 0.6120098266601562, 0.6109921264648438, 0.6113211059570313, 0.6113121948242187, 0.6112925415039062, 0.6113984375, 0.611454345703125, 0.6114136352539062, 0.6119331665039063, 0.6110739135742187, 0.6118888549804687, 0.6115637817382813, 0.6115280151367187, 0.6106920166015625, 0.6120549926757812, 0.611082275390625, 0.61172119140625, 0.6113543090820313, 0.611282958984375, 0.61156591796875, 0.61148291015625, 0.6115827026367188, 0.61085888671875, 0.6118744506835937, 0.6116233520507812, 0.6119481201171875, 0.61106005859375, 0.61166796875, 0.6118728637695312, 0.610848388671875, 0.612122802734375, 0.6112965087890625, 0.6121966552734375, 0.6116377563476563, 0.611567626953125, 0.6121145629882813, 0.61161181640625, 0.6117364501953125, 0.6111968383789063, 0.6119666748046875, 0.6112833862304687, 0.6117703857421875, 0.6125808715820312, 0.6109168701171875, 0.6113150024414062, 0.6118038330078125, 0.6116127319335938, 0.6124400024414063, 0.6106331176757812, 0.611764892578125, 0.6115382690429687, 0.6115887451171875, 0.6122815551757812, 0.6113696899414063, 0.61158203125, 0.6125175170898437, 0.6118273315429688, 0.6111178588867188, 0.6110796508789063, 0.6114712524414062, 0.6117977905273437, 0.611784912109375, 0.61105322265625, 0.6115532836914063, 0.6113218383789063, 0.6120200805664062, 0.6107853393554687, 0.6117777709960938, 0.6113821411132813, 0.611683837890625, 0.6118856201171875, 0.61125341796875, 0.6108681640625, 0.611375, 0.6117590942382812, 0.6111150512695313, 0.6115455322265625, 0.6114103393554687, 0.6111492919921875, 0.6117642211914063, 0.6117012939453125, 0.6114487915039063, 
0.6117152099609375, 0.6112135620117187, 0.6121029663085937, 0.6118154296875, 0.61160498046875, 0.6118670654296875, 0.6117969970703125, 0.6106843872070312, 0.6123680419921875, 0.6115070190429688, 0.6117271728515625, 0.611373046875, 0.61243408203125, 0.6114918212890625, 0.6121692504882813, 0.61119921875, 0.6118868408203125, 0.6116414794921875, 0.611754150390625, 0.6111735229492188, 0.6117667236328125, 0.6120140991210937, 0.6122023315429688, 0.6115181884765625, 0.6117030639648438, 0.6116904907226562, 0.6116195678710937, 0.611751953125, 0.6125194091796875, 0.6119388427734375, 0.6119915771484375, 0.6117601318359375, 0.612121826171875, 0.6117793579101563, 0.6114078979492188, 0.6126141357421875, 0.6114260864257812, 0.6122276000976562, 0.6115369873046875, 0.6108263549804688, 0.6116002807617188, 0.6113702392578125, 0.6110420532226563, 0.6116039428710938, 0.6109025268554688, 0.611217041015625, 0.6117154541015625, 0.6109921264648438, 0.6114871215820312, 0.6117135620117188, 0.6114755249023438, 0.6108710327148438, 0.611488037109375, 0.6119318237304687, 0.6111788330078125, 0.611694580078125, 0.611493896484375, 0.6110637817382812, 0.6114118041992187, 0.6115015869140625, 0.6112830810546875, 0.6119649047851563, 0.6117708740234375, 0.6107279663085937, 0.6119935913085938, 0.6113423461914063, 0.6116249389648437, 0.611358642578125, 0.6114295654296875, 0.6116730346679687, 0.6117742309570312, 0.6115117797851563, 0.6119463500976563, 0.611017578125, 0.6122537231445313, 0.611446044921875, 0.6121294555664063, 0.6116015625, 0.6116072998046875, 0.6116763305664062, 0.6117437744140625, 0.6113546142578125, 0.6115429077148438, 0.6107661743164062, 0.6120408935546875, 0.6112135620117187, 0.6119387817382812, 0.6119666137695312, 0.6120185546875, 0.610864990234375, 0.61204833984375, 0.6110808715820313, 0.6115655517578125, 0.6121328735351562, 0.6121287841796875, 0.6120202026367187, 0.6107361450195312, 0.61201611328125, 0.6118806762695312, 0.6115453491210937]",tokens/s,1.6357716618268952,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,5181.218816,5584.584704,0.0,5182.062592,5181.108736,s,1,11.2432333984375,11.2432333984375,0.0,11.2432333984375,11.2432333984375,11.2432333984375,11.2432333984375,[11.2432333984375],,kWh,0.00011862922332915862,1.3078100650580155e-05,3.545030613799627e-05,0.00016715763011773503,,MB,5161.34912,5739.773952,0.0,5322.571776,5283.621376,s,10,2.4362078399658205,0.24362078399658205,0.0005670315966989556,0.24341770935058593,0.2442363235473633,0.24454350814819337,0.2447892558288574,"[0.24283779907226563, 0.2434153289794922, 0.24306562805175783, 0.2433887939453125, 0.2432181396484375, 0.2434200897216797, 0.24485069274902344, 0.24388758850097655, 0.24416806030273439, 
0.24395571899414062]",tokens/s,1050.8134642715527,kWh,7.147958736788396e-06,7.882838604552668e-07,4.720254453707321e-06,1.2656497050950985e-05,tokens/kWh,20226765.665841535,MB,5167.075328,5756.551168,0.0,5339.348992,5283.623936,s,10,25.294171874999996,2.5294171875,0.017786721108151902,2.5263804931640625,2.5501684082031253,2.556255712890625,2.561125556640625,"[2.548815673828125, 2.562343017578125, 2.534892822265625, 2.536100830078125, 2.54606982421875, 2.5178681640625, 2.51202490234375, 2.511920166015625, 2.51397998046875, 2.510156494140625]",tokens/s,24.90692334634893,kWh,7.393513094404378e-05,8.155025752346191e-06,4.896795516349232e-05,0.00013105811185988228,tokens/kWh,480702.78982315096,,s,630,25.29130438613893,0.040144927597045896,0.0005855910361652461,0.04008828735351562,0.04062418556213379,0.04084815483093262,0.04267652240753175,"[0.04278681564331055, 0.04063568115234375, 0.04132502365112305, 0.04061196899414062, 0.04055392074584961, 0.04031558227539062, 0.04022886276245117, 0.04012441635131836, 0.04034051132202148, 0.04041007995605469, 0.04034969711303711, 0.04020940780639649, 0.04050022506713867, 0.04049270248413086, 0.04049545669555664, 0.04026163101196289, 0.040187904357910156, 0.040323070526123043, 0.040321025848388675, 0.04032921600341797, 0.04015923309326172, 0.04029990386962891, 0.040790462493896486, 0.04044518280029297, 0.040162239074707035, 0.04023849487304688, 0.040390625, 0.040148704528808594, 0.040354721069335936, 0.04026748657226562, 0.040141086578369144, 0.04031283187866211, 0.040097793579101565, 0.040075233459472656, 0.040142303466796876, 0.04036806488037109, 0.04043747329711914, 0.04048988723754883, 0.040542209625244144, 0.04042531204223633, 0.040306846618652345, 0.04022259140014649, 0.040382591247558594, 0.04030668640136719, 0.04035692977905273, 0.040597503662109374, 0.04104288101196289, 0.040750911712646484, 0.04038675308227539, 0.04023871994018555, 0.0401923828125, 0.040204288482666016, 0.04024524688720703, 0.04007731246948242, 0.04048896026611328, 0.04081999969482422, 0.040911457061767575, 0.040591487884521486, 0.04040639877319336, 0.04032745742797852, 0.041597278594970706, 0.04039475250244141, 0.040576126098632814, 0.0409354248046875, 0.040736766815185545, 0.04084265518188476, 0.040593856811523436, 0.040814559936523435, 0.04044384002685547, 0.040276222229003907, 0.040508926391601564, 0.04020275115966797, 0.0422740478515625, 0.043031230926513675, 0.0404890251159668, 0.040302593231201174, 0.04053401565551758, 0.04084838485717773, 0.04210496139526367, 0.04035417556762695, 0.04042598342895508, 0.04046768188476563, 0.0406036491394043, 0.042406494140625, 0.04065283203125, 0.04067670440673828, 0.04036038589477539, 0.04040524673461914, 0.040554622650146484, 0.041305313110351564, 0.04051638412475586, 0.04045571136474609, 0.04053782272338867, 0.04018048095703125, 0.04077347183227539, 0.040363616943359375, 0.040428096771240235, 0.04029827117919922, 0.040394977569580076, 0.040521728515625, 0.040468318939208985, 0.040457759857177734, 0.04016191864013672, 0.04066064071655273, 0.04042089462280273, 0.04052812957763672, 0.04038492965698242, 0.04034883117675781, 0.04046720123291016, 0.04045779037475586, 0.04041388702392578, 0.04064780807495117, 0.04046681594848633, 0.04084787368774414, 0.04431241607666016, 0.0403969612121582, 0.04029788970947266, 0.040342113494873044, 0.0405032958984375, 0.04023468780517578, 0.040021759033203125, 0.04040723037719727, 0.04044019317626953, 0.040183265686035155, 0.040436511993408204, 0.040109886169433596, 0.04071449661254883, 0.03983776092529297, 
0.04000531387329102, 0.039629215240478514, 0.03957388687133789, 0.03978160095214844, 0.040630527496337894, 0.040317150115966795, 0.04045651245117188, 0.04056441497802735, 0.03993222427368164, 0.0398397102355957, 0.03998518371582031, 0.03974720001220703, 0.03997516632080078, 0.04015526580810547, 0.04057062530517578, 0.0403950080871582, 0.03995852661132813, 0.040371936798095705, 0.04099663925170898, 0.040628734588623046, 0.04045119857788086, 0.04032396697998047, 0.04057907104492187, 0.040613887786865234, 0.04050739288330078, 0.040681343078613284, 0.04051366424560547, 0.04034064102172852, 0.04025225448608399, 0.04026572799682617, 0.040481983184814455, 0.04034550476074219, 0.040395198822021486, 0.04040752029418945, 0.04027801513671875, 0.04017548751831055, 0.04003830337524414, 0.03982950210571289, 0.039798336029052736, 0.040247806549072264, 0.04038671875, 0.04016643142700195, 0.04025203323364258, 0.040237407684326175, 0.04007635116577148, 0.03999430465698242, 0.039874561309814455, 0.03982950210571289, 0.04026153564453125, 0.040212001800537106, 0.04026220703125, 0.040051807403564454, 0.04011708831787109, 0.04008252716064453, 0.04042822265625, 0.040478622436523434, 0.03999372863769531, 0.040662944793701174, 0.04029654312133789, 0.04022809600830078, 0.040135040283203124, 0.04081391906738281, 0.040029022216796876, 0.040170913696289064, 0.040055393218994144, 0.04022886276245117, 0.040243198394775394, 0.040185855865478515, 0.03984572982788086, 0.03999935913085938, 0.040312576293945315, 0.04024374389648438, 0.04001587295532227, 0.03997465515136719, 0.039825313568115236, 0.039887168884277346, 0.03998108673095703, 0.04007452774047852, 0.04062076950073242, 0.040081409454345705, 0.04001305770874024, 0.040051105499267575, 0.04034793472290039, 0.0404152946472168, 0.040927230834960936, 0.04099225616455078, 0.04019795227050781, 0.04001987075805664, 0.040043296813964846, 0.04000316619873047, 0.04008182525634765, 0.04033718490600586, 0.040369792938232424, 0.04001753616333008, 0.040274913787841794, 0.04006911849975586, 0.03990079879760742, 0.040632705688476566, 0.04022784042358398, 0.040001823425292966, 0.041111328125, 0.041339839935302734, 0.04016537475585937, 0.040308734893798825, 0.04013983917236328, 0.04030355072021485, 0.04012358474731445, 0.039782913208007815, 0.040317184448242185, 0.04028361511230469, 0.03998575973510742, 0.040013824462890625, 0.0406036491394043, 0.040615455627441406, 0.04034783935546875, 0.040429855346679686, 0.0405503044128418, 0.04019209671020508, 0.04016918563842774, 0.04002230453491211, 0.04005683135986328, 0.040908798217773434, 0.0403394546508789, 0.04014284896850586, 0.0407624626159668, 0.04027484893798828, 0.04033484649658203, 0.04161539077758789, 0.04014937591552734, 0.04002006530761719, 0.04039884948730469, 0.040151039123535154, 0.040118270874023435, 0.04014284896850586, 0.04098233413696289, 0.040425537109375, 0.0405904655456543, 0.0403056640625, 0.040545631408691406, 0.04037289428710938, 0.04028195190429688, 0.040476287841796875, 0.0412388801574707, 0.04025363159179687, 0.04041113662719727, 0.04014080047607422, 0.04015635299682617, 0.04027065658569336, 0.04021820831298828, 0.04016896057128906, 0.040420257568359375, 0.040261119842529294, 0.040476318359375, 0.04023910522460938, 0.040534591674804686, 0.041220382690429686, 0.04046614456176758, 0.040288543701171874, 0.040220577239990236, 0.04073382568359375, 0.0405145263671875, 0.04073497772216797, 0.0402163200378418, 0.04029561614990235, 0.04024531173706054, 0.04059622573852539, 0.040005630493164065, 0.03991551971435547, 
0.04012851333618164, 0.04024524688720703, 0.03983769607543945, 0.039874561309814455, 0.03995647811889649, 0.04008892822265625, 0.039967391967773436, 0.04355440139770508, 0.04055081558227539, 0.03996672058105469, 0.03999532699584961, 0.039753536224365234, 0.03992601776123047, 0.039979007720947264, 0.040096832275390626, 0.0396317138671875, 0.04030003356933594, 0.04386671829223633, 0.03987046432495117, 0.041020225524902344, 0.04005478286743164, 0.03957964706420898, 0.03951615905761719, 0.03961161422729492, 0.0400043830871582, 0.0400524787902832, 0.03998336029052734, 0.04034764862060547, 0.03994214248657227, 0.0397762565612793, 0.039669761657714846, 0.03943219375610352, 0.03954483032226563, 0.03978364944458008, 0.040057632446289064, 0.03970048141479492, 0.0406236801147461, 0.04017401504516602, 0.039898143768310544, 0.03958473587036133, 0.039867904663085936, 0.03963033676147461, 0.04181273651123047, 0.04025312042236328, 0.040454208374023436, 0.04025759887695313, 0.039860671997070315, 0.039841793060302735, 0.039995391845703124, 0.040083297729492186, 0.040036033630371094, 0.039868831634521484, 0.04027151870727539, 0.03972751998901367, 0.03958169555664062, 0.03967180633544922, 0.03983564758300781, 0.039798782348632815, 0.03971686553955078, 0.03983564758300781, 0.040097793579101565, 0.04076876831054688, 0.039971073150634764, 0.03976243209838867, 0.039626049041748046, 0.039627456665039064, 0.03970835113525391, 0.04011788940429688, 0.03951004791259766, 0.04003033447265625, 0.04045059204101562, 0.040286209106445314, 0.03996867370605469, 0.03977001571655273, 0.03982476806640625, 0.039529281616210936, 0.03958988952636719, 0.0404967041015625, 0.04008806228637695, 0.04007708740234375, 0.03993743896484375, 0.03962511825561523, 0.040847774505615234, 0.03986841583251953, 0.03961779022216797, 0.0397215690612793, 0.03960153579711914, 0.03945347213745117, 0.040049663543701174, 0.040026718139648435, 0.039801246643066404, 0.04010764694213867, 0.039843807220458986, 0.03971219253540039, 0.03962774276733398, 0.03978755187988281, 0.039939041137695315, 0.039995391845703124, 0.040352928161621095, 0.03994300842285156, 0.04055859375, 0.039831550598144534, 0.039569408416748046, 0.039462913513183595, 0.039300609588623046, 0.04045846557617187, 0.040046207427978514, 0.03985036849975586, 0.03955449676513672, 0.03970544052124023, 0.04011372756958008, 0.03989481735229492, 0.03953705596923828, 0.03925008010864258, 0.03957097625732422, 0.03989561462402344, 0.039556671142578125, 0.03989139175415039, 0.03976988983154297, 0.03965769577026367, 0.039731201171875, 0.039495681762695314, 0.03980287933349609, 0.04027391815185547, 0.03979990386962891, 0.03958262252807617, 0.03992076873779297, 0.04365811157226562, 0.039294975280761715, 0.03930931091308594, 0.04010569763183594, 0.03982140731811523, 0.03991980743408203, 0.039997440338134765, 0.0397589111328125, 0.03951264190673828, 0.04007564926147461, 0.039577598571777346, 0.03965273666381836, 0.039759521484375, 0.039847934722900394, 0.03973168182373047, 0.0395494384765625, 0.03994214248657227, 0.03983484649658203, 0.04016566467285156, 0.039444480895996094, 0.03940758514404297, 0.03975785446166992, 0.04033065414428711, 0.03967622375488281, 0.04005526351928711, 0.040034111022949216, 0.03964924621582031, 0.039956222534179686, 0.039729183197021484, 0.0392993278503418, 0.04008345413208008, 0.040715648651123044, 0.039991710662841795, 0.039779998779296874, 0.039677921295166015, 0.03928329467773437, 0.03973686218261719, 0.039276447296142575, 0.039233470916748045, 0.03944291305541992, 
0.03989910507202148, 0.040394943237304685, 0.04014668655395508, 0.03983180618286133, 0.040079360961914064, 0.03988889694213867, 0.0400711669921875, 0.040707935333251954, 0.03979280090332031, 0.04052377700805664, 0.04013852691650391, 0.040333534240722654, 0.04024838256835937, 0.04007417678833008, 0.03959388732910156, 0.03974563217163086, 0.03939728164672852, 0.03922748947143555, 0.039167713165283204, 0.03956700897216797, 0.04003084945678711, 0.04037971115112305, 0.04088396835327148, 0.039461822509765626, 0.03957555389404297, 0.039532543182373044, 0.03954012680053711, 0.03951267242431641, 0.03978380966186523, 0.03986236953735352, 0.04015977478027344, 0.039669761657714846, 0.04038787078857422, 0.039884769439697265, 0.03974835205078125, 0.039553024291992187, 0.039855422973632815, 0.03986502456665039, 0.04039680099487305, 0.0401080322265625, 0.039913471221923826, 0.04051388931274414, 0.03989907073974609, 0.040027679443359374, 0.03974211120605469, 0.03958771133422852, 0.04044198226928711, 0.039143009185791014, 0.0390926399230957, 0.03906524658203125, 0.03981142425537109, 0.040471775054931644, 0.03975027084350586, 0.039787967681884764, 0.039860065460205076, 0.039366943359375, 0.039422561645507816, 0.03941497421264648, 0.03914387130737305, 0.03964748764038086, 0.039909568786621094, 0.039819198608398436, 0.03984998321533203, 0.03957727813720703, 0.03926252746582031, 0.03929884719848633, 0.03968022537231446, 0.04427775955200195, 0.04055244827270508, 0.040295841217041016, 0.03981558227539062, 0.039317249298095706, 0.03950636672973633, 0.039400447845458986, 0.039498752593994144, 0.039957630157470704, 0.04195622253417969, 0.040030208587646485, 0.04016742324829101, 0.03986764907836914, 0.03975244903564453, 0.04007321548461914, 0.040066398620605466, 0.03951046371459961, 0.04075337600708008, 0.04113817596435547, 0.0401690559387207, 0.03999337768554687, 0.03981145477294922, 0.03959807968139648, 0.03972892761230469, 0.03963833618164062, 0.03945913696289063, 0.03983529663085938, 0.040088512420654296, 0.04006089782714844, 0.040000896453857425, 0.03964108657836914, 0.03956345748901367, 0.03929340744018555, 0.039521984100341793, 0.03928915023803711, 0.04053606414794922, 0.03992575836181641, 0.04081545639038086, 0.039702144622802735, 0.039809249877929685, 0.03976208114624023, 0.03976396942138672, 0.03995033645629883, 0.039616512298583983, 0.039400894165039065, 0.03958819198608399, 0.04015897750854492, 0.03979232025146484, 0.03993679809570312, 0.03969615936279297, 0.03974371337890625, 0.03995238494873047, 0.03980287933349609, 0.03967350387573242, 0.0405302734375, 0.0402966079711914, 0.039769119262695315, 0.039876705169677736, 0.04000950241088867, 0.04023187255859375, 0.039556480407714846, 0.03974310302734375, 0.039895103454589846, 0.039715648651123044, 0.04028201675415039, 0.04000380706787109, 0.040041599273681644, 0.03962515258789062, 0.03981356811523438, 0.039809024810791016, 0.03986227035522461, 0.039792640686035156, 0.03998921585083008, 0.040819904327392575, 0.04031983947753906, 0.03956505584716797, 0.0396229133605957, 0.040271232604980466, 0.03957980728149414, 0.04064713668823242, 0.0397844467163086, 0.039880702972412106, 0.03996780776977539, 0.039707359313964845, 0.03946723175048828, 0.03917004776000976, 0.0393616943359375, 0.03979910278320312, 0.039798656463623044, 0.03957417678833008, 0.03974956893920899, 0.039548992156982425, 0.0393359375, 0.03914137649536133, 0.03924991989135742, 0.03942755126953125, 0.03970716857910156, 0.039851486206054686, 0.04049359893798828, 
0.040022014617919925]",tokens/s,24.90974725468395,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,874.1888,655.294464,0.0,260.046848,258.555392,s,1,7.527779296875,7.527779296875,0.0,7.527779296875,7.527779296875,7.527779296875,7.527779296875,[7.527779296875],,kWh,1.5538597287504293e-05,1.7067869787336107e-06,4.514170277997165e-06,2.175955454423507e-05,,MB,1325.068288,751.763456,0.0,341.835776,317.950464,s,18,0.19514825820922851,0.010841569900512694,0.00033796102924892106,0.010710544109344482,0.011126953697204591,0.011620157051086424,0.011872703132629394,"[0.01093945598602295, 0.010771424293518066, 0.011935839653015137, 0.010841279983520509, 0.010702048301696777, 0.010719039916992187, 0.010690400123596192, 0.011564448356628418, 0.010691391944885253, 0.010722208023071288, 0.010649632453918458, 0.010690239906311036, 0.010890432357788086, 0.010601568222045898, 0.010757984161376953, 0.010633760452270508, 0.010651840209960937, 0.010695263862609864]",tokens/s,23612.81644163857,kWh,3.115932021611639e-07,3.4363449575720296e-08,1.607406716969019e-07,5.066973234337861e-07,tokens/kWh,505232587.8991019,MB,1338.540032,779.026432,0.0,369.098752,317.953024,s,18,9.933052001953124,0.5518362223307292,0.003266523007112156,0.5515629272460938,0.5566544311523438,0.5573135284423828,0.5585013287353516,"[0.5587982788085938, 0.5515595703125, 0.5522174682617188, 0.5484296875, 0.5528018798828125, 0.5511383056640625, 0.553289794921875, 0.5540931396484375, 0.5515662841796874, 0.5504417724609375, 0.54669091796875, 0.5496655883789062, 0.5470558471679687, 0.5498707885742188, 0.5484144287109375, 0.5534824829101562, 0.5564842529296875, 0.557051513671875]",tokens/s,114.16430718142045,kWh,1.5852757085558006e-05,1.7481872435484924e-06,5.955272993040292e-06,2.355621732214679e-05,tokens/kWh,2674453.1661613365,,s,1134,9.924552287101738,0.008751809776985668,0.0003252445187168874,0.008687439918518067,0.008831110382080079,0.009003175783157348,0.010744747161865239,"[0.008410528182983398, 0.008704959869384766, 0.00870809555053711, 0.008684736251831054, 0.008617055892944337, 0.008650176048278809, 0.008689951896667481, 0.008640512466430664, 0.008675104141235351, 0.00862611198425293, 0.008635711669921876, 0.008633024215698242, 0.008689727783203125, 0.008628447532653809, 0.008964096069335938, 0.008736767768859864, 0.009592831611633301, 0.009575551986694335, 0.009444255828857422, 0.009559840202331542, 0.011028672218322754, 0.00880025577545166, 0.008738816261291504, 0.008927231788635253, 0.008736576080322266, 0.008711520195007325, 0.008969056129455566, 0.008691072463989258, 0.00872208023071289, 0.00866198444366455, 0.008670880317687988, 0.008667488098144532, 0.008658944129943847, 0.008759296417236329, 0.008751040458679199, 0.008730688095092774, 0.008720383644104004, 0.00875276756286621, 0.008653183937072755, 0.008721823692321778, 0.008669792175292968, 0.008647999763488769, 
0.008652928352355958, 0.008636992454528808, 0.008693663597106934, 0.008814208030700684, 0.009508831977844238, 0.011827199935913087, 0.008761119842529298, 0.008714976310729981, 0.008761216163635253, 0.008652704238891602, 0.008713855743408203, 0.008681599617004394, 0.008720864295959472, 0.008720383644104004, 0.008732319831848145, 0.008982848167419434, 0.008707584381103516, 0.008663328170776367, 0.008683775901794434, 0.00870809555053711, 0.008721792221069335, 0.008603039741516114, 0.008776000022888183, 0.008753439903259278, 0.008742719650268556, 0.008751296043395996, 0.008822943687438965, 0.008744799613952636, 0.008659999847412109, 0.008684543609619141, 0.008664352416992187, 0.008722111701965332, 0.008749600410461425, 0.008677151679992676, 0.00866096019744873, 0.008661151885986328, 0.008780351638793945, 0.008807999610900879, 0.008706496238708497, 0.008681471824645997, 0.00897439956665039, 0.008707136154174804, 0.008711039543151855, 0.0087193603515625, 0.008627200126647949, 0.0086746244430542, 0.00867807960510254, 0.008650752067565918, 0.008802304267883301, 0.008678624153137207, 0.008741472244262695, 0.008755295753479005, 0.008710240364074707, 0.00868070411682129, 0.008758015632629394, 0.008734463691711426, 0.008681728363037109, 0.008689663887023925, 0.008668224334716796, 0.008659296035766602, 0.009030240058898926, 0.008706048011779785, 0.008669183731079102, 0.00881868839263916, 0.008722432136535644, 0.008806400299072266, 0.009395456314086914, 0.009048447608947754, 0.008855936050415038, 0.00883683204650879, 0.0087042875289917, 0.008748224258422851, 0.00862451171875, 0.008656543731689453, 0.008732895851135253, 0.009234047889709473, 0.008680383682250976, 0.008744959831237792, 0.008643744468688964, 0.008688159942626953, 0.008644895553588867, 0.008675104141235351, 0.00866329574584961, 0.008769536018371582, 0.008550463676452636, 0.0087259521484375, 0.008737536430358886, 0.00868729591369629, 0.008735199928283692, 0.008725631713867187, 0.00874790382385254, 0.008679424285888672, 0.008654656410217286, 0.008818880081176757, 0.00866006374359131, 0.008743840217590332, 0.008634367942810058, 0.008631744384765626, 0.008622655868530274, 0.008643903732299806, 0.008633024215698242, 0.00865875244140625, 0.008769248008728027, 0.008633983612060548, 0.008735584259033204, 0.008654848098754882, 0.008644288063049316, 0.008679327964782714, 0.008689760208129883, 0.00866540813446045, 0.008644576072692872, 0.008615967750549316, 0.008599552154541015, 0.008660991668701172, 0.008806400299072266, 0.008935423851013183, 0.00873846435546875, 0.008763744354248046, 0.008689663887023925, 0.008898048400878907, 0.011575776100158692, 0.009235712051391601, 0.008720319747924804, 0.008779871940612792, 0.008719103813171387, 0.00871833610534668, 0.008669183731079102, 0.008771391868591308, 0.008683327674865723, 0.008687999725341797, 0.00871628761291504, 0.008673279762268067, 0.00864787197113037, 0.008708928108215332, 0.008816160202026368, 0.008691455841064453, 0.008749792098999024, 0.008720383644104004, 0.008758463859558106, 0.008842047691345214, 0.00873862361907959, 0.008675519943237304, 0.00870809555053711, 0.008660191535949708, 0.008661791801452637, 0.008638463973999023, 0.008660991668701172, 0.008565664291381836, 0.008704000473022461, 0.008828927993774414, 0.008704000473022461, 0.008669183731079102, 0.008689663887023925, 0.00862822437286377, 0.008651935577392578, 0.008610655784606933, 0.00861184024810791, 0.008667136192321777, 0.008665087699890137, 0.008624032020568847, 0.009020768165588378, 0.008626912117004394, 0.008637855529785157, 
0.008649344444274903, 0.00866703987121582, 0.008617343902587891, 0.008707839965820313, 0.008645600318908691, 0.00859340763092041, 0.008667136192321777, 0.0086179838180542, 0.008751104354858399, 0.008948927879333496, 0.008657183647155762, 0.008600064277648926, 0.008654208183288574, 0.008611680030822754, 0.008667807579040527, 0.009181344032287597, 0.008695808410644532, 0.008624128341674805, 0.008713536262512207, 0.008602304458618164, 0.008790016174316406, 0.008629792213439942, 0.008622079849243165, 0.00868556785583496, 0.008618080139160156, 0.00862451171875, 0.008685407638549804, 0.008612000465393066, 0.008639616012573242, 0.009003487586975098, 0.008678848266601563, 0.008655360221862793, 0.008827360153198242, 0.008787967681884766, 0.008769472122192383, 0.008738880157470702, 0.00872652816772461, 0.008728063583374024, 0.008671008110046386, 0.008665823936462402, 0.008615712165832519, 0.008705792427062988, 0.008679840087890625, 0.008978495597839356, 0.008747008323669434, 0.008638463973999023, 0.008663040161132812, 0.008397439956665038, 0.008652223587036132, 0.008642175674438476, 0.008737728118896485, 0.008840895652770997, 0.008790335655212402, 0.008671232223510742, 0.008790016174316406, 0.008761343955993652, 0.008689663887023925, 0.008712191581726075, 0.008720383644104004, 0.008705471992492676, 0.008665663719177246, 0.008683135986328125, 0.00864089584350586, 0.008667136192321777, 0.00870736026763916, 0.008677568435668945, 0.008574624061584473, 0.008674176216125488, 0.00862822437286377, 0.008619039535522462, 0.008809408187866212, 0.008736800193786622, 0.01102569580078125, 0.010760640144348145, 0.008789567947387696, 0.008778464317321778, 0.008930399894714355, 0.008815520286560059, 0.008742752075195313, 0.008671392440795898, 0.008683520317077637, 0.008675328254699707, 0.008665087699890137, 0.008744959831237792, 0.008674752235412598, 0.008605631828308105, 0.008612480163574219, 0.008664896011352539, 0.008831168174743652, 0.008818271636962891, 0.008817055702209472, 0.008714271545410155, 0.008603615760803223, 0.008769184112548829, 0.00867363166809082, 0.008631584167480468, 0.00862822437286377, 0.00868230438232422, 0.008705183982849122, 0.008835935592651367, 0.008695743560791015, 0.008617024421691894, 0.008717344284057618, 0.00865398406982422, 0.008665184020996093, 0.008648736000061035, 0.008692064285278321, 0.008620287895202637, 0.008664511680603027, 0.008626751899719239, 0.008533856391906739, 0.008638015747070312, 0.008674912452697754, 0.008698240280151367, 0.008710783958435058, 0.008754688262939453, 0.008650495529174804, 0.008651424407958984, 0.008674400329589844, 0.008764191627502442, 0.00882096004486084, 0.008935327529907227, 0.008781696319580078, 0.008741087913513183, 0.008830975532531739, 0.008703488349914551, 0.008729087829589843, 0.009273344039916993, 0.008756832122802734, 0.008765503883361817, 0.008792415618896484, 0.008676863670349122, 0.008695679664611817, 0.008716927528381348, 0.008768511772155761, 0.008690624237060547, 0.008673343658447265, 0.008630240440368652, 0.00865283203125, 0.008632320404052735, 0.008695808410644532, 0.008648192405700684, 0.008686016082763672, 0.008575039863586426, 0.008695167541503906, 0.008757887840270996, 0.008674304008483886, 0.008653504371643066, 0.008685888290405274, 0.008690752029418946, 0.008649503707885742, 0.008673439979553223, 0.008726304054260253, 0.008624064445495605, 0.008642848014831544, 0.008670623779296876, 0.008614208221435547, 0.008548640251159667, 0.008630271911621093, 0.008667136192321777, 0.008632320404052735, 0.008833024024963379, 
0.00970751953125, 0.009828351974487304, 0.008673279762268067, 0.008683520317077637, 0.008853504180908203, 0.008689663887023925, 0.008625247955322265, 0.008612768173217773, 0.008864895820617676, 0.008700192451477051, 0.00872822380065918, 0.00843558406829834, 0.00876966381072998, 0.008711615562438964, 0.008778112411499023, 0.00866323184967041, 0.008644319534301758, 0.008671520233154296, 0.008712191581726075, 0.00875046443939209, 0.008737279891967773, 0.008740991592407226, 0.008642560005187988, 0.008883584022521972, 0.008657535552978516, 0.009713120460510254, 0.01159222412109375, 0.008740863800048827, 0.008828031539916991, 0.008681471824645997, 0.008671839714050293, 0.008648799896240235, 0.008616127967834473, 0.0086179838180542, 0.00862992000579834, 0.008675040245056153, 0.008642880439758301, 0.00864083194732666, 0.00872447967529297, 0.008638463973999023, 0.0086179838180542, 0.008792032241821289, 0.008695263862609864, 0.008682047843933106, 0.008812543869018554, 0.008713727951049804, 0.008790528297424317, 0.008861696243286133, 0.008673279762268067, 0.00872447967529297, 0.00869705581665039, 0.008729375839233399, 0.008880127906799316, 0.008801440238952636, 0.008843520164489745, 0.008741472244262695, 0.00870576000213623, 0.008701408386230469, 0.008698687553405762, 0.008750176429748536, 0.008725055694580078, 0.008718272209167481, 0.008749471664428712, 0.00867251205444336, 0.00867199993133545, 0.008785920143127441, 0.008736800193786622, 0.008709407806396485, 0.008743616104125976, 0.008771583557128907, 0.008642560005187988, 0.008769536018371582, 0.008673279762268067, 0.008681023597717285, 0.010270719528198241, 0.009801280021667481, 0.00894159984588623, 0.009328767776489258, 0.009259296417236327, 0.009842687606811524, 0.00870809555053711, 0.008674719810485839, 0.008651359558105469, 0.00873193645477295, 0.008659680366516114, 0.008605600357055664, 0.008627296447753906, 0.008645631790161134, 0.008630271911621093, 0.008709247589111329, 0.008729472160339355, 0.008665087699890137, 0.008634048461914063, 0.00877952003479004, 0.008706591606140136, 0.009287712097167968, 0.009011199951171875, 0.00874015998840332, 0.00876204776763916, 0.008787967681884766, 0.008761119842529298, 0.00899443244934082, 0.008749664306640625, 0.008787487983703613, 0.008800736427307129, 0.008746272087097167, 0.008686304092407227, 0.008697855949401855, 0.008656895637512207, 0.008615936279296875, 0.00868556785583496, 0.008632320404052735, 0.008607487678527832, 0.00863644790649414, 0.008652159690856934, 0.008606559753417968, 0.008754816055297852, 0.008777536392211915, 0.008606271743774414, 0.008578463554382324, 0.008637056350708008, 0.008666080474853516, 0.008706336021423339, 0.008714847564697266, 0.00879967975616455, 0.008630975723266602, 0.008677375793457032, 0.008650752067565918, 0.008664959907531739, 0.008693920135498048, 0.00864019203186035, 0.008632608413696289, 0.008606752395629884, 0.008647647857666016, 0.008699263572692871, 0.008661631584167481, 0.008683520317077637, 0.00841932773590088, 0.008814304351806641, 0.008677663803100586, 0.010979328155517578, 0.010343808174133301, 0.008735072135925294, 0.008696096420288086, 0.008650752067565918, 0.008642560005187988, 0.00874015998840332, 0.008747072219848632, 0.008690303802490234, 0.008742912292480469, 0.00868556785583496, 0.008699040412902832, 0.008700063705444336, 0.008761055946350097, 0.008682463645935059, 0.008622079849243165, 0.008750176429748536, 0.008753151893615722, 0.008840096473693848, 0.008642560005187988, 0.008627936363220215, 0.00863599967956543, 0.008628928184509277, 
0.00868556785583496, 0.008728096008300781, 0.008712672233581542, 0.008621184349060059, 0.00864083194732666, 0.008614463806152344, 0.008633440017700195, 0.00868006420135498, 0.008664480209350586, 0.008571776390075683, 0.008707327842712402, 0.008563296318054199, 0.008601632118225098, 0.00867136001586914, 0.008671232223510742, 0.00865449619293213, 0.008650912284851073, 0.00875334358215332, 0.008632320404052735, 0.008783871650695801, 0.00859545612335205, 0.008641632080078124, 0.008704416275024414, 0.008650655746459962, 0.008643168449401856, 0.00862822437286377, 0.008912896156311035, 0.008679424285888672, 0.00876527976989746, 0.008773823738098145, 0.008632287979125977, 0.008690752029418946, 0.008723391532897948, 0.008695808410644532, 0.008736127853393555, 0.008713919639587403, 0.008749695777893066, 0.008411007881164551, 0.008696031570434571, 0.0087575044631958, 0.008655263900756835, 0.008660927772521972, 0.008734527587890626, 0.008867391586303711, 0.00866374397277832, 0.008768959999084473, 0.008673855781555176, 0.008654848098754882, 0.008667136192321777, 0.008756640434265137, 0.008655232429504394, 0.00863366413116455, 0.008663968086242676, 0.008623456001281738, 0.008589983940124512, 0.00869375991821289, 0.008652799606323243, 0.008664287567138673, 0.008610400199890136, 0.008710335731506348, 0.00860159969329834, 0.008632320404052735, 0.008757247924804687, 0.008589311599731446, 0.00861184024810791, 0.008615263938903809, 0.008665568351745605, 0.008620223999023437, 0.008681471824645997, 0.008613887786865235, 0.008584544181823731, 0.00857699203491211, 0.008616640090942382, 0.008652416229248047, 0.00883743953704834, 0.008807488441467285, 0.008690688133239746, 0.008635456085205079, 0.008638912200927735, 0.008657407760620118, 0.008679264068603516, 0.008670432090759277, 0.00861075210571289, 0.008654303550720215, 0.008634911537170411, 0.008777376174926757, 0.008652735710144042, 0.008659135818481446, 0.008630496025085449, 0.008656895637512207, 0.008615936279296875, 0.008615936279296875, 0.008851455688476563, 0.008662079811096192, 0.009878463745117187, 0.01113049602508545, 0.008735103607177734, 0.008691712379455567, 0.008763392448425293, 0.0088570556640625, 0.008449728012084962, 0.008680031776428223, 0.008659104347229003, 0.008761343955993652, 0.008624128341674805, 0.008693216323852539, 0.00864691162109375, 0.008702239990234374, 0.008636320114135742, 0.008605792045593261, 0.00861184024810791, 0.008644543647766113, 0.008675392150878907, 0.008627679824829102, 0.008607423782348633, 0.008632224082946777, 0.00862713623046875, 0.008622079849243165, 0.008675328254699707, 0.008704000473022461, 0.008681471824645997, 0.008693120002746583, 0.008691935539245606, 0.008747424125671387, 0.008642080307006837, 0.008626655578613281, 0.00862169647216797, 0.008589471817016602, 0.008646880149841309, 0.008656160354614258, 0.008849696159362793, 0.008720831871032715, 0.008780960083007813, 0.008737536430358886, 0.00868553638458252, 0.008715935707092285, 0.008636223793029784, 0.00862070369720459, 0.008605695724487305, 0.008773632049560547, 0.008623871803283691, 0.008808064460754394, 0.008734527587890626, 0.008600383758544923, 0.00858726406097412, 0.00858460807800293, 0.008611455917358398, 0.008698847770690918, 0.008583168029785156, 0.008640512466430664, 0.00860700798034668, 0.008727264404296876, 0.008644607543945313, 0.008704000473022461, 0.00865884780883789, 0.00860153579711914, 0.008702112197875976, 0.008603520393371582, 0.008685695648193359, 0.00920576000213623, 0.008722432136535644, 0.008672896385192872, 0.00862451171875, 
0.008382752418518066, 0.008638463973999023, 0.008623295783996583, 0.008736831665039062, 0.008596192359924317, 0.008610943794250488, 0.008597439765930175, 0.008637408256530761, 0.00869375991821289, 0.008719807624816894, 0.0086943359375, 0.008623935699462891, 0.008638655662536621, 0.008589311599731446, 0.0086179838180542, 0.008730624198913574, 0.008774687767028808, 0.008655839920043946, 0.008588895797729493, 0.008612256050109863, 0.008707712173461914, 0.008665375709533692, 0.008648863792419434, 0.008664640426635742, 0.008595840454101562, 0.008744192123413087, 0.00865328025817871, 0.008691360473632812, 0.008677727699279785, 0.008642848014831544, 0.008642560005187988, 0.008652192115783691, 0.00862224006652832, 0.008630208015441895, 0.00870246410369873, 0.008730624198913574, 0.008658944129943847, 0.008677375793457032, 0.008634367942810058, 0.008652799606323243, 0.008644607543945313, 0.0086364164352417, 0.008589311599731446, 0.00860364818572998, 0.008574624061584473, 0.008721920013427734, 0.008711008071899414, 0.009003007888793945, 0.011773632049560548, 0.009081151962280274, 0.008738816261291504, 0.00871833610534668, 0.008773632049560547, 0.008656895637512207, 0.00870809555053711, 0.008646464347839356, 0.008622271537780762, 0.0086113920211792, 0.008751551628112792, 0.008674719810485839, 0.008615839958190917, 0.008643263816833497, 0.008642560005187988, 0.008503199577331543, 0.0086757755279541, 0.008736736297607422, 0.008742400169372559, 0.008670975685119629, 0.008659744262695312, 0.008597215652465821, 0.00859945583343506, 0.008679807662963867, 0.008656895637512207, 0.00862822437286377, 0.00859340763092041, 0.008617376327514649, 0.008704607963562011, 0.008646559715270997, 0.00862217617034912, 0.008809696197509766, 0.008631072044372558, 0.008705408096313476, 0.008639039993286133, 0.00879526424407959, 0.008641471862792969, 0.008615936279296875, 0.008599552154541015, 0.008652000427246094, 0.008846112251281738, 0.008671232223510742, 0.008662176132202149, 0.008737567901611329, 0.008667200088500977, 0.008601471900939942, 0.008627679824829102, 0.008735391616821289, 0.008703807830810546, 0.008707839965820313, 0.008609439849853516, 0.00860649585723877, 0.00862003231048584, 0.0087326717376709, 0.008719455718994141, 0.008618047714233398, 0.00859222412109375, 0.008697855949401855, 0.008984576225280762, 0.00892518424987793, 0.008756511688232422, 0.008690400123596191, 0.008691712379455567, 0.008650752067565918, 0.008696895599365234, 0.00866812801361084, 0.008687583923339844, 0.008624128341674805, 0.008594911575317382, 0.008610336303710938, 0.008704000473022461, 0.008646656036376953, 0.008687616348266602, 0.008796159744262694, 0.00862822437286377, 0.00862822437286377, 0.008663040161132812, 0.008687616348266602, 0.008454815864562988, 0.008647999763488769, 0.008633024215698242, 0.008710080146789551, 0.008990176200866699, 0.008708703994750976, 0.008658944129943847, 0.008650752067565918, 0.008667136192321777, 0.008613887786865235, 0.008677056312561035, 0.008811840057373047, 0.008704319953918457, 0.008647359848022462, 0.008684736251831054, 0.00860857582092285, 0.008630271911621093, 0.008691712379455567, 0.008626175880432128, 0.008646656036376953, 0.008621408462524413, 0.008704671859741211, 0.008607744216918945, 0.008650752067565918, 0.008568832397460938, 0.008617247581481934, 0.008638976097106933, 0.008628447532653809, 0.008625568389892578, 0.008790623664855958, 0.008703743934631348, 0.008652031898498535, 0.008619008064270019, 0.00862822437286377, 0.008605695724487305, 0.00880844783782959, 0.00873408031463623, 
0.009247360229492187, 0.01175551986694336, 0.00871833610534668, 0.008676639556884766, 0.008720607757568359, 0.008816448211669922, 0.008676032066345214, 0.008666303634643555, 0.008677375793457032, 0.008616415977478028, 0.008640864372253417, 0.008650752067565918, 0.008699904441833496, 0.008617856025695802, 0.008619296073913573, 0.008586079597473144, 0.00859340763092041, 0.008665087699890137, 0.008644415855407714, 0.008577216148376464, 0.00867024040222168, 0.008614879608154297, 0.00861184024810791, 0.008673215866088867, 0.008640576362609863, 0.008671232223510742, 0.008470080375671386, 0.008636704444885254, 0.008687968254089356, 0.008746944427490234, 0.008859744071960449, 0.008699904441833496, 0.008654656410217286, 0.008679167747497558, 0.008657343864440918, 0.008650239944458007, 0.008769472122192383, 0.008663616180419921, 0.008671232223510742, 0.008671232223510742, 0.008871392250061035, 0.008736448287963867, 0.008815456390380859, 0.008710080146789551, 0.008691776275634765, 0.0087010555267334, 0.008713088035583495, 0.008734463691711426, 0.008802559852600098, 0.008632320404052735, 0.00863593578338623, 0.008663519859313965, 0.008679424285888672, 0.008749055862426757, 0.008761343955993652, 0.008701184272766112, 0.00866585636138916, 0.008910847663879394, 0.00894976043701172, 0.008689663887023925, 0.008654560089111328, 0.008634592056274414, 0.008624159812927246, 0.008678879737854004, 0.008708383560180664, 0.008648991584777832, 0.00863599967956543, 0.008706463813781738, 0.008632320404052735, 0.008589311599731446, 0.008699904441833496, 0.008708064079284667, 0.008642592430114747, 0.008656895637512207, 0.008671232223510742, 0.008714271545410155, 0.008695743560791015, 0.008687647819519043, 0.008675328254699707, 0.0086364164352417, 0.008642560005187988, 0.008734720230102539, 0.008701312065124511, 0.008671808242797851, 0.008814111709594726, 0.008612159729003907, 0.008685215950012207, 0.00868000030517578, 0.008767168045043945, 0.008486911773681641, 0.008734720230102539, 0.008697855949401855, 0.008671232223510742, 0.008712191581726075, 0.008683520317077637, 0.008691712379455567, 0.008740863800048827, 0.008697855949401855, 0.008609248161315917, 0.008624575614929198, 0.008648799896240235, 0.00867091178894043, 0.008681247711181641, 0.00871887969970703, 0.008678976058959962, 0.00868396759033203, 0.008660991668701172, 0.008683520317077637, 0.008677375793457032, 0.008599552154541015, 0.008650015830993653, 0.00872713565826416, 0.008636544227600098, 0.008687616348266602, 0.008894463539123536, 0.008759103775024413, 0.011014335632324219, 0.010712479591369629, 0.00892579174041748, 0.00880947208404541, 0.008778752326965332, 0.00872447967529297, 0.00881385612487793, 0.008746815681457519, 0.00880732822418213, 0.008701151847839355, 0.008760095596313477, 0.008646656036376953, 0.008860735893249512, 0.008731583595275878, 0.008686911582946778, 0.008716992378234863, 0.008671232223510742, 0.008773632049560547, 0.0086909761428833, 0.008729056358337402, 0.008681728363037109, 0.00860159969329834, 0.00861184024810791, 0.008695808410644532, 0.008656703948974609, 0.0086179838180542, 0.008674783706665039, 0.00883785629272461, 0.008736063957214355, 0.008782336235046387, 0.008732864379882813, 0.00872985553741455, 0.008659711837768555, 0.00864031982421875, 0.008696000099182128, 0.008947263717651368, 0.008737279891967773, 0.008682911872863769, 0.00872447967529297, 0.008794719696044923, 0.00875887966156006, 0.008790431976318359, 0.008749055862426757, 0.008740511894226075, 0.008720735549926757, 0.008761343955993652, 0.008790016174316406, 
0.008675040245056153, 0.008740192413330078, 0.00864352035522461, 0.008796159744262694, 0.00902143955230713, 0.008756704330444336, 0.008933759689331055, 0.008790176391601562, 0.00872652816772461, 0.008760319709777833, 0.008749343872070313, 0.008766176223754883, 0.008802304267883301, 0.008734720230102539, 0.008728575706481934, 0.008767264366149902, 0.008699295997619629, 0.008694592475891114, 0.008704000473022461, 0.008828927993774414, 0.008826272010803222, 0.00871894359588623, 0.00871014404296875, 0.008706111907958985, 0.008672991752624511, 0.008695903778076173, 0.008667263984680176, 0.008650431632995606, 0.008624064445495605, 0.008670944213867188, 0.008757951736450196, 0.008764639854431152, 0.00867199993133545, 0.008796159744262694, 0.008857600212097168, 0.008812543869018554, 0.009191424369812011, 0.00911081600189209, 0.008905376434326172, 0.008755264282226562, 0.008748736381530762, 0.00868937587738037, 0.008684127807617188, 0.008732192039489746, 0.009120223999023437, 0.009965567588806153, 0.010010623931884765, 0.009639936447143555, 0.008843263626098634, 0.008804351806640624, 0.008798208236694336, 0.008799296379089355, 0.00840176010131836, 0.00872652816772461, 0.008724448204040527, 0.008697376251220704, 0.009023008346557617, 0.008861727714538574, 0.00891744041442871, 0.009011712074279785, 0.008828191757202149, 0.009063136100769044, 0.009390080451965332, 0.009346816062927246, 0.009027839660644531, 0.00900879955291748, 0.008747008323669434, 0.009939295768737793, 0.011491328239440919, 0.008796159744262694, 0.008741888046264648, 0.00872755241394043, 0.00872447967529297, 0.008707615852355957, 0.00872492790222168, 0.0089334077835083, 0.008904576301574707, 0.008796256065368652, 0.008849120140075684, 0.008710463523864747, 0.008712191581726075, 0.008753151893615722, 0.008689663887023925, 0.008654848098754882, 0.00866220760345459, 0.00862825584411621, 0.008644960403442382, 0.00869215965270996, 0.008695136070251465, 0.008651424407958984, 0.008711872100830078, 0.008675583839416504, 0.008720447540283203, 0.008744864463806153, 0.008689760208129883, 0.008732288360595704, 0.008655232429504394, 0.008621760368347168, 0.008691455841064453, 0.008790047645568847, 0.008641056060791016, 0.008828927993774414, 0.008756511688232422, 0.008960448265075683, 0.008759455680847168, 0.008785375595092773, 0.008706720352172851, 0.008685407638549804, 0.008671392440795898, 0.008758848190307617, 0.008673376083374023, 0.008640864372253417, 0.008658623695373536, 0.008722399711608887, 0.008636575698852539]",tokens/s,114.26208127028372,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4293.541888,4878.958592,0.0,4483.710976,4465.672704,s,1,10.53728125,10.53728125,0.0,10.53728125,10.53728125,10.53728125,10.53728125,[10.53728125],,kWh,0.00010265646206668559,1.1316277597974052e-05,3.244585928999927e-05,0.00014641859895465892,,MB,2154.92608,5302.583296,0.0,4892.655616,4837.669376,s,10,1.806138107299805,0.18061381072998048,0.0003293052221499723,0.18058362579345705,0.18102312774658205,0.18117773971557619,0.1813014292907715,"[0.1805165710449219, 0.18033718872070312, 0.18065068054199218, 0.18019810485839843, 0.18073670959472657, 0.18069020080566406, 0.18098876953125, 0.18032659912109375, 0.18133235168457032, 0.18036093139648438]",tokens/s,1417.3888417797832,kWh,5.292656321875218e-06,5.83684918681336e-07,3.519010751714318e-06,9.395351992270872e-06,tokens/kWh,27247515.60245955,MB,2154.92608,5470.355456,0.0,5060.427776,5014.227968,s,10,15.812712036132812,1.5812712036132812,0.002739175781028118,1.5802742309570315,1.58424609375,1.5859874267578125,1.5873804931640627,"[1.577431884765625, 1.5804415283203126, 1.5797474365234374, 1.579446044921875, 1.5827811279296875, 1.58010693359375, 1.579886962890625, 1.583859130859375, 1.5812822265625, 1.587728759765625]",tokens/s,39.841362984440586,kWh,4.623907491687399e-05,5.0998846059126645e-06,3.0630627679086255e-05,8.19695872018729e-05,tokens/kWh,768577.739019778,,s,630,15.809436853408794,0.02509434421176002,0.0002683312272408559,0.025091216087341307,0.02539562587738037,0.02544892520904541,0.02562079782485962,"[0.02498044776916504, 0.024815391540527344, 0.024776927947998045, 0.024805376052856445, 0.024788991928100586, 0.024788991928100586, 0.02476348876953125, 0.024691295623779298, 0.02475449562072754, 0.02478489685058594, 0.024793088912963866, 0.024864767074584963, 0.024766464233398438, 0.0247459831237793, 0.024763839721679688, 0.024744384765625, 0.02489971160888672, 0.024831455230712892, 0.02490787124633789, 0.024795583724975586, 0.02489139175415039, 0.02500320053100586, 0.024898368835449217, 0.02482585525512695, 0.024792160034179687, 0.024869695663452148, 0.024821792602539063, 0.02477987289428711, 0.024884191513061524, 0.025010175704956054, 0.02507980728149414, 0.025064672470092773, 0.024938623428344728, 0.024996511459350584, 0.0251146240234375, 0.02512076759338379, 0.02505881690979004, 0.02513699150085449, 0.025188735961914063, 0.025165727615356445, 0.025176000595092774, 0.025184703826904298, 0.02513491249084473, 0.025129152297973634, 0.025194496154785157, 0.02534809684753418, 0.025310367584228517, 0.025208736419677736, 0.025102848052978514, 0.025165695190429688, 0.02533228874206543, 0.025389055252075195, 0.02526348876953125, 0.02523744010925293, 0.025434656143188475, 0.025280672073364256, 0.025202400207519533, 0.02540777587890625, 0.02535798454284668, 0.02527471923828125, 0.025361984252929688, 0.025620927810668947, 0.025276416778564452, 0.02510304069519043, 0.024698879241943358, 0.024653823852539062, 0.024667999267578126, 0.024707231521606445, 0.02466307258605957, 0.024621536254882812, 0.02473958396911621, 0.024779520034790038, 0.024803327560424804, 0.024825151443481446, 0.0247459831237793, 0.024807231903076172, 0.024838336944580076, 0.024850751876831053, 0.02490310478210449, 0.024869823455810548, 0.02487868881225586, 0.02479052734375, 0.024837024688720705, 0.02485043144226074, 0.024985599517822265, 0.02507776069641113, 0.025175615310668944, 0.025071231842041016, 0.02494041633605957, 0.024984159469604493, 0.02505353546142578, 0.025010175704956054, 0.025037055969238282, 
0.025005151748657226, 0.02500204849243164, 0.02509020805358887, 0.025090496063232423, 0.02507776069641113, 0.025009727478027342, 0.0251396484375, 0.025190528869628907, 0.025227136611938476, 0.025161279678344726, 0.02514784049987793, 0.025153568267822266, 0.025163743972778322, 0.0252620792388916, 0.025384735107421875, 0.025440256118774415, 0.025311103820800783, 0.025268575668334962, 0.025337984085083008, 0.025268096923828125, 0.025223167419433593, 0.02549660873413086, 0.025320415496826173, 0.025372671127319335, 0.02544451141357422, 0.025354080200195313, 0.025376640319824218, 0.025380992889404298, 0.02560166358947754, 0.025423776626586913, 0.025455072402954103, 0.025442304611206053, 0.02549964714050293, 0.025324640274047853, 0.024912799835205078, 0.02489263916015625, 0.024666080474853514, 0.02470569610595703, 0.024719520568847655, 0.024807424545288087, 0.024729183197021484, 0.024676767349243164, 0.024769792556762694, 0.024742687225341797, 0.02475004768371582, 0.02478656005859375, 0.024909568786621095, 0.02491823959350586, 0.02493020820617676, 0.024833728790283203, 0.02486172866821289, 0.024846111297607422, 0.024788991928100586, 0.024795007705688477, 0.024967296600341797, 0.02500998306274414, 0.024968992233276366, 0.025135583877563476, 0.0250467529296875, 0.025036863327026367, 0.02502876853942871, 0.025028032302856447, 0.025047615051269533, 0.024993791580200195, 0.02495078468322754, 0.024993791580200195, 0.025069568634033205, 0.0251267204284668, 0.025097728729248047, 0.02506585693359375, 0.025198911666870116, 0.025208831787109375, 0.025210880279541017, 0.0252620792388916, 0.025167871475219726, 0.025531999588012694, 0.025235904693603515, 0.025192415237426758, 0.02517318344116211, 0.025172128677368164, 0.025209503173828127, 0.025118719100952147, 0.025312543869018555, 0.025449184417724608, 0.02535424041748047, 0.02528665542602539, 0.025351743698120117, 0.025340351104736328, 0.025362432479858397, 0.025332735061645507, 0.025278783798217772, 0.025374656677246095, 0.025445119857788086, 0.02533718490600586, 0.025295488357543944, 0.025346080780029298, 0.025178176879882812, 0.024930303573608398, 0.024844287872314453, 0.024825983047485352, 0.02469811248779297, 0.024636032104492188, 0.024805280685424806, 0.02510771179199219, 0.02479705619812012, 0.024687583923339845, 0.02479248046875, 0.024785503387451172, 0.025139104843139647, 0.024799327850341796, 0.02468659210205078, 0.0247459831237793, 0.024796703338623046, 0.024844608306884765, 0.024849952697753905, 0.024760480880737304, 0.02491628837585449, 0.024985759735107423, 0.02489331245422363, 0.024848512649536133, 0.02494588851928711, 0.024965919494628907, 0.024977407455444335, 0.02499939155578613, 0.025013856887817383, 0.02504390335083008, 0.025028831481933595, 0.024956703186035156, 0.02507776069641113, 0.025109535217285157, 0.025099231719970704, 0.025052480697631836, 0.025213632583618164, 0.02529484748840332, 0.025169919967651368, 0.025100288391113282, 0.025126911163330077, 0.025157632827758788, 0.02509404754638672, 0.025058496475219728, 0.025397184371948243, 0.025332799911499025, 0.02526812744140625, 0.025391040802001955, 0.025362495422363282, 0.0253308162689209, 0.025318271636962892, 0.025302751541137695, 0.02533945655822754, 0.025350879669189454, 0.025266176223754884, 0.025251840591430662, 0.02533782386779785, 0.025269952774047852, 0.025203039169311523, 0.025335264205932618, 0.02539369583129883, 0.025415679931640626, 0.02547203254699707, 0.025045759201049806, 0.024967424392700194, 0.02495871925354004, 0.024827903747558593, 0.024743871688842775, 
0.02476608085632324, 0.027119232177734376, 0.0246812801361084, 0.024653728485107423, 0.02470307159423828, 0.02476144027709961, 0.024716127395629884, 0.02484230422973633, 0.02488319969177246, 0.024936447143554686, 0.02490572738647461, 0.02484841537475586, 0.02486182403564453, 0.02511296081542969, 0.025000415802001952, 0.024921184539794923, 0.024808351516723632, 0.024840320587158203, 0.02491200065612793, 0.02560111999511719, 0.024847007751464843, 0.02503232002258301, 0.025024223327636718, 0.02499456024169922, 0.02503593635559082, 0.025055999755859374, 0.02509414482116699, 0.02509574317932129, 0.025039295196533203, 0.025104255676269532, 0.025061023712158202, 0.02504934310913086, 0.02506729507446289, 0.025154016494750978, 0.025237472534179687, 0.025378816604614256, 0.0254486083984375, 0.025304927825927734, 0.025290752410888673, 0.02534809684753418, 0.025197568893432616, 0.025177087783813477, 0.025290048599243165, 0.025232063293457032, 0.025195968627929687, 0.025215551376342772, 0.0252126407623291, 0.025223455429077148, 0.025286848068237305, 0.025458303451538086, 0.025409215927124022, 0.025389440536499025, 0.025348127365112303, 0.025344095230102538, 0.025358335494995117, 0.025341920852661133, 0.025315359115600587, 0.025436159133911132, 0.025061952590942384, 0.02478060722351074, 0.02474015998840332, 0.024678047180175782, 0.024641759872436525, 0.024778816223144533, 0.024788351058959962, 0.024793855667114256, 0.02484000015258789, 0.024817663192749022, 0.024774560928344725, 0.02493552017211914, 0.02495795249938965, 0.024987648010253907, 0.02488934326171875, 0.024784383773803712, 0.02478665542602539, 0.024848480224609375, 0.024844991683959962, 0.024883359909057618, 0.024905759811401366, 0.024912864685058593, 0.02499875259399414, 0.024932512283325194, 0.024907455444335938, 0.024893760681152344, 0.025005184173583984, 0.02493881607055664, 0.025050880432128907, 0.02512348747253418, 0.025192287445068358, 0.02564534378051758, 0.025096063613891603, 0.025159679412841796, 0.02513100814819336, 0.025185823440551758, 0.025112607955932616, 0.02508956718444824, 0.02507254409790039, 0.025163743972778322, 0.025226848602294922, 0.02521513557434082, 0.02520297622680664, 0.025233407974243165, 0.02526380729675293, 0.02518252754211426, 0.025157632827758788, 0.025358335494995117, 0.025350208282470702, 0.025362464904785158, 0.025397151947021485, 0.02528678321838379, 0.02532748794555664, 0.025310783386230468, 0.025259647369384765, 0.025212959289550783, 0.02533184051513672, 0.025376800537109376, 0.025334272384643555, 0.02541360092163086, 0.025335968017578123, 0.025283967971801758, 0.025291391372680664, 0.024946720123291015, 0.02489792060852051, 0.024862720489501954, 0.024766464233398438, 0.024737247467041014, 0.02477519989013672, 0.024879104614257814, 0.0247172794342041, 0.024676383972167967, 0.024774656295776368, 0.024780799865722656, 0.024877056121826172, 0.024766464233398438, 0.02482585525512695, 0.024840415954589842, 0.024856351852416993, 0.0248702392578125, 0.02487772750854492, 0.02487295913696289, 0.02487071990966797, 0.024885440826416017, 0.02486832046508789, 0.024857120513916017, 0.02484646415710449, 0.024843231201171875, 0.024884128570556642, 0.025004032135009766, 0.024990751266479493, 0.02498771286010742, 0.025121248245239258, 0.02515011215209961, 0.02512067222595215, 0.025097536087036132, 0.02515551948547363, 0.025117311477661133, 0.025124128341674806, 0.0251759033203125, 0.025057600021362304, 0.02514579200744629, 0.025157760620117188, 0.025217023849487305, 0.025286367416381836, 0.025272607803344727, 
0.025296287536621095, 0.02518671989440918, 0.025202880859375, 0.025280511856079102, 0.02527027130126953, 0.025218784332275392, 0.02524188804626465, 0.025322656631469726, 0.02536944007873535, 0.025332927703857422, 0.025400032043457033, 0.02541904067993164, 0.025489919662475585, 0.025479488372802735, 0.02539107131958008, 0.02543577575683594, 0.02540550422668457, 0.02530544090270996, 0.02535024070739746, 0.025327232360839842, 0.025350143432617187, 0.025231359481811523, 0.024922111511230468, 0.0248668155670166, 0.024771903991699217, 0.024724288940429686, 0.024729280471801757, 0.024785087585449218, 0.024769599914550782, 0.02481155204772949, 0.02489641571044922, 0.02478214454650879, 0.024769216537475585, 0.024805599212646485, 0.024812383651733397, 0.025000768661499022, 0.024940671920776366, 0.02498150444030762, 0.02495689582824707, 0.02501024055480957, 0.02505289649963379, 0.025000192642211913, 0.025026464462280275, 0.02510857582092285, 0.025062688827514647, 0.02500681686401367, 0.025026559829711914, 0.025040319442749023, 0.025036800384521486, 0.025024799346923827, 0.02495471954345703, 0.024924192428588867, 0.025049503326416016, 0.025115999221801757, 0.025189023971557617, 0.02507776069641113, 0.02513920021057129, 0.025124448776245117, 0.02516009521484375, 0.025161727905273438, 0.02518534469604492, 0.02521340751647949, 0.025237823486328127, 0.025189727783203126, 0.025966400146484374, 0.025248607635498046, 0.02521481513977051, 0.025506175994873048, 0.025456575393676757, 0.025458112716674804, 0.026091808319091796, 0.02537436866760254, 0.025410175323486328, 0.02536857604980469, 0.025267200469970705, 0.025291648864746094, 0.02543014335632324, 0.025317375183105468, 0.025357887268066405, 0.02545631980895996, 0.025430784225463868, 0.02547711944580078, 0.02544371223449707, 0.025239519119262695, 0.02485424041748047, 0.024719680786132812, 0.024645631790161132, 0.024723167419433593, 0.02477846336364746, 0.024811168670654297, 0.024689567565917968, 0.02469068717956543, 0.024816640853881834, 0.024837343215942383, 0.02473347282409668, 0.024751935958862305, 0.024774848937988283, 0.02487049674987793, 0.024833696365356445, 0.02489334487915039, 0.024916831970214843, 0.024980607986450194, 0.025033311843872072, 0.024895776748657228, 0.024885568618774414, 0.025027551651000977, 0.025084287643432616, 0.024983903884887696, 0.02485862350463867, 0.02491561508178711, 0.024953407287597658, 0.024999616622924804, 0.025059423446655273, 0.025083904266357423, 0.02510995292663574, 0.02510207939147949, 0.025187007904052733, 0.025214336395263673, 0.025197311401367186, 0.02514262390136719, 0.025148063659667968, 0.025215200424194336, 0.02516713523864746, 0.02511510467529297, 0.025304224014282225, 0.025383808135986327, 0.02526323127746582, 0.025262975692749025, 0.025444351196289062, 0.025427967071533202, 0.025316736221313477, 0.025113216400146486, 0.025069568634033205, 0.025174016952514647, 0.0252392635345459, 0.025213247299194337, 0.02536240005493164, 0.025448448181152345, 0.0255098876953125, 0.02527846336364746, 0.02535628890991211, 0.025374624252319337, 0.025417823791503907, 0.02554265594482422, 0.025620479583740235, 0.025617664337158202, 0.02533635139465332, 0.025091936111450195, 0.024928255081176756, 0.024829952239990235, 0.024666112899780275, 0.024731647491455077, 0.02483404731750488, 0.024821760177612305, 0.024909631729125976, 0.024788671493530274, 0.02480499267578125, 0.024864736557006835, 0.024998815536499023, 0.024868864059448242, 0.024860671997070313, 0.024877056121826172, 0.024925567626953124, 0.024805952072143554, 
0.024891456604003905, 0.024870912551879884, 0.02506547164916992, 0.025038463592529297, 0.0250184326171875, 0.026261823654174805, 0.025176319122314453, 0.024999679565429686, 0.02505523109436035, 0.02509404754638672, 0.02510652732849121, 0.02508083152770996, 0.025178943634033203, 0.025163967132568358, 0.025081279754638672, 0.02509040069580078, 0.02530112075805664, 0.025276287078857422, 0.025278688430786133, 0.025372480392456053, 0.02558598327636719, 0.0252126407623291, 0.025338016510009765, 0.025261344909667968, 0.025240224838256838, 0.025306528091430663, 0.02534160041809082, 0.0252523193359375, 0.025303327560424804, 0.025395456314086913, 0.02547929573059082, 0.025489280700683594, 0.02540348815917969, 0.025413536071777345, 0.025413631439208984, 0.02534137535095215, 0.027045984268188477, 0.02526223945617676, 0.025290111541748046, 0.025348672866821288, 0.025389440536499025, 0.025466815948486328, 0.025411264419555664, 0.025413536071777345, 0.02561686325073242]",tokens/s,39.84961677266576,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in 
from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = 
cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1264.20992,1247.674368,0.0,845.152256,799.789056,s,1,8.3349716796875,8.3349716796875,0.0,8.3349716796875,8.3349716796875,8.3349716796875,8.3349716796875,[8.3349716796875],,kWh,3.114275858754354e-05,3.4278199243677994e-06,8.730840318008082e-06,4.330141882991942e-05,,MB,1475.670016,1277.034496,0.0,859.83232,828.70272,s,10,0.3604083518981933,0.036040835189819334,0.00012322554692584568,0.03600415992736816,0.03616562728881836,0.03626122894287109,0.03633771026611328,"[0.03635683059692383, 0.03596150588989258, 0.035931934356689454, 0.0359496955871582, 0.03614438247680664, 0.03608694458007813, 0.035945377349853515, 0.03602336120605469, 0.035987167358398436, 0.03602115249633789]",tokens/s,7103.054039999434,kWh,1.082576060524658e-06,1.1938859152795504e-07,7.168966640518442e-07,1.918861316104457e-06,tokens/kWh,133412455.5284245,MB,1500.475392,1277.034496,0.0,859.83232,828.70528,s,10,12.401146728515624,1.2401146728515624,0.012046266317167474,1.235614990234375,1.2474471557617188,1.2604022888183593,1.2707663952636719,"[1.2350218505859376, 1.24034814453125, 1.234222412109375, 1.2337264404296875, 1.273357421875, 1.2445682373046876, 1.2296748046875, 1.2306395263671874, 1.2433797607421875, 1.2362081298828125]",tokens/s,50.80175356294723,kWh,3.600428262364285e-05,3.970823912952854e-06,1.524436610494683e-05,5.521947264154253e-05,tokens/kWh,1140901.877295439,,s,630,12.395952459335332,0.019676115014817978,0.000535524730651251,0.019546175956726072,0.020106563758850098,0.020240932846069336,0.02179022163391115,"[0.019552383422851562, 0.01965795135498047, 0.019572832107543944, 0.019477184295654298, 0.019521535873413084, 0.01944780731201172, 0.01970809555053711, 0.019867584228515624, 0.019748735427856445, 0.021447967529296875, 0.019659711837768556, 0.01966374397277832, 0.019702688217163086, 0.019879520416259764, 0.020047391891479492, 0.019624832153320313, 0.019604896545410155, 
0.01963065528869629, 0.019861183166503905, 0.019667295455932616, 0.019589120864868165, 0.019615743637084963, 0.019451583862304687, 0.019394943237304688, 0.01953580856323242, 0.01979327964782715, 0.01969011116027832, 0.019568639755249022, 0.0197010555267334, 0.01992732810974121, 0.01992278480529785, 0.019415327072143555, 0.01936137580871582, 0.01937068748474121, 0.019449663162231446, 0.01936185646057129, 0.019342912673950195, 0.019356224060058595, 0.019349504470825195, 0.01927577590942383, 0.019478527069091797, 0.019378047943115234, 0.01942950439453125, 0.019467647552490235, 0.019465087890625, 0.019733535766601563, 0.01952774429321289, 0.019353952407836914, 0.01950921630859375, 0.019429727554321288, 0.019513471603393555, 0.019619712829589842, 0.01970515251159668, 0.01956435203552246, 0.019514047622680664, 0.0198985595703125, 0.01948467254638672, 0.019462272644042967, 0.019520992279052733, 0.019337631225585936, 0.019392511367797852, 0.019522911071777345, 0.019460351943969726, 0.019532224655151368, 0.01950054359436035, 0.019355712890625, 0.019464576721191406, 0.019396543502807617, 0.019328447341918947, 0.01954217529296875, 0.019441984176635743, 0.01967945671081543, 0.019528768539428712, 0.019467071533203126, 0.01950486373901367, 0.019411231994628905, 0.019443872451782227, 0.01946931266784668, 0.01949523162841797, 0.019593727111816405, 0.019499040603637694, 0.019615743637084963, 0.01956252861022949, 0.019817440032958985, 0.01968230438232422, 0.01987583923339844, 0.019597312927246095, 0.019637632369995116, 0.02035500717163086, 0.022962623596191407, 0.021437952041625977, 0.019921247482299804, 0.01960335922241211, 0.01954457664489746, 0.02006345558166504, 0.019440544128417968, 0.01947612762451172, 0.019488351821899414, 0.019532384872436522, 0.019404224395751953, 0.019439359664916993, 0.019485536575317382, 0.0194334716796875, 0.01935510444641113, 0.019956256866455076, 0.019460351943969726, 0.019436288833618164, 0.01944473648071289, 0.019511104583740235, 0.01982076835632324, 0.02014339256286621, 0.0201112003326416, 0.019729248046875, 0.019877248764038086, 0.01977996826171875, 0.019757312774658205, 0.01970790481567383, 0.01957232093811035, 0.01963868713378906, 0.019644384384155274, 0.01955366325378418, 0.01947273635864258, 0.019466560363769533, 0.019490816116333007, 0.01944371223449707, 0.019465728759765624, 0.019302400588989257, 0.01948057556152344, 0.01945599937438965, 0.01966694450378418, 0.019629056930541993, 0.01967001533508301, 0.019570688247680663, 0.019475456237792968, 0.019520511627197267, 0.019556352615356445, 0.019599359512329103, 0.01942527961730957, 0.019462335586547853, 0.019377695083618165, 0.0199334716796875, 0.01965056037902832, 0.01970115280151367, 0.019702367782592774, 0.019658304214477538, 0.01952387237548828, 0.019468448638916017, 0.019402048110961915, 0.01949728012084961, 0.01958540725708008, 0.01955407905578613, 0.019898208618164062, 0.019732864379882812, 0.019755264282226563, 0.019582143783569338, 0.01956483268737793, 0.01957302474975586, 0.019603328704833986, 0.019607839584350587, 0.020023136138916015, 0.01966454315185547, 0.019624288558959962, 0.01959247970581055, 0.019473119735717772, 0.019533824920654298, 0.01961356735229492, 0.019542144775390624, 0.019539968490600586, 0.019469343185424804, 0.0194815673828125, 0.019398719787597656, 0.01949001693725586, 0.01958780860900879, 0.019524703979492186, 0.0194401912689209, 0.019478879928588867, 0.01939455986022949, 0.019631999969482422, 0.019482336044311523, 0.019462560653686522, 0.01957683181762695, 0.019372032165527343, 
0.01944371223449707, 0.019758623123168947, 0.02003798484802246, 0.020088960647583008, 0.019686624526977538, 0.01957967948913574, 0.01953366470336914, 0.019390207290649414, 0.020054624557495116, 0.019582975387573243, 0.01950249671936035, 0.019365631103515624, 0.019448671340942383, 0.019412992477416992, 0.019378175735473634, 0.019365791320800782, 0.019371519088745116, 0.019366495132446288, 0.019333120346069335, 0.01951081657409668, 0.019595359802246092, 0.01962246322631836, 0.019500864028930663, 0.01940671920776367, 0.019662944793701172, 0.019525568008422853, 0.019498655319213867, 0.019412511825561522, 0.01942969512939453, 0.019378368377685546, 0.019485088348388673, 0.019720191955566405, 0.019746559143066406, 0.01966105651855469, 0.019526912689208985, 0.01933296012878418, 0.01937481689453125, 0.019405120849609374, 0.019379520416259767, 0.019304224014282226, 0.019389471054077147, 0.01934707260131836, 0.019337343215942382, 0.01923276710510254, 0.019234239578247072, 0.01927395248413086, 0.019321184158325195, 0.0192491512298584, 0.019277824401855468, 0.01923891258239746, 0.019193439483642577, 0.019431840896606444, 0.019335103988647462, 0.01956255912780762, 0.019775487899780272, 0.019541664123535155, 0.01986124801635742, 0.019929887771606446, 0.019799871444702147, 0.0198656005859375, 0.020115455627441405, 0.019941280364990235, 0.020088672637939453, 0.020170080184936524, 0.02008566474914551, 0.020071456909179688, 0.020140031814575195, 0.02009702491760254, 0.0200611515045166, 0.02008176040649414, 0.020327007293701172, 0.020116640090942384, 0.020099935531616212, 0.020135488510131836, 0.020357599258422853, 0.020322303771972656, 0.020190431594848634, 0.020142847061157227, 0.019859392166137694, 0.020381759643554688, 0.020230144500732423, 0.01993744087219238, 0.020793184280395508, 0.027248640060424805, 0.020404319763183593, 0.019881887435913084, 0.019822559356689452, 0.019729536056518556, 0.02274822425842285, 0.019725824356079103, 0.019536224365234375, 0.01946419143676758, 0.01942323112487793, 0.01956233596801758, 0.019425024032592775, 0.019400543212890625, 0.019356000900268556, 0.01948297691345215, 0.019752159118652343, 0.01973478317260742, 0.020115232467651366, 0.019798656463623047, 0.019785280227661132, 0.019953472137451172, 0.019845760345458985, 0.019877887725830077, 0.019977760314941407, 0.02014691162109375, 0.02051638412475586, 0.020258495330810547, 0.020590976715087892, 0.020189279556274413, 0.02017695999145508, 0.020140031814575195, 0.02009107208251953, 0.020249759674072266, 0.020208288192749023, 0.020166656494140626, 0.02021785545349121, 0.020674560546875, 0.021930015563964844, 0.021027999877929686, 0.02034172821044922, 0.020039520263671874, 0.019951616287231445, 0.020322303771972656, 0.020113407135009767, 0.01995363235473633, 0.019862688064575196, 0.019672096252441405, 0.019668832778930664, 0.019715999603271483, 0.01963795280456543, 0.019364160537719728, 0.019512704849243164, 0.019296575546264648, 0.019430559158325197, 0.0193721923828125, 0.019482656478881834, 0.019354272842407226, 0.019386144638061525, 0.01981667137145996, 0.019403839111328126, 0.019333503723144532, 0.019339136123657227, 0.019402912139892578, 0.019354143142700196, 0.01924710464477539, 0.01942527961730957, 0.019592767715454103, 0.019530176162719726, 0.019463520050048828, 0.019501567840576172, 0.01962614440917969, 0.019740447998046876, 0.019767520904541015, 0.019613632202148436, 0.02001433563232422, 0.019759552001953125, 0.019662784576416015, 0.01964076805114746, 0.01949014472961426, 0.019573343276977538, 0.01956870460510254, 
0.019869695663452147, 0.019920608520507813, 0.01995599937438965, 0.020170656204223633, 0.02003798484802246, 0.02004876708984375, 0.020106048583984376, 0.01997011184692383, 0.01995724868774414, 0.01992310333251953, 0.02001900863647461, 0.020259424209594725, 0.020072351455688475, 0.020162336349487303, 0.02017919921875, 0.020004159927368165, 0.020047807693481447, 0.020180736541748047, 0.019995296478271484, 0.020140575408935546, 0.020152128219604493, 0.02003171157836914, 0.019850624084472655, 0.019823007583618164, 0.019684831619262697, 0.01991324806213379, 0.020025344848632814, 0.019903968811035157, 0.019943391799926758, 0.01957744026184082, 0.019578847885131836, 0.019466144561767578, 0.019283967971801756, 0.020336639404296874, 0.019576160430908204, 0.019368608474731444, 0.019293312072753907, 0.019515615463256836, 0.019517120361328126, 0.019622880935668944, 0.019475872039794923, 0.019454463958740235, 0.019828832626342774, 0.019795040130615234, 0.019692447662353514, 0.019564544677734375, 0.019512384414672852, 0.01959427261352539, 0.019570592880249024, 0.019578880310058593, 0.01942527961730957, 0.019396608352661132, 0.019404800415039062, 0.01964851188659668, 0.019752639770507813, 0.019701631546020507, 0.019634624481201172, 0.019453216552734375, 0.019548896789550782, 0.019723583221435546, 0.01961654472351074, 0.019451711654663088, 0.019435392379760743, 0.019544063568115236, 0.019627647399902345, 0.019359840393066406, 0.019480096817016603, 0.01951644706726074, 0.01944313621520996, 0.019468799591064453, 0.01948057556152344, 0.019345407485961915, 0.019322879791259767, 0.019329023361206055, 0.01925734329223633, 0.01929529571533203, 0.01935420799255371, 0.019708000183105468, 0.019308799743652343, 0.01925529670715332, 0.019933183670043944, 0.01935750389099121, 0.019380447387695312, 0.01937183952331543, 0.019454080581665038, 0.019337247848510743, 0.019386367797851564, 0.0194334716796875, 0.019461408615112304, 0.019396671295166014, 0.019399328231811522, 0.01943654441833496, 0.019543039321899415, 0.019509248733520508, 0.019890304565429687, 0.019496480941772462, 0.019685728073120117, 0.019580928802490235, 0.019664960861206053, 0.019466047286987306, 0.020161663055419922, 0.01945510482788086, 0.0194968318939209, 0.019689472198486328, 0.019337215423583985, 0.019279104232788086, 0.019241727828979493, 0.019306495666503908, 0.019417087554931642, 0.019351808547973633, 0.019319936752319335, 0.019337600708007812, 0.01929020881652832, 0.019195360183715822, 0.019173088073730468, 0.01926038360595703, 0.01929360008239746, 0.019208799362182616, 0.019247360229492187, 0.02304204750061035, 0.01970672035217285, 0.019360576629638672, 0.019649631500244142, 0.019420032501220704, 0.01938819122314453, 0.019189823150634767, 0.019243295669555665, 0.01958675193786621, 0.01931430435180664, 0.019243551254272462, 0.01935532760620117, 0.0194616641998291, 0.01967638397216797, 0.0195885124206543, 0.020035104751586916, 0.019561248779296873, 0.01954777526855469, 0.019471935272216797, 0.019267711639404297, 0.01935590362548828, 0.019413440704345704, 0.019345247268676757, 0.019398815155029298, 0.01969152069091797, 0.019644416809082032, 0.020258623123168944, 0.019627967834472657, 0.01959756851196289, 0.019561952590942382, 0.019499551773071288, 0.01955596733093262, 0.01953548812866211, 0.019618656158447264, 0.019421312332153322, 0.019347232818603517, 0.01943552017211914, 0.019400863647460936, 0.01936367988586426, 0.019346208572387696, 0.01964236831665039, 0.019527679443359376, 0.01948182487487793, 0.019440383911132814, 0.01940483283996582, 
0.0194150390625, 0.01923849678039551, 0.01929871940612793, 0.019288063049316406, 0.019369855880737304, 0.020060096740722656, 0.019439231872558593, 0.01932972717285156, 0.01927168083190918, 0.019335071563720704, 0.019426591873168947, 0.0193686408996582, 0.019799776077270508, 0.01928188705444336, 0.019316192626953124, 0.019333984375, 0.01925939178466797, 0.019367935180664063, 0.019458047866821288, 0.019314687728881837, 0.019289087295532227, 0.019368959426879884, 0.019281919479370118, 0.019410816192626953, 0.01942336082458496, 0.019586528778076172, 0.01978835105895996, 0.01973583984375, 0.019749568939208983, 0.01978153610229492, 0.019871839523315428, 0.020282400131225585, 0.01997494316101074, 0.020019392013549804, 0.020068384170532225, 0.020101184844970702, 0.02005513572692871, 0.019942432403564452, 0.020969247817993163, 0.02006211280822754, 0.02083452796936035, 0.02007846450805664, 0.01995356750488281, 0.01994300842285156, 0.02015078353881836, 0.020180992126464844, 0.02044313621520996, 0.020215583801269532, 0.020136159896850588, 0.01984515190124512, 0.020123615264892578, 0.02007244873046875, 0.020045856475830078, 0.019789791107177733, 0.019734560012817384, 0.019784704208374023, 0.019903072357177733, 0.01948217582702637, 0.019597631454467773, 0.019521663665771485, 0.019451904296875, 0.019509279251098632, 0.019402719497680663, 0.019373151779174806, 0.019542943954467772, 0.019687423706054686, 0.019348703384399413, 0.019602239608764647, 0.01992905616760254, 0.01951683235168457, 0.019442272186279298, 0.01938787269592285, 0.022874048233032226, 0.023228767395019532, 0.019748672485351563, 0.019620288848876954, 0.019683135986328124, 0.01957881546020508, 0.019419391632080077, 0.019365888595581054, 0.019451200485229494, 0.01949728012084961, 0.019337600708007812, 0.019328575134277343, 0.019282367706298827, 0.01925529670715332, 0.019564544677734375, 0.01985308837890625, 0.01956399917602539, 0.019432191848754884, 0.019353599548339845, 0.01978995132446289, 0.01998031997680664, 0.019783519744873048, 0.019654079437255858, 0.019610431671142577, 0.019840864181518553, 0.019873695373535158, 0.020436992645263673, 0.019504608154296874, 0.019399103164672853, 0.01951241683959961, 0.019512544631958006, 0.019330848693847658, 0.019371200561523437, 0.019356063842773438, 0.019365280151367188, 0.019264703750610353, 0.01922211265563965, 0.019240703582763672, 0.01919795227050781, 0.019321279525756838, 0.019557727813720702, 0.019414783477783203, 0.019463104248046877, 0.019304767608642578, 0.019275455474853515, 0.01927987289428711, 0.01920790481567383, 0.019397920608520507]",tokens/s,50.82304099396173,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,2226.5856,2551.119872,0.0,2155.872256,2032.413184,s,1,8.91062890625,8.91062890625,0.0,8.91062890625,8.91062890625,8.91062890625,8.91062890625,[8.91062890625],,kWh,4.962559017501083e-05,5.466919062269274e-06,1.5655012524005973e-05,7.074752176128608e-05,,MB,2275.807232,2827.943936,0.0,2418.016256,2279.563776,s,10,0.7760554885864258,0.07760554885864257,0.0001963902127203726,0.07763959884643555,0.07780231704711914,0.07785509605407714,0.07789731925964355,"[0.07790787506103515, 0.07766345977783203, 0.07763129425048829, 0.07724739074707031, 0.07762963104248047, 0.07771437072753906, 0.07764790344238282, 0.07727324676513672, 0.07754972839355469, 0.07779058837890625]",tokens/s,3298.733193244989,kWh,2.319681841898155e-06,2.558206105176512e-07,1.5470757526507914e-06,4.122578205066597e-06,tokens/kWh,62097063.358404994,MB,2283.020288,2911.830016,0.0,2501.902336,2389.801984,s,10,13.973065063476563,1.3973065063476562,0.003427724356547006,1.3969668579101562,1.401062353515625,1.4017873413085937,1.4023673315429688,"[1.3914072265625, 1.393419189453125, 1.4009012451171876, 1.4004678955078125, 1.3964017333984375, 1.4000360107421874, 1.4025123291015624, 1.397531982421875, 1.395591064453125, 1.39479638671875]",tokens/s,45.08674346952859,kWh,4.133535275435144e-05,4.558946191796699e-06,2.321011071974948e-05,6.910440966589762e-05,tokens/kWh,911663.9633359014,,s,630,13.965461080551151,0.022167398540557378,0.0003694476016180388,0.022058432579040527,0.022498105430603026,0.0227490740776062,0.023932267227172854,"[0.023076864242553712, 0.022781951904296875, 0.022462080001831055, 0.02277334403991699, 0.022332191467285156, 0.02209791946411133, 0.02203878402709961, 0.02194985580444336, 0.02207494354248047, 0.022087551116943358, 0.022014400482177735, 0.021895647048950195, 0.02198534393310547, 0.021904767990112303, 0.021988927841186525, 0.02204569625854492, 0.022129951477050783, 0.021947200775146485, 0.02204252815246582, 0.02200927925109863, 0.022014720916748047, 0.022103231430053712, 0.022211231231689454, 0.022060384750366212, 0.02210083198547363, 0.02201523208618164, 0.021993215560913087, 0.02196940803527832, 0.022058528900146486, 0.021956544876098633, 0.021994304656982423, 0.021987327575683592, 0.02206924819946289, 0.0220425910949707, 0.021934112548828124, 0.021845632553100586, 0.021931840896606446, 0.021942047119140624, 0.02202908706665039, 0.02190336036682129, 0.021975231170654298, 0.021987136840820314, 0.022183488845825196, 0.022038976669311525, 0.022018335342407228, 0.021949344635009766, 0.02195350456237793, 0.021901151657104493, 0.021915935516357423, 0.022158784866333006, 0.022018335342407228, 0.021983232498168945, 0.022038528442382813, 0.022002719879150392, 0.02203887939453125, 0.021948480606079103, 0.02204115104675293, 0.021997568130493163, 0.022026239395141603, 0.02224127960205078, 0.022478080749511718, 0.02231283187866211, 0.022141824722290038, 0.02288470458984375, 0.02227596855163574, 0.021926015853881837, 0.02231091117858887, 0.02256915283203125, 0.02285139274597168, 0.02217478370666504, 0.02203539276123047, 0.02205900764465332, 0.022040576934814454, 0.022079488754272462, 0.022097824096679687, 0.022019327163696287, 0.02197385597229004, 0.021850175857543945, 0.02184137535095215, 0.02201238441467285, 0.0219238395690918, 0.021987327575683592, 0.022067359924316406, 0.02194326400756836, 0.021988128662109373, 0.02206697654724121, 0.021876928329467773, 0.021928064346313475, 0.021786624908447266, 0.022064128875732423, 0.02192620849609375, 
0.022321855545043946, 0.022001056671142577, 0.022114912033081056, 0.022044767379760744, 0.02196988868713379, 0.021836736679077148, 0.021927040100097658, 0.022078271865844726, 0.021932031631469725, 0.02250739288330078, 0.02191993522644043, 0.02191360092163086, 0.021997568130493163, 0.022021184921264647, 0.022504383087158204, 0.02207321548461914, 0.022139007568359376, 0.022046720504760742, 0.021980352401733398, 0.021924671173095704, 0.022099967956542968, 0.022540288925170897, 0.02205900764465332, 0.02215116882324219, 0.022005760192871093, 0.02224287986755371, 0.022040159225463866, 0.02203094482421875, 0.022059263229370116, 0.02209382438659668, 0.02262835121154785, 0.022120447158813478, 0.022644960403442382, 0.02197465515136719, 0.02222051239013672, 0.023654272079467773, 0.02295238494873047, 0.02258403205871582, 0.022224288940429687, 0.022034208297729493, 0.02196124839782715, 0.022360063552856444, 0.021954559326171876, 0.022156864166259765, 0.022030784606933595, 0.02205411148071289, 0.021967647552490234, 0.02216783905029297, 0.0220664005279541, 0.022045183181762695, 0.021948415756225585, 0.022234495162963868, 0.022346336364746092, 0.02232249641418457, 0.02249388885498047, 0.022347904205322264, 0.022161312103271484, 0.022116352081298828, 0.02206515121459961, 0.022016000747680665, 0.02199888038635254, 0.02188889694213867, 0.022007680892944335, 0.02412771224975586, 0.02273356819152832, 0.022997247695922853, 0.02225724792480469, 0.02236636734008789, 0.022124223709106446, 0.02208799934387207, 0.022157312393188477, 0.022332544326782226, 0.022283136367797853, 0.022347776412963868, 0.0221265926361084, 0.022153215408325197, 0.021944320678710938, 0.021883039474487304, 0.021913631439208985, 0.02191084861755371, 0.021921632766723632, 0.021990047454833985, 0.021825536727905274, 0.021946367263793946, 0.021869823455810546, 0.02185260772705078, 0.02191801643371582, 0.022051872253417967, 0.022172639846801758, 0.022215808868408203, 0.022065088272094725, 0.022088640213012694, 0.021989248275756837, 0.02205299186706543, 0.021968511581420897, 0.02196928024291992, 0.021993471145629884, 0.02417804718017578, 0.02208358383178711, 0.022005247116088866, 0.02204489517211914, 0.022111743927001954, 0.021918176651000976, 0.021943775177001953, 0.022390975952148437, 0.021981855392456055, 0.022094079971313477, 0.02195568084716797, 0.022182559967041014, 0.02194380760192871, 0.022054527282714845, 0.021939071655273437, 0.022103391647338867, 0.022031007766723634, 0.022128704071044922, 0.02193574333190918, 0.022020416259765627, 0.022142175674438477, 0.022338592529296875, 0.021980831146240234, 0.02211235237121582, 0.02201100730895996, 0.022350912094116212, 0.022103904724121094, 0.022470624923706054, 0.022804479598999023, 0.02271014404296875, 0.022601856231689452, 0.022779136657714843, 0.022369024276733398, 0.022619487762451172, 0.02252249526977539, 0.022383903503417967, 0.02248518371582031, 0.022391008377075194, 0.022552768707275392, 0.022706335067749023, 0.022419424057006837, 0.022610111236572264, 0.022515775680541993, 0.022437664031982423, 0.022419456481933595, 0.022573280334472656, 0.02242729568481445, 0.02231865692138672, 0.02207619285583496, 0.022034208297729493, 0.02191798400878906, 0.021995231628417967, 0.021958656311035156, 0.022355167388916016, 0.021938528060913086, 0.021930431365966795, 0.021944320678710938, 0.02204787254333496, 0.021924448013305665, 0.022050399780273438, 0.022076095581054687, 0.022220800399780274, 0.02210201644897461, 0.02207744026184082, 0.022260543823242187, 0.02230271911621094, 0.022021440505981444, 
0.0223604793548584, 0.02198761558532715, 0.021987327575683592, 0.021985471725463866, 0.022071039199829102, 0.022255680084228516, 0.02205833625793457, 0.021862464904785155, 0.02201251220703125, 0.021968896865844727, 0.02198255920410156, 0.021947168350219728, 0.022284095764160156, 0.0225382080078125, 0.022401119232177736, 0.022239231109619142, 0.02201350402832031, 0.022170207977294923, 0.022099775314331056, 0.02205900764465332, 0.022077472686767578, 0.02207334327697754, 0.022001440048217774, 0.022210784912109375, 0.022273279190063475, 0.022250240325927734, 0.02222857666015625, 0.02249337577819824, 0.02245039939880371, 0.0223187198638916, 0.022178272247314453, 0.022619808197021484, 0.02210348892211914, 0.02215609550476074, 0.02203209686279297, 0.022167871475219727, 0.022601408004760744, 0.022272287368774416, 0.021986976623535156, 0.021876575469970704, 0.022053184509277343, 0.02216979217529297, 0.022035551071166993, 0.02210223960876465, 0.022043359756469726, 0.022048736572265627, 0.02287001609802246, 0.022160383224487306, 0.02226278305053711, 0.022130687713623046, 0.022108160018920898, 0.022405120849609376, 0.02198255920410156, 0.022069919586181642, 0.022037599563598635, 0.021908416748046874, 0.022042623519897463, 0.021949888229370117, 0.02200160026550293, 0.0219899845123291, 0.02188902473449707, 0.022337535858154296, 0.02394726371765137, 0.02225798416137695, 0.022137760162353515, 0.022360671997070314, 0.02217945671081543, 0.02194268798828125, 0.022311071395874023, 0.022761760711669923, 0.02250927925109863, 0.022216928482055663, 0.022374176025390626, 0.02207334327697754, 0.022024192810058595, 0.02192131233215332, 0.022084064483642578, 0.02202128028869629, 0.02196771240234375, 0.02197292709350586, 0.022078720092773438, 0.022012256622314454, 0.021959007263183592, 0.022141056060791017, 0.02246575927734375, 0.021951263427734374, 0.02191974449157715, 0.021935359954833984, 0.021962944030761718, 0.021916223526000977, 0.021915615081787108, 0.02180713653564453, 0.021908863067626953, 0.021871231079101563, 0.021957727432250978, 0.021881439208984374, 0.02192620849609375, 0.02197817611694336, 0.02195289611816406, 0.022093408584594725, 0.02212063980102539, 0.022006784439086914, 0.022107872009277343, 0.022038591384887694, 0.02203545570373535, 0.021890239715576174, 0.022210559844970702, 0.022089536666870118, 0.022116352081298828, 0.02188083267211914, 0.02200281524658203, 0.022016960144042967, 0.022255584716796874, 0.022257631301879882, 0.022166847229003906, 0.022090431213378905, 0.02208358383178711, 0.02231920051574707, 0.02430905532836914, 0.024731679916381834, 0.02253036880493164, 0.022423807144165038, 0.022595584869384764, 0.022255231857299804, 0.022352031707763672, 0.022041759490966796, 0.02221955108642578, 0.022156959533691407, 0.02235856056213379, 0.022319007873535156, 0.022085216522216795, 0.02194063949584961, 0.022017023086547852, 0.02206822395324707, 0.02320707130432129, 0.022229856491088867, 0.021935487747192384, 0.021979711532592774, 0.022056224822998047, 0.022162336349487305, 0.021974336624145507, 0.021987775802612304, 0.02203251266479492, 0.022191328048706056, 0.02200009536743164, 0.022118175506591797, 0.022039072036743164, 0.022273696899414063, 0.024794784545898438, 0.023702207565307616, 0.022517152786254883, 0.022436447143554687, 0.022218751907348632, 0.022437887191772463, 0.022779903411865234, 0.0227061767578125, 0.02225987243652344, 0.022128480911254883, 0.022124160766601564, 0.0224168643951416, 0.022127519607543944, 0.022001792907714843, 0.02198111915588379, 0.022044607162475586, 
0.022141952514648438, 0.02210508728027344, 0.022106111526489256, 0.02201203155517578, 0.022194047927856446, 0.022001056671142577, 0.022003456115722655, 0.02214944076538086, 0.02231929588317871, 0.02212236785888672, 0.022206432342529298, 0.022051584243774413, 0.022036224365234374, 0.022177087783813478, 0.02204947280883789, 0.022120447158813478, 0.021929983139038087, 0.021910783767700195, 0.02198182487487793, 0.022524032592773437, 0.02245193672180176, 0.022397184371948244, 0.02230944061279297, 0.022351104736328124, 0.02200422477722168, 0.022423807144165038, 0.021956544876098633, 0.02200172805786133, 0.021990432739257812, 0.021977247238159178, 0.021926687240600585, 0.0219648323059082, 0.021982431411743164, 0.021958751678466795, 0.022031040191650392, 0.021985279083251954, 0.021917055130004883, 0.021979328155517577, 0.022284736633300783, 0.022190080642700196, 0.021958080291748047, 0.02214134407043457, 0.022075551986694336, 0.022196224212646484, 0.021958656311035156, 0.02197532844543457, 0.02202569580078125, 0.022007167816162108, 0.021904064178466798, 0.021869855880737303, 0.021924863815307616, 0.02262403106689453, 0.022104127883911133, 0.021917600631713868, 0.021915807723999023, 0.021906879425048827, 0.022114400863647462, 0.022683744430541993, 0.022058719635009764, 0.02223126411437988, 0.022227392196655274, 0.02208790397644043, 0.022089151382446288, 0.021911903381347655, 0.02202217674255371, 0.021866464614868165, 0.02205695915222168, 0.021924896240234373, 0.022071487426757814, 0.021996320724487303, 0.02216534423828125, 0.022149280548095705, 0.022410751342773438, 0.022561279296875, 0.022461599349975585, 0.022462400436401367, 0.02216771125793457, 0.022424480438232423, 0.022781791687011718, 0.022447328567504882, 0.022201120376586916, 0.02196227264404297, 0.02206972885131836, 0.025210527420043944, 0.022159744262695312, 0.022419551849365234, 0.022324735641479493, 0.022164255142211913, 0.02201206398010254, 0.021869855880737303, 0.021983808517456054, 0.022061119079589842, 0.021943647384643553, 0.022096479415893554, 0.021934080123901366, 0.021917119979858398, 0.021834304809570312, 0.022052223205566407, 0.02219481658935547, 0.021959903717041016, 0.021852960586547853, 0.022006944656372072, 0.021807584762573242, 0.021899648666381836, 0.02192495918273926, 0.022174335479736327, 0.022024288177490234, 0.022269792556762695, 0.021930335998535156, 0.0219703369140625, 0.02190787124633789, 0.022032575607299806, 0.022806528091430665, 0.022413536071777342, 0.02264041519165039, 0.022101408004760743, 0.022088287353515625, 0.022037696838378907, 0.021979904174804686, 0.023217504501342773, 0.022014656066894532, 0.022179872512817382, 0.02192793655395508, 0.022034431457519533, 0.0218789119720459, 0.0220153923034668, 0.021893600463867187, 0.021966848373413086, 0.021946367263793946, 0.022076831817626954, 0.022206079483032225, 0.02211529541015625, 0.02185215950012207, 0.021915807723999023, 0.02233123207092285, 0.02316854476928711, 0.023895551681518554, 0.022577152252197266, 0.02200457572937012, 0.022134496688842775, 0.02201363182067871, 0.022012639999389648, 0.02192736053466797, 0.022239007949829102, 0.021920543670654297, 0.02205900764465332, 0.02225152015686035, 0.02229043197631836, 0.022446975708007813, 0.022427648544311524, 0.021944063186645508, 0.021932512283325194, 0.021808639526367187, 0.02204198455810547, 0.022004383087158203, 0.021999872207641602, 0.02307276725769043, 0.021949472427368163, 0.02189206314086914, 0.021853248596191407, 0.021896127700805665, 0.023895423889160158, 0.022857599258422852, 0.02315532875061035, 
0.022497407913208006, 0.022330528259277345, 0.021953311920166016, 0.02192758369445801, 0.02192630386352539, 0.022042720794677735, 0.02189116859436035, 0.02206470489501953, 0.022061279296875, 0.021962047576904297, 0.023093631744384766, 0.022143327713012695, 0.021991615295410157, 0.02185523223876953, 0.0219136962890625, 0.021977312088012697, 0.021974687576293946, 0.02244630432128906, 0.022175775527954102, 0.022392831802368163, 0.022143583297729492, 0.022332576751708983, 0.022016864776611328, 0.022056896209716795, 0.021954559326171876, 0.02190342330932617, 0.02183305549621582, 0.021956415176391603, 0.021887840270996092, 0.02191916847229004, 0.021913663864135742, 0.021825023651123047, 0.021888191223144532, 0.021903167724609374, 0.022138879776000975, 0.02208153533935547, 0.022013471603393554, 0.02222934341430664, 0.02203455924987793, 0.021978271484375, 0.021999488830566405, 0.02194492721557617, 0.0222271671295166, 0.022149280548095705, 0.02195033645629883, 0.021987455368041992, 0.021941375732421876]",tokens/s,45.11129252133056,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1169.5104,1101.98784,0.0,706.740224,681.6384,s,1,7.8803232421875,7.8803232421875,0.0,7.8803232421875,7.8803232421875,7.8803232421875,7.8803232421875,[7.8803232421875],,kWh,2.994021517082122e-05,3.2955293587886216e-06,9.018618325994099e-06,4.225436285560394e-05,,MB,1473.560576,1406.07488,0.0,996.1472,949.238272,s,10,0.2701465301513672,0.027014653015136718,0.0002590781925695431,0.026916496276855467,0.02731109790802002,0.02742468538284302,0.027515555362701418,"[0.027538272857666017, 0.027266656875610352, 0.0267741756439209, 0.027285856246948244, 0.02666486358642578, 0.026836351394653322, 0.02688630485534668, 0.02694668769836426, 0.027082208633422852, 0.026865152359008788]",tokens/s,9476.338632095676,kWh,7.87035152987432e-07,8.679267208216295e-08,4.824255132237186e-07,1.3562533382933135e-06,tokens/kWh,188755295.76365587,MB,1494.245376,1414.463488,0.0,1004.535808,949.240832,s,10,12.78926037597656,1.2789260375976563,0.004933773595789401,1.2782561645507813,1.2860602294921875,1.2865773559570313,1.2869910571289063,"[1.287094482421875, 1.2779775390625, 1.2859453125, 1.2803258056640625, 1.2744139404296875, 1.27096923828125, 1.2768492431640626, 1.28282861328125, 1.2785347900390625, 1.2743214111328125]",tokens/s,49.26008083965484,kWh,3.754832102909648e-05,4.1411248843222496e-06,1.5031698009176037e-05,5.6721143922594757e-05,tokens/kWh,1110696.9225792373,,s,630,12.787142049789429,0.020297050872681632,0.00033143789891812,0.020219488143920897,0.020541273307800294,0.02072889757156372,0.021887381591796873,"[0.020536287307739258, 0.020398143768310548, 0.02035308837890625, 0.020231744766235352, 0.020243776321411132, 0.020232736587524416, 0.020072927474975587, 0.020193439483642578, 0.020277088165283202, 0.02020351982116699, 0.02022809600830078, 0.02029148864746094, 0.020152416229248047, 0.020344352722167967, 0.020193952560424805, 0.020031295776367187, 0.020373504638671876, 0.020368831634521484, 0.020482624053955078, 0.020609024047851563, 0.02038102340698242, 0.020664831161499024, 0.022929567337036133, 0.020983808517456053, 0.02082611274719238, 0.020406272888183592, 0.020279296875, 0.020290559768676757, 0.020488576889038088, 0.0201997127532959, 0.020320608139038087, 0.02035043144226074, 0.020193824768066405, 0.020362335205078123, 0.020255647659301757, 0.020701183319091796, 0.020346879959106445, 0.020231775283813477, 0.02055619239807129, 0.02037721633911133, 0.020295904159545897, 0.020572256088256836, 0.02027676773071289, 0.02042524719238281, 0.020340448379516603, 0.02029801559448242, 0.020967424392700194, 0.022359968185424805, 0.020377695083618166, 0.020338623046875, 0.020423904418945312, 0.020304672241210936, 0.02027503967285156, 0.020164255142211915, 0.02020924758911133, 0.020259071350097656, 0.02015715217590332, 0.020161632537841798, 0.020675487518310547, 0.020349056243896484, 0.020332416534423827, 0.020082687377929686, 0.02027030372619629, 0.020314815521240235, 0.020259872436523437, 0.020146303176879883, 0.020446144104003906, 0.02020310401916504, 0.020187456130981444, 0.020291200637817385, 0.02019500732421875, 0.02021651268005371, 0.020248575210571287, 0.020183040618896485, 0.02009059143066406, 0.02006390380859375, 0.020212352752685545, 0.020471519470214843, 0.0204169921875, 0.02052076721191406, 0.02086016082763672, 0.020358144760131838, 0.020449024200439453, 0.020305919647216796, 0.020273151397705077, 0.02020966339111328, 0.020241472244262697, 0.020204479217529298, 0.020332256317138673, 0.020277088165283202, 0.02077231979370117, 
0.02034787178039551, 0.020099071502685546, 0.02011955261230469, 0.020090463638305665, 0.020115615844726563, 0.020101343154907227, 0.020148256301879882, 0.020245920181274413, 0.020203327178955077, 0.02021251106262207, 0.020332096099853515, 0.020263359069824217, 0.02033193588256836, 0.020107872009277345, 0.020090879440307616, 0.02008780860900879, 0.020118528366088868, 0.020154144287109373, 0.020258848190307616, 0.02051705551147461, 0.02022604751586914, 0.020312063217163084, 0.020172224044799805, 0.020146751403808595, 0.020231264114379883, 0.020179872512817384, 0.020313791275024414, 0.02095110321044922, 0.02037785530090332, 0.020238336563110353, 0.020243616104125978, 0.02030473518371582, 0.020606719970703125, 0.020461248397827148, 0.02031804847717285, 0.020342784881591795, 0.02071743965148926, 0.020582176208496093, 0.020520864486694337, 0.020335039138793944, 0.020254720687866212, 0.020152288436889647, 0.02016422462463379, 0.020121088027954103, 0.020138912200927735, 0.02016694450378418, 0.021693727493286134, 0.023953760147094726, 0.02070742416381836, 0.020313215255737305, 0.02029862403869629, 0.020166656494140626, 0.020137311935424805, 0.02012835121154785, 0.020041791915893555, 0.020041696548461913, 0.02001513671875, 0.02012553596496582, 0.02018694305419922, 0.019988832473754884, 0.01999667167663574, 0.02028489685058594, 0.02004368019104004, 0.02008127975463867, 0.01997177505493164, 0.020008575439453124, 0.020226816177368163, 0.02003721618652344, 0.02002569580078125, 0.020109312057495117, 0.02015203285217285, 0.02030342483520508, 0.020167392730712892, 0.02013167953491211, 0.020144287109375, 0.02017430305480957, 0.020273408889770507, 0.02027343940734863, 0.02169241523742676, 0.02009449577331543, 0.020060319900512696, 0.02017100715637207, 0.02012985610961914, 0.0204532470703125, 0.020096607208251953, 0.02045580863952637, 0.02041993522644043, 0.020534080505371095, 0.022040576934814454, 0.021884927749633788, 0.020585472106933594, 0.020264192581176756, 0.020108415603637696, 0.020173440933227538, 0.02021785545349121, 0.020731903076171874, 0.02103455924987793, 0.020854272842407227, 0.0206210880279541, 0.020460416793823242, 0.020256000518798827, 0.020345600128173828, 0.02017817687988281, 0.019983104705810547, 0.02003763198852539, 0.020129791259765627, 0.020162559509277343, 0.02007027244567871, 0.02005219268798828, 0.020506528854370116, 0.02044108772277832, 0.02072985649108887, 0.0202478084564209, 0.020196096420288086, 0.02037555122375488, 0.020158464431762696, 0.02035513687133789, 0.020459455490112306, 0.020457023620605468, 0.020291584014892578, 0.02044905662536621, 0.020607295989990233, 0.020414464950561522, 0.020338848114013673, 0.020510143280029296, 0.020599552154541016, 0.020445184707641603, 0.02029363250732422, 0.020238431930541992, 0.02022390365600586, 0.02060438346862793, 0.020640287399291992, 0.020385440826416017, 0.020267360687255858, 0.020099071502685546, 0.02048521614074707, 0.02062019157409668, 0.020312320709228514, 0.02045302391052246, 0.020192607879638672, 0.020062591552734373, 0.020275583267211916, 0.02004991912841797, 0.020846176147460937, 0.02024393653869629, 0.020247488021850585, 0.020282527923583985, 0.02016099166870117, 0.020140415191650392, 0.020120895385742188, 0.020588319778442384, 0.020652416229248047, 0.02015216064453125, 0.020126399993896486, 0.020099071502685546, 0.020414016723632813, 0.020089088439941408, 0.020086336135864257, 0.020177536010742188, 0.020320255279541014, 0.02030182456970215, 0.020199424743652345, 0.020346879959106445, 0.020165792465209963, 
0.02033750343322754, 0.020207199096679687, 0.02032067108154297, 0.020219295501708985, 0.02022825622558594, 0.020525503158569335, 0.020254720687866212, 0.020207616806030275, 0.02008678436279297, 0.020115455627441405, 0.020148000717163085, 0.019962015151977538, 0.020093055725097658, 0.020264896392822265, 0.020526144027709962, 0.02036761665344238, 0.020107872009277345, 0.020520832061767576, 0.020217567443847655, 0.020670656204223634, 0.020363584518432617, 0.02020265579223633, 0.020601152420043945, 0.020189855575561525, 0.02016828727722168, 0.02026851272583008, 0.020228000640869142, 0.020165536880493166, 0.020420608520507814, 0.020316160202026368, 0.02022400093078613, 0.02020351982116699, 0.0202608642578125, 0.020324352264404297, 0.020225727081298828, 0.020258207321166993, 0.020112287521362304, 0.020129600524902345, 0.020076736450195313, 0.020057952880859375, 0.020307680130004883, 0.020178720474243163, 0.020081087112426756, 0.02007267189025879, 0.02006630325317383, 0.020291648864746093, 0.020270015716552733, 0.020164960861206054, 0.020084672927856446, 0.019987167358398436, 0.02006447982788086, 0.020113183975219728, 0.02021990394592285, 0.020145503997802735, 0.020286111831665038, 0.020076831817626952, 0.020248287200927733, 0.020182912826538085, 0.020172927856445314, 0.02026713562011719, 0.020234079360961915, 0.02019001579284668, 0.02013507270812988, 0.020095840454101562, 0.02027519989013672, 0.020404224395751954, 0.02021347236633301, 0.02012598419189453, 0.020072160720825197, 0.020044063568115233, 0.02006220817565918, 0.020074495315551756, 0.020266176223754883, 0.02008291244506836, 0.020119935989379882, 0.020111263275146483, 0.020388160705566406, 0.020172800064086914, 0.02006387138366699, 0.020089216232299804, 0.020215232849121093, 0.0200949764251709, 0.02010086441040039, 0.020111583709716798, 0.02004591941833496, 0.020212223052978515, 0.020150272369384766, 0.020116800308227538, 0.020177600860595703, 0.02013545608520508, 0.020197248458862303, 0.020139711380004883, 0.020226112365722658, 0.020063072204589843, 0.020068351745605468, 0.02004991912841797, 0.020099071502685546, 0.02003094482421875, 0.02010998344421387, 0.020062080383300783, 0.02012575912475586, 0.020151615142822266, 0.02007072067260742, 0.02011372756958008, 0.020178112030029297, 0.02041324806213379, 0.020496383666992187, 0.020442367553710938, 0.02026927947998047, 0.02026451110839844, 0.020360160827636718, 0.02029737663269043, 0.020291936874389647, 0.020206975936889648, 0.020287359237670898, 0.020189952850341798, 0.020125696182250977, 0.02016041564941406, 0.020164735794067384, 0.020106592178344727, 0.02013657569885254, 0.020135936737060548, 0.02015145683288574, 0.020189247131347655, 0.02145964813232422, 0.020557151794433594, 0.020238624572753907, 0.020468128204345702, 0.020353023529052734, 0.020131839752197265, 0.02019046401977539, 0.020155136108398437, 0.020155679702758788, 0.020162879943847658, 0.020066015243530272, 0.02018387222290039, 0.020022687911987306, 0.02003955268859863, 0.020009151458740236, 0.02012179183959961, 0.020081888198852538, 0.020204128265380858, 0.020212127685546876, 0.020841503143310548, 0.02094179153442383, 0.020672191619873048, 0.02053356742858887, 0.020341951370239256, 0.020169408798217773, 0.020629568099975584, 0.020369472503662108, 0.020136224746704103, 0.020309728622436525, 0.020264959335327147, 0.020286975860595705, 0.020226560592651367, 0.020237600326538086, 0.02017967987060547, 0.020174400329589844, 0.02009542465209961, 0.02032614326477051, 0.020232448577880858, 0.02002943992614746, 0.02006559944152832, 
0.020134143829345703, 0.020112831115722655, 0.02002841567993164, 0.020113311767578124, 0.02011532783508301, 0.020146400451660156, 0.020094911575317384, 0.020130943298339844, 0.02021062469482422, 0.020297439575195312, 0.02020089530944824, 0.020228960037231444, 0.02020547294616699, 0.0202589111328125, 0.020287488937377928, 0.020264671325683593, 0.02032217597961426, 0.020343040466308592, 0.020292800903320314, 0.020138975143432616, 0.02027123260498047, 0.02036467170715332, 0.020186784744262696, 0.020093088150024415, 0.020076959609985352, 0.021663551330566407, 0.020729631423950196, 0.02037507247924805, 0.02034550476074219, 0.020173023223876953, 0.02011071968078613, 0.020136159896850588, 0.020074176788330077, 0.020048608779907228, 0.02008998489379883, 0.020138463973999023, 0.02006671905517578, 0.02002739143371582, 0.020172800064086914, 0.020178943634033202, 0.020281183242797853, 0.020312223434448242, 0.02027724838256836, 0.02026700782775879, 0.02023756790161133, 0.020208383560180666, 0.020412416458129884, 0.020518463134765626, 0.020332992553710936, 0.02020351982116699, 0.020202783584594725, 0.020396415710449218, 0.020629215240478515, 0.02039583969116211, 0.020456256866455077, 0.020520191192626953, 0.020398847579956053, 0.02189516830444336, 0.021045120239257812, 0.021192832946777342, 0.02072800064086914, 0.020289344787597655, 0.020159584045410156, 0.020315135955810547, 0.020084543228149412, 0.020142175674438476, 0.020104448318481447, 0.02013670349121094, 0.020256767272949217, 0.02026412773132324, 0.02021049690246582, 0.02007040023803711, 0.020365407943725586, 0.020072351455688475, 0.020178592681884766, 0.020453535079956054, 0.020179136276245117, 0.020171903610229493, 0.020130367279052735, 0.020160831451416016, 0.020740352630615234, 0.02069273567199707, 0.020630624771118163, 0.0206713924407959, 0.020539104461669924, 0.020254720687866212, 0.020227519989013672, 0.020447551727294924, 0.020270784378051757, 0.020208192825317384, 0.02019708824157715, 0.020093215942382812, 0.020069408416748046, 0.020165599822998048, 0.02020147132873535, 0.02011955261230469, 0.02004604721069336, 0.020053632736206056, 0.02001862335205078, 0.020177631378173827, 0.02021171188354492, 0.020127391815185545, 0.02009123229980469, 0.020167776107788086, 0.02039695930480957, 0.020174848556518556, 0.020144384384155275, 0.020188928604125977, 0.020213567733764648, 0.020182975769042967, 0.020115711212158202, 0.02231500816345215, 0.021888383865356444, 0.02017286491394043, 0.02031827163696289, 0.020119359970092773, 0.02000761604309082, 0.02010851287841797, 0.02010601615905762, 0.02010316848754883, 0.020260351181030273, 0.020464128494262695, 0.02051411247253418, 0.02022879981994629, 0.020133535385131837, 0.020136287689208984, 0.020142080307006836, 0.020146175384521483, 0.020193248748779296, 0.020129919052124023, 0.02015376091003418, 0.020219680786132812, 0.020128416061401366, 0.020221824645996093, 0.020226240158081055, 0.020120864868164064, 0.020331520080566406, 0.02014156723022461, 0.020133344650268555, 0.020318431854248045, 0.020539615631103517, 0.020437280654907228, 0.0208240966796875, 0.020520959854125977, 0.02082611274719238, 0.02038374328613281, 0.02039971160888672, 0.020343135833740235, 0.02015884780883789, 0.020219200134277342, 0.020131711959838868, 0.020112192153930664, 0.020262271881103515, 0.020804224014282228, 0.020303871154785155, 0.020286624908447265, 0.020218719482421876, 0.020238336563110353, 0.020298751831054687, 0.020274175643920898, 0.020162559509277343, 0.02023129653930664, 0.02026380729675293, 0.02019327926635742, 
0.02007619285583496, 0.020160863876342774, 0.020131839752197265, 0.02011907196044922, 0.020170272827148436, 0.020071359634399415, 0.02009235191345215, 0.019951583862304688, 0.019995136260986326, 0.020176416397094728, 0.020002975463867187, 0.019990528106689453, 0.02008720016479492, 0.02004547119140625, 0.020197120666503907, 0.020457632064819337, 0.020302528381347655, 0.02017817687988281, 0.020056575775146485, 0.020215808868408205, 0.02021785545349121, 0.020223039627075196, 0.020109535217285156, 0.020251199722290038, 0.0202446403503418, 0.020183040618896485, 0.020120960235595703, 0.020142816543579103, 0.02039504051208496, 0.020392671585083007, 0.020256927490234375, 0.020234176635742188, 0.020149951934814454, 0.020121952056884766, 0.020616863250732424, 0.020461952209472656, 0.020563968658447264, 0.020348031997680663, 0.020419456481933593, 0.02033459281921387, 0.020314111709594726, 0.02024448013305664, 0.020125696182250977, 0.020161951065063476, 0.020261472702026367, 0.02021990394592285, 0.02036639976501465]",tokens/s,49.26824129637119,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1844.375552,2760.835072,0.0,2365.587456,2314.318336,s,1,8.9506689453125,8.9506689453125,0.0,8.9506689453125,8.9506689453125,8.9506689453125,8.9506689453125,[8.9506689453125],,kWh,5.476549709167102e-05,6.033410756598026e-06,1.7303069397997528e-05,7.810197724626658e-05,,MB,1901.416448,3092.185088,0.0,2682.257408,2607.60832,s,10,0.4811466560363769,0.04811466560363769,0.0001687004770132298,0.04807537651062012,0.04819355430603028,0.04839336986541748,0.04855322231292725,"[0.04859318542480469, 0.048093952178955075, 0.048094879150390624, 0.047963359832763675, 0.048149150848388673, 0.047989856719970705, 0.048056800842285155, 0.048127326965332035, 0.04802908706665039, 0.04804905700683594]",tokens/s,5320.623073823155,kWh,1.4723514423158077e-06,1.6237270339468906e-07,9.705856452562412e-07,2.605309790966738e-06,tokens/kWh,98260867.4360401,MB,1901.416448,3092.185088,0.0,2682.257408,2607.61088,s,10,13.3271533203125,1.33271533203125,0.007290476577824268,1.3307625732421875,1.3393217407226563,1.344705975341797,1.3490133630371095,"[1.338125244140625, 1.3302781982421874, 1.3379046630859375, 1.3288638916015625, 1.3312469482421876, 1.333383544921875, 1.3261055908203125, 1.3270718994140625, 1.3240831298828124, 1.3500902099609375]",tokens/s,47.27191057671629,kWh,3.910087619434966e-05,4.312441178818341e-06,1.927501388454316e-05,6.268833125771117e-05,tokens/kWh,1004971.7185963615,,s,630,13.32509829330445,0.021150949671911817,0.0003596681577085467,0.021049039840698242,0.021470634269714357,0.021678290939331053,0.022570490989685058,"[0.021683935165405274, 0.021206464767456055, 0.02124880027770996, 0.021204704284667968, 0.021231903076171874, 0.02125833511352539, 0.02138313674926758, 0.021178367614746094, 0.021525983810424806, 0.021155712127685546, 0.02108624076843262, 0.021266847610473632, 
0.021016799926757812, 0.020956672668457032, 0.020959999084472655, 0.021022464752197264, 0.022714368820190428, 0.022293983459472658, 0.021531167984008788, 0.02164249610900879, 0.021764768600463866, 0.02130544090270996, 0.02149580764770508, 0.021295103073120117, 0.021142911911010743, 0.02130803108215332, 0.02105548858642578, 0.02101862335205078, 0.0209562873840332, 0.020994239807128907, 0.020945024490356446, 0.020905792236328127, 0.02089241600036621, 0.020883455276489257, 0.0209322566986084, 0.020883295059204103, 0.02093721580505371, 0.021604352951049805, 0.02146713638305664, 0.021315231323242187, 0.021356800079345702, 0.021243999481201172, 0.02118592071533203, 0.02099468803405762, 0.02139321517944336, 0.020942304611206056, 0.020894432067871095, 0.021184095382690428, 0.020996511459350584, 0.02100614356994629, 0.021072063446044922, 0.021020671844482423, 0.0210098876953125, 0.02096774482727051, 0.021186784744262697, 0.021255647659301758, 0.021690847396850586, 0.021300544738769533, 0.021616512298583985, 0.02122550392150879, 0.02123673629760742, 0.021303136825561522, 0.021175680160522462, 0.021351423263549805, 0.021388288497924804, 0.021223424911499023, 0.021172224044799806, 0.021262048721313476, 0.021409568786621095, 0.021019136428833008, 0.021110048294067384, 0.021056127548217774, 0.02092451286315918, 0.020985343933105468, 0.021078367233276368, 0.02091801643371582, 0.02082601547241211, 0.020976127624511717, 0.020908031463623047, 0.0209256649017334, 0.02082896041870117, 0.02099363136291504, 0.02108457565307617, 0.020977216720581053, 0.020840448379516603, 0.020984224319458008, 0.021071903228759764, 0.021108448028564455, 0.02097952079772949, 0.021221855163574218, 0.021131103515625, 0.02119910430908203, 0.02099600028991699, 0.02105958366394043, 0.021161888122558595, 0.02102012825012207, 0.020988767623901367, 0.021112607955932616, 0.02101862335205078, 0.02091619110107422, 0.02096131134033203, 0.021352224349975586, 0.025118080139160157, 0.021371360778808593, 0.02103036880493164, 0.02111123275756836, 0.021114751815795897, 0.021002847671508788, 0.02104934310913086, 0.02125619125366211, 0.020975616455078124, 0.020948991775512696, 0.021059423446655273, 0.02112335968017578, 0.020950912475585937, 0.021027040481567384, 0.020948768615722656, 0.020968448638916014, 0.020888576507568358, 0.020824064254760744, 0.020909952163696288, 0.02105523109436035, 0.020967008590698243, 0.02085126495361328, 0.020930976867675782, 0.021014368057250977, 0.021209087371826172, 0.021170175552368165, 0.021370880126953123, 0.02274620819091797, 0.021441408157348633, 0.021121055603027343, 0.021016576766967773, 0.021237695693969726, 0.02104751968383789, 0.02087715148925781, 0.020996095657348633, 0.021440511703491212, 0.021059488296508787, 0.020946399688720703, 0.02107980728149414, 0.021041759490966795, 0.020995967864990233, 0.02090025520324707, 0.02108006477355957, 0.02111267280578613, 0.02102697563171387, 0.02101558494567871, 0.022539520263671876, 0.021525472640991212, 0.02142393684387207, 0.021211360931396483, 0.021266847610473632, 0.02128108787536621, 0.021073087692260743, 0.02092051124572754, 0.02096143913269043, 0.020989984512329102, 0.020974016189575194, 0.02087321662902832, 0.02230067253112793, 0.021985279083251954, 0.0210882568359375, 0.02115123176574707, 0.021488224029541016, 0.021500864028930665, 0.021269472122192382, 0.02106777572631836, 0.022112255096435548, 0.0212541446685791, 0.022235136032104492, 0.02112828826904297, 0.020974496841430663, 0.020942848205566408, 0.02084966468811035, 0.021081087112426757, 
0.021159936904907226, 0.0208855037689209, 0.021048992156982423, 0.020982112884521485, 0.020999807357788086, 0.020928415298461914, 0.021186304092407227, 0.021109344482421875, 0.021237728118896484, 0.0213209285736084, 0.02124076843261719, 0.021123071670532227, 0.021068000793457033, 0.021809535980224608, 0.022754783630371093, 0.021852640151977538, 0.021133279800415038, 0.021121248245239258, 0.02104662322998047, 0.0211976318359375, 0.021431711196899413, 0.021158079147338867, 0.021104543685913087, 0.021090303421020508, 0.021021087646484374, 0.02124928092956543, 0.0209334716796875, 0.02102272033691406, 0.021276351928710937, 0.021279327392578123, 0.021108448028564455, 0.021145599365234375, 0.021149919509887694, 0.020999488830566407, 0.020941280364990236, 0.021099967956542967, 0.02105606460571289, 0.020965375900268556, 0.020805631637573242, 0.02100223922729492, 0.020930559158325195, 0.020930240631103516, 0.020810047149658204, 0.020987903594970703, 0.021016576766967773, 0.020985855102539062, 0.020908031463623047, 0.021006336212158205, 0.021071840286254882, 0.02105731201171875, 0.02088310432434082, 0.0211212158203125, 0.020952735900878906, 0.020994335174560546, 0.02083683204650879, 0.020922367095947265, 0.020987520217895506, 0.02103334426879883, 0.02103910446166992, 0.02100249671936035, 0.020944639205932616, 0.020940000534057618, 0.021008352279663085, 0.020939584732055663, 0.021089696884155275, 0.02110678482055664, 0.020943359375, 0.021019872665405274, 0.02127952003479004, 0.021228799819946288, 0.02094095993041992, 0.021054048538208008, 0.021009727478027342, 0.021037759780883788, 0.020764671325683593, 0.0211190071105957, 0.021360639572143555, 0.020975616455078124, 0.020975616455078124, 0.021026336669921875, 0.02103548812866211, 0.02083951950073242, 0.02108233642578125, 0.021283519744873046, 0.021049087524414062, 0.020885759353637696, 0.02105548858642578, 0.020998144149780275, 0.020957183837890626, 0.020792415618896484, 0.023914400100708007, 0.02098259162902832, 0.020998016357421875, 0.021020992279052735, 0.02107187271118164, 0.0208855037689209, 0.021028287887573244, 0.02091641616821289, 0.021022815704345704, 0.02091651153564453, 0.02103910446166992, 0.021156959533691407, 0.021140256881713868, 0.021112384796142577, 0.021151872634887697, 0.021078720092773437, 0.02103500747680664, 0.021144800186157227, 0.021092256546020507, 0.020900224685668944, 0.0210762882232666, 0.02106883239746094, 0.02106051254272461, 0.020960384368896485, 0.02107846450805664, 0.021174720764160156, 0.02105936050415039, 0.020888063430786134, 0.02099171257019043, 0.02108415985107422, 0.02188697624206543, 0.022574527740478516, 0.022542911529541014, 0.021349632263183593, 0.02104707145690918, 0.02090902328491211, 0.020998144149780275, 0.020989023208618163, 0.020960159301757812, 0.020807680130004884, 0.020809728622436522, 0.021016576766967773, 0.021040447235107423, 0.020818111419677734, 0.02088150405883789, 0.020994367599487303, 0.021131359100341796, 0.020991167068481444, 0.020939680099487306, 0.02143436813354492, 0.02119081687927246, 0.021217119216918944, 0.021139007568359375, 0.021168575286865235, 0.020936704635620116, 0.02105753517150879, 0.020964704513549804, 0.021026880264282226, 0.02079190444946289, 0.021009759902954103, 0.02097171211242676, 0.02103343963623047, 0.02230271911621094, 0.021005727767944335, 0.021027423858642577, 0.02127667236328125, 0.021020671844482423, 0.02115932846069336, 0.022270559310913086, 0.022675455093383787, 0.02198259162902832, 0.021295040130615235, 0.021258464813232424, 0.021251680374145508, 
0.02118124771118164, 0.021495872497558594, 0.02204038429260254, 0.0214704647064209, 0.021316768646240235, 0.021540319442749024, 0.02116640090942383, 0.02102681541442871, 0.020985952377319338, 0.021159839630126954, 0.020981760025024415, 0.020914176940917968, 0.02108006477355957, 0.02107961654663086, 0.02098953628540039, 0.02095939254760742, 0.020949440002441408, 0.021072128295898437, 0.020946943283081054, 0.020975168228149415, 0.020976064682006836, 0.021000192642211913, 0.020903200149536134, 0.020820703506469727, 0.02086499214172363, 0.021019775390625, 0.020962207794189454, 0.02091632080078125, 0.020940704345703123, 0.020975008010864257, 0.02093116760253906, 0.020998016357421875, 0.020899967193603517, 0.02111894416809082, 0.021081920623779296, 0.021043424606323243, 0.020899839401245117, 0.021040576934814453, 0.021463552474975587, 0.021179712295532227, 0.021365440368652344, 0.02119171142578125, 0.02103183937072754, 0.02104252815246582, 0.021159648895263672, 0.021104991912841795, 0.021232288360595705, 0.02104934310913086, 0.021086208343505858, 0.020973567962646485, 0.02104473686218262, 0.02106825637817383, 0.02119398307800293, 0.02102992057800293, 0.021130752563476563, 0.02101683235168457, 0.020985855102539062, 0.02086502456665039, 0.020941951751708984, 0.020902687072753907, 0.020918560028076173, 0.020952896118164064, 0.020899839401245117, 0.02091823959350586, 0.021702688217163087, 0.02095871925354004, 0.02096588706970215, 0.020874528884887694, 0.02094304084777832, 0.020895679473876952, 0.020959840774536134, 0.021014528274536134, 0.02099945640563965, 0.02125270462036133, 0.020947071075439454, 0.0208855037689209, 0.020951040267944337, 0.02108598327636719, 0.021186784744262697, 0.020963327407836914, 0.02090291213989258, 0.02091667175292969, 0.020874048233032228, 0.02094259262084961, 0.021046848297119142, 0.021121471405029298, 0.020953088760375976, 0.02104934310913086, 0.02104662322998047, 0.020925088882446288, 0.020985408782958983, 0.020895200729370116, 0.021035455703735353, 0.021194528579711915, 0.02128086471557617, 0.021559968948364257, 0.02105548858642578, 0.021051008224487303, 0.020961664199829103, 0.020847904205322267, 0.020933055877685548, 0.021200767517089845, 0.021102592468261717, 0.021024032592773436, 0.020973695755004882, 0.02101433563232422, 0.020919071197509766, 0.02093427276611328, 0.020928895950317383, 0.021088512420654296, 0.020919776916503905, 0.020966976165771485, 0.02092076873779297, 0.020875551223754882, 0.02098588752746582, 0.021211360931396483, 0.020856576919555662, 0.020971359252929686, 0.02104867172241211, 0.0209866886138916, 0.020864639282226562, 0.020969247817993163, 0.02100079917907715, 0.020973567962646485, 0.020926464080810548, 0.020969472885131835, 0.02097942352294922, 0.021012767791748047, 0.020981760025024415, 0.020950239181518556, 0.021551904678344728, 0.02137628746032715, 0.021301984786987305, 0.02116329574584961, 0.020929248809814453, 0.020996095657348633, 0.02112719917297363, 0.021382911682128906, 0.021155424118041992, 0.021484159469604493, 0.021294527053833008, 0.021353023529052734, 0.02128486442565918, 0.02123980712890625, 0.02094486427307129, 0.020942527770996092, 0.02100259208679199, 0.021112224578857423, 0.02189516830444336, 0.02100079917907715, 0.021018112182617187, 0.021066240310668945, 0.021000192642211913, 0.020983007431030272, 0.02093484878540039, 0.020987743377685546, 0.021060352325439454, 0.021024768829345702, 0.020938751220703124, 0.020932607650756836, 0.02087731170654297, 0.020888864517211916, 0.021097152709960938, 0.02095088005065918, 
0.021437984466552734, 0.021333471298217773, 0.021268768310546873, 0.021111520767211914, 0.021052480697631836, 0.020923328399658204, 0.020934431076049805, 0.020934879302978517, 0.021182464599609374, 0.02102079963684082, 0.021214176177978515, 0.0210482234954834, 0.02145894432067871, 0.0209017276763916, 0.021030399322509767, 0.02097587203979492, 0.020945215225219728, 0.020842592239379884, 0.020908031463623047, 0.02091606330871582, 0.020882911682128906, 0.021318336486816407, 0.020987903594970703, 0.021063135147094725, 0.02122127914428711, 0.020899520874023438, 0.02084550476074219, 0.020959232330322267, 0.02101161575317383, 0.020874080657958986, 0.0209039363861084, 0.02090188789367676, 0.020880544662475586, 0.020898624420166014, 0.02087062454223633, 0.02078982353210449, 0.02091366386413574, 0.020868608474731445, 0.020956159591674805, 0.020944896697998046, 0.02108415985107422, 0.020944000244140625, 0.02110348892211914, 0.02087731170654297, 0.02109644889831543, 0.020872800827026368, 0.021029279708862304, 0.020868928909301757, 0.02077510452270508, 0.020840448379516603, 0.021372928619384765, 0.020832256317138673, 0.020841760635375975, 0.020842752456665038, 0.020924896240234376, 0.021010112762451173, 0.020829919815063477, 0.020877920150756835, 0.020948991775512696, 0.02104729652404785, 0.021415519714355468, 0.021307775497436524, 0.021671392440795897, 0.021772480010986327, 0.021323680877685547, 0.021288415908813477, 0.021453344345092773, 0.02129817581176758, 0.02123673629760742, 0.02155731201171875, 0.021603872299194336, 0.021340576171875, 0.021356512069702148, 0.02140310478210449, 0.021496416091918946, 0.021446624755859376, 0.02147545623779297, 0.02146236801147461, 0.02148796844482422, 0.02154719924926758, 0.021433984756469727, 0.021477760314941405, 0.021921760559082033, 0.022394912719726563, 0.021501184463500977, 0.021463808059692384, 0.021348352432250976, 0.02125619125366211, 0.021331039428710938, 0.021472160339355468, 0.021202943801879884, 0.021177759170532228, 0.021168479919433592, 0.02114995193481445, 0.021137407302856445, 0.021170080184936522, 0.02106787109375, 0.021196800231933592, 0.021366527557373047, 0.02121855926513672, 0.02107084846496582, 0.021282623291015625, 0.021136959075927733, 0.02116057586669922, 0.021227487564086912, 0.02138889694213867, 0.021249759674072267, 0.021232351303100586, 0.021336063385009766, 0.02115932846069336, 0.02119536018371582, 0.021168127059936523, 0.021202943801879884, 0.02256060791015625, 0.022495391845703126, 0.02163705635070801, 0.021458272933959962, 0.02161123275756836, 0.021817344665527344, 0.02160220718383789, 0.02152569580078125, 0.021584064483642577, 0.021575775146484375, 0.021580703735351564, 0.021397344589233397, 0.021231487274169923]",tokens/s,47.27920095843199,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,823.488512,554.631168,0.0,159.383552,143.673856,s,1,7.305361328125,7.305361328125,0.0,7.305361328125,7.305361328125,7.305361328125,7.305361328125,[7.305361328125],,kWh,1.1056477220840102e-05,1.2058617879394831e-06,3.5866695360028955e-06,1.584900854478248e-05,,MB,1305.423872,609.15712,0.0,199.22944,186.684928,s,32,0.19960160017013548,0.006237550005316735,0.00015914221107278168,0.006206959962844849,0.0062986785411834716,0.006448363280296326,0.0068627054214477545,"[0.0063146882057189945, 0.00616428804397583, 0.006129983901977539, 0.006231584072113037, 0.006142784118652344, 0.006127295970916748, 0.006237760066986084, 0.006172063827514648, 0.0061476478576660155, 0.006155168056488037, 0.006201024055480957, 0.006183008193969727, 0.006218016147613525, 0.006250720024108886, 0.0062919678688049315, 0.006138720035552978, 0.006232992172241211, 0.0062548799514770504, 0.006299424171447754, 0.006134943962097168, 0.006259615898132324, 0.006975456237792969, 0.0062202239036560055, 0.006215839862823486, 0.006187359809875489, 0.0062410240173339845, 0.006119743824005127, 0.006167327880859375, 0.006160384178161621, 0.006611743927001953, 0.006207007884979248, 0.006206912040710449]",tokens/s,41041.75514132823,kWh,1.8462630307294917e-07,2.0361089943147197e-08,7.79196280113859e-08,2.829070210274823e-07,tokens/kWh,904890939.327842,MB,1319.186432,613.351424,0.0,203.423744,186.687488,s,32,9.894926513671873,0.30921645355224614,0.0016198589206653123,0.30923019409179686,0.31221292419433594,0.3126740478515625,0.3129811117553711,"[0.31301141357421874, 0.30927874755859375, 0.3124779968261719, 0.30763729858398436, 0.30926971435546874, 0.31235183715820314, 0.30713351440429687, 0.30975051879882814, 0.3097658996582031, 0.3073489990234375, 0.30855889892578126, 0.309537109375, 0.30816668701171873, 0.30962884521484374, 0.30777706909179686, 0.30975933837890623, 0.30842816162109377, 0.3079856872558594, 0.3084017333984375, 0.30803717041015627, 0.3129136657714844, 0.3104873046875, 0.3094598693847656, 0.30852020263671875, 0.31096270751953126, 0.3075557250976563, 0.3083272705078125, 0.3069648742675781, 0.30923321533203124, 0.3092739868164063, 0.3092271728515625, 0.30769387817382815]",tokens/s,203.7407753573997,kWh,9.012871368541888e-06,9.939593985697612e-07,3.2511715769261455e-06,1.3258002344037798e-05,tokens/kWh,4751847.100730939,,s,2016,9.880397473812101,0.00490099081042267,0.0001024788382667862,0.004880608081817627,0.004958703994750976,0.005008639931678772,0.005323640060424802,"[0.004883200168609619, 0.004953279972076416, 0.004961343765258789, 0.00510265588760376, 0.00504527997970581, 0.005024991989135742, 0.005040639877319336, 0.005111743927001953, 0.004993343830108642, 0.004972288131713867, 0.004968575954437256, 0.00499721622467041, 0.004990047931671142, 0.004978367805480957, 0.004932544231414795, 0.004988480091094971, 0.004948416233062744, 0.004912576198577881, 0.004884384155273438, 0.005165152072906494, 0.004973120212554931, 0.004969664096832275, 0.005055295944213867, 0.004997119903564453, 0.004910943984985351, 0.004894911766052246, 0.004877664089202881, 0.0050466561317443846, 0.004924767971038818, 0.004904032230377197, 0.004883296012878418, 0.004971072196960449, 0.004937344074249268, 0.004936480045318604, 0.004922880172729492, 0.0049177598953247074, 0.005053952217102051, 0.0049706239700317385, 0.004906752109527588, 0.004887167930603027, 0.004962495803833008, 0.0049498238563537595, 0.004886879920959473, 0.004884223937988281, 0.004931488037109375, 
0.004898111820220947, 0.004861824035644531, 0.004846496105194092, 0.004960159778594971, 0.004958208084106445, 0.005238783836364746, 0.005243040084838867, 0.004990848064422607, 0.0049417920112609865, 0.004863999843597412, 0.004848991870880127, 0.0049424958229064946, 0.0049459199905395506, 0.004896895885467529, 0.004869440078735352, 0.004977215766906738, 0.0049642882347106934, 0.0049194879531860355, 0.004890719890594483, 0.004903808116912842, 0.004904191970825195, 0.004895487785339356, 0.004949567794799804, 0.004917695999145508, 0.0049040641784667965, 0.004940671920776367, 0.004983871936798096, 0.004920000076293945, 0.004914624214172363, 0.004891039848327637, 0.004942240238189698, 0.004978687763214112, 0.004894720077514648, 0.004890175819396972, 0.004880832195281982, 0.005065824031829834, 0.004891551971435547, 0.004884479999542236, 0.004843520164489746, 0.004949632167816162, 0.004906623840332031, 0.0048455362319946285, 0.004846367835998535, 0.004835328102111816, 0.004916639804840088, 0.004878943920135498, 0.004853824138641358, 0.0048949761390686035, 0.004928671836853028, 0.004901408195495605, 0.00486195182800293, 0.004845471858978272, 0.004843616008758545, 0.004915071964263916, 0.004877952098846436, 0.004860415935516358, 0.004866047859191895, 0.004917247772216797, 0.004910079956054687, 0.004870240211486816, 0.0049081602096557615, 0.004859200000762939, 0.004915103912353515, 0.0048707199096679685, 0.004839647769927979, 0.004871967792510986, 0.005203487873077392, 0.0048865280151367185, 0.004857952117919922, 0.004831168174743652, 0.004940224170684814, 0.004941760063171386, 0.004881792068481446, 0.004872928142547608, 0.0049552001953125, 0.0049222722053527835, 0.004892799854278564, 0.004867231845855713, 0.004868031978607178, 0.004895423889160156, 0.0049348797798156735, 0.004910975933074951, 0.004876607894897461, 0.004896448135375976, 0.004853919982910156, 0.004913055896759033, 0.004892288208007812, 0.004886847972869873, 0.0049090561866760255, 0.004867455959320068, 0.004942207813262939, 0.004982175827026367, 0.004860383987426758, 0.004872576236724853, 0.004941504001617431, 0.00490118408203125, 0.004875936031341553, 0.004864672183990479, 0.0048717761039733885, 0.00492080020904541, 0.004888351917266846, 0.004871007919311524, 0.004860127925872803, 0.005211071968078613, 0.004926271915435791, 0.00490499210357666, 0.004885568141937256, 0.0049304962158203125, 0.004935679912567138, 0.004888576030731201, 0.0048676800727844236, 0.0048726401329040525, 0.004927072048187256, 0.004910880088806152, 0.0048707518577575685, 0.004882431983947754, 0.004912191867828369, 0.004903168201446533, 0.004967103958129883, 0.0048858561515808105, 0.004934304237365723, 0.004931583881378174, 0.004881951808929443, 0.004868256092071533, 0.005226560115814209, 0.005945824146270752, 0.006082335948944092, 0.005511168003082275, 0.00488150405883789, 0.004871071815490723, 0.004909311771392822, 0.004931551933288574, 0.004892127990722656, 0.004880671977996826, 0.004857183933258057, 0.004930079936981201, 0.00495798397064209, 0.004874623775482177, 0.004872223854064941, 0.004882336139678955, 0.004916736125946045, 0.0048932480812072755, 0.0048782720565795894, 0.004861760139465332, 0.004765920162200928, 0.0048412480354309085, 0.0049192957878112795, 0.0048558077812194825, 0.004869376182556152, 0.004844287872314453, 0.004917503833770752, 0.004863743782043457, 0.004833439826965332, 0.0048495039939880375, 0.004838880062103272, 0.00491267204284668, 0.004883456230163574, 0.004833280086517334, 0.00506060791015625, 0.004902112007141113, 
0.004879136085510254, 0.004874239921569825, 0.004851712226867676, 0.004847519874572754, 0.004908927917480469, 0.004890560150146484, 0.004851488113403321, 0.004839104175567627, 0.004903744220733643, 0.004857855796813965, 0.004911104202270508, 0.004834496021270752, 0.004823872089385986, 0.005014912128448486, 0.00487494421005249, 0.004868031978607178, 0.004837376117706299, 0.004888576030731201, 0.004882080078125, 0.004862592220306396, 0.004840799808502197, 0.004901247978210449, 0.004959743976593017, 0.004881087779998779, 0.004873600006103516, 0.004884928226470947, 0.004888031959533691, 0.0049054079055786135, 0.004860000133514404, 0.004845664024353027, 0.004873695850372314, 0.004938176155090332, 0.004861760139465332, 0.004821248054504394, 0.0048167362213134765, 0.0048288640975952144, 0.004917695999145508, 0.004881663799285889, 0.004850399971008301, 0.004851583957672119, 0.004890175819396972, 0.004860095977783203, 0.0048520960807800296, 0.004837376117706299, 0.004843039989471436, 0.005059135913848877, 0.004891808032989502, 0.004814464092254639, 0.004858240127563477, 0.004861536026000976, 0.00485478401184082, 0.004921504020690918, 0.004882527828216553, 0.004857600212097168, 0.004843520164489746, 0.0061512961387634275, 0.00503219223022461, 0.004885119915008545, 0.004923391819000244, 0.0049268798828125, 0.004880767822265625, 0.004849696159362793, 0.004892864227294922, 0.004947968006134033, 0.004892672061920166, 0.004910975933074951, 0.004860032081604004, 0.004892447948455811, 0.004903295993804932, 0.0048576960563659665, 0.004838687896728516, 0.004842207908630371, 0.004919583797454834, 0.0048657598495483395, 0.004875775814056397, 0.004919807910919189, 0.004837471961975098, 0.004863903999328613, 0.004888576030731201, 0.00482316780090332, 0.004945792198181152, 0.00491315221786499, 0.0048631677627563475, 0.004851967811584473, 0.004837855815887451, 0.004873792171478272, 0.0048932161331176754, 0.004856063842773438, 0.0048427519798278805, 0.00483187198638916, 0.00486406421661377, 0.004929312229156494, 0.004874112129211426, 0.004829343795776367, 0.004843135833740234, 0.004933760166168213, 0.004884736061096191, 0.004864287853240967, 0.00486137580871582, 0.004870431900024414, 0.004917151927947998, 0.004870240211486816, 0.004876287937164306, 0.004875840187072754, 0.005017216205596924, 0.004891456127166748, 0.004901887893676758, 0.004870175838470459, 0.0048752322196960445, 0.004884479999542236, 0.0048148479461669925, 0.005328896045684814, 0.004988704204559326, 0.005273888111114502, 0.005115839958190918, 0.005059936046600342, 0.005655200004577637, 0.005015744209289551, 0.004933055877685547, 0.004913536071777344, 0.004977695941925049, 0.004991968154907226, 0.0049194879531860355, 0.0048781437873840336, 0.005010623931884766, 0.004971327781677246, 0.004911359786987305, 0.004886271953582763, 0.0048717761039733885, 0.004921343803405762, 0.004926208019256592, 0.004969279766082763, 0.004868192195892334, 0.0049712638854980465, 0.004918911933898926, 0.004891007900238037, 0.004857855796813965, 0.004882207870483398, 0.004919616222381592, 0.004894464015960693, 0.004894271850585937, 0.004905568122863769, 0.004884479999542236, 0.004952064037322998, 0.005062655925750732, 0.004935679912567138, 0.005176896095275879, 0.005029888153076172, 0.004959968090057373, 0.004863999843597412, 0.005, 0.00495411205291748, 0.00489462423324585, 0.004916863918304443, 0.004899199962615967, 0.004947968006134033, 0.004895071983337402, 0.004883168220520019, 0.004910016059875488, 0.004876192092895508, 0.004947904109954834, 0.004887904167175293, 
0.004862239837646484, 0.0048603200912475586, 0.004923520088195801, 0.004940127849578858, 0.004881887912750244, 0.004870399951934814, 0.004902847766876221, 0.004925439834594727, 0.004854015827178955, 0.004835072040557861, 0.0048364481925964355, 0.004737984180450439, 0.004821951866149902, 0.004923264026641846, 0.0048512320518493655, 0.004825568199157715, 0.004839424133300781, 0.004864128112792969, 0.004878047943115234, 0.004845248222351074, 0.004861440181732178, 0.004836160182952881, 0.004940095901489258, 0.004875360012054443, 0.004846335887908936, 0.004847616195678711, 0.004839360237121582, 0.004929535865783692, 0.004847392082214356, 0.004865503787994384, 0.00483241605758667, 0.004876192092895508, 0.004869472026824951, 0.004845983982086181, 0.004835328102111816, 0.004869503974914551, 0.004919936180114746, 0.004884479999542236, 0.004843520164489746, 0.004881663799285889, 0.004865824222564697, 0.00489299201965332, 0.004852384090423584, 0.004857855796813965, 0.004833280086517334, 0.0048865280151367185, 0.004871935844421387, 0.004829728126525879, 0.004831103801727295, 0.00483135986328125, 0.005004928112030029, 0.0048807039260864256, 0.004950079917907715, 0.004824927806854248, 0.004894432067871094, 0.004841983795166016, 0.004877888202667237, 0.004829311847686767, 0.0048373441696166995, 0.0050013761520385745, 0.004875391960144043, 0.004845280170440674, 0.004909599781036377, 0.004960927963256836, 0.004890431880950927, 0.004851712226867676, 0.004851327896118164, 0.00483676815032959, 0.004903232097625733, 0.00488105583190918, 0.004820991992950439, 0.0048455681800842285, 0.0048717761039733885, 0.004911520004272461, 0.004815519809722901, 0.004883488178253174, 0.004831615924835205, 0.004847904205322265, 0.004843391895294189, 0.004901120185852051, 0.004851903915405274, 0.004851712226867676, 0.005111743927001953, 0.004943935871124267, 0.004988927841186524, 0.004886496067047119, 0.004847392082214356, 0.004857151985168457, 0.00489683198928833, 0.004862847805023193, 0.004873792171478272, 0.0048685441017150876, 0.004941823959350586, 0.0048800320625305175, 0.004900288105010986, 0.004872608184814453, 0.004926080226898193, 0.004963200092315673, 0.004887551784515381, 0.004882080078125, 0.00488640022277832, 0.004976384162902832, 0.00491593599319458, 0.005014688014984131, 0.005024608135223389, 0.004943168163299561, 0.004906847953796387, 0.004892767906188965, 0.004877056121826172, 0.004952352046966552, 0.004918176174163819, 0.004864831924438476, 0.005365471839904785, 0.004956319808959961, 0.004939583778381348, 0.0048657598495483395, 0.004858431816101075, 0.004861824035644531, 0.004935840129852295, 0.004958208084106445, 0.0048715839385986325, 0.0049014720916748045, 0.004972671985626221, 0.004915071964263916, 0.0048496642112731934, 0.004859903812408447, 0.0048510079383850095, 0.004921247959136963, 0.004917503833770752, 0.004951615810394287, 0.004860896110534668, 0.004931007862091065, 0.004903488159179688, 0.004839424133300781, 0.004857855796813965, 0.0048865280151367185, 0.0048798398971557615, 0.004788640022277832, 0.004847263813018799, 0.004858143806457519, 0.004847263813018799, 0.004863903999328613, 0.00493171215057373, 0.004871744155883789, 0.0049543361663818355, 0.004852255821228027, 0.0049316477775573735, 0.004877664089202881, 0.004926208019256592, 0.004853248119354248, 0.004993375778198242, 0.005007359981536865, 0.0049062399864196774, 0.004866335868835449, 0.0048501439094543455, 0.004900864124298096, 0.0048940801620483395, 0.004853983879089355, 0.004951712131500244, 0.004877056121826172, 0.004894720077514648, 
0.0049032001495361325, 0.004869311809539795, 0.00490550422668457, 0.00504633617401123, 0.004877888202667237, 0.004853280067443848, 0.004852575778961182, 0.004889984130859375, 0.004987520217895508, 0.004894720077514648, 0.004829440116882324, 0.004862944126129151, 0.004903711795806885, 0.004877823829650879, 0.004870656013488769, 0.004843520164489746, 0.004863999843597412, 0.004981056213378906, 0.004894144058227539, 0.005847296237945557, 0.00494217586517334, 0.004884191989898682, 0.005015488147735596, 0.004859903812408447, 0.0049359359741210935, 0.004887680053710938, 0.004956255912780762, 0.004872735977172852, 0.004878335952758789, 0.004919616222381592, 0.0049064640998840335, 0.004862175941467285, 0.004859903812408447, 0.0049192957878112795, 0.004898848056793213, 0.004835296154022217, 0.004838655948638916, 0.00485811185836792, 0.00492742395401001, 0.004800415992736816, 0.004882751941680908, 0.004905216217041016, 0.0048716158866882325, 0.004848192214965821, 0.004932928085327149, 0.004880799770355225, 0.0048373122215270995, 0.004887936115264893, 0.004909183979034424, 0.004864543914794922, 0.004861504077911377, 0.0049303040504455565, 0.004863999843597412, 0.004929664134979248, 0.004898943901062012, 0.0048477439880371094, 0.0048403840065002444, 0.00488489580154419, 0.0048932480812072755, 0.004839136123657227, 0.004851391792297363, 0.004845888137817383, 0.0049296321868896485, 0.004873824119567871, 0.004837696075439453, 0.0048455681800842285, 0.004841760158538818, 0.0048923840522766115, 0.004866047859191895, 0.0048492798805236815, 0.004920896053314209, 0.004911712169647217, 0.00486569595336914, 0.004840000152587891, 0.004831456184387207, 0.004849440097808838, 0.004918496131896973, 0.004879039764404297, 0.004833375930786133, 0.0048455681800842285, 0.004880383968353271, 0.004886847972869873, 0.0048821120262146, 0.004824639797210694, 0.004837823867797852, 0.0048886399269104, 0.004863647937774658, 0.004833407878875732, 0.004843679904937745, 0.004838560104370117, 0.005009791851043701, 0.004860383987426758, 0.004843743801116943, 0.004847392082214356, 0.004920671939849854, 0.004934304237365723, 0.004850016117095947, 0.004845215797424317, 0.004836832046508789, 0.004911647796630859, 0.004876192092895508, 0.004858272075653076, 0.0047964158058166504, 0.004858208179473877, 0.004851583957672119, 0.0049047360420227054, 0.0048757119178771974, 0.004849599838256836, 0.004850304126739502, 0.004844831943511963, 0.004942560195922851, 0.004896768093109131, 0.004876448154449463, 0.0048678722381591795, 0.004933695793151855, 0.004925280094146728, 0.004866208076477051, 0.004853759765625, 0.004842527866363525, 0.004905951976776123, 0.004859519958496094, 0.005039840221405029, 0.004881343841552734, 0.004910816192626953, 0.004910624027252198, 0.00485430383682251, 0.004868031978607178, 0.004902912139892578, 0.004894720077514648, 0.004856927871704102, 0.004866208076477051, 0.004909311771392822, 0.004901375770568848, 0.00484991979598999, 0.004837279796600342, 0.004864992141723633, 0.004879231929779053, 0.00494543981552124, 0.00487388801574707, 0.004833087921142578, 0.004838272094726563, 0.004923232078552246, 0.004960639953613281, 0.004964255809783936, 0.0048967041969299316, 0.005382527828216552, 0.004910912036895752, 0.004888095855712891, 0.004909567832946778, 0.004910943984985351, 0.004904767990112305, 0.004840928077697754, 0.004884672164916992, 0.004839136123657227, 0.004881216049194336, 0.004868095874786377, 0.004845759868621826, 0.0048455362319946285, 0.004865888118743896, 0.0049246401786804195, 0.004883552074432373, 
0.004830912113189697, 0.004839295864105224, 0.004933760166168213, 0.004857632160186767, 0.004804831981658935, 0.004863999843597412, 0.004837183952331543, 0.004855552196502686, 0.0048559679985046384, 0.004896224021911621, 0.00488102388381958, 0.0048334717750549315, 0.004838687896728516, 0.004917984008789063, 0.0048939199447631835, 0.004868607997894287, 0.0048393278121948245, 0.004868415832519532, 0.004931968212127685, 0.0048575358390808105, 0.004818848133087158, 0.004894815921783448, 0.0049049282073974606, 0.004971744060516358, 0.004850111961364746, 0.004860288143157959, 0.004871520042419433, 0.0049522237777709964, 0.004876736164093017, 0.004900928020477295, 0.00486521577835083, 0.0049714879989624025, 0.004908895969390869, 0.004853759765625, 0.0049472317695617675, 0.004848351955413818, 0.004928864002227783, 0.004869984149932861, 0.004850304126739502, 0.0048798398971557615, 0.005143231868743896, 0.0051528000831604, 0.004874239921569825, 0.004866047859191895, 0.004956160068511963, 0.004890624046325683, 0.004853472232818603, 0.004867360115051269, 0.004860288143157959, 0.005061247825622559, 0.004906400203704834, 0.004980607986450196, 0.004880576133728027, 0.004968992233276367, 0.004908576011657715, 0.004904895782470703, 0.0048830718994140624, 0.004941535949707031, 0.004955359935760498, 0.0048670401573181156, 0.004892672061920166, 0.004886144161224365, 0.005253536224365234, 0.004880159854888916, 0.004912864208221435, 0.00491158390045166, 0.004953919887542724, 0.004863999843597412, 0.004882016181945801, 0.004866464138031006, 0.004954368114471436, 0.00488640022277832, 0.004908927917480469, 0.004863679885864258, 0.004850080013275146, 0.004875199794769287, 0.004948639869689941, 0.004905280113220215, 0.00488159990310669, 0.0048661761283874515, 0.004900928020477295, 0.0049690880775451664, 0.004864128112792969, 0.004876160144805909, 0.0048314881324768065, 0.004927199840545654, 0.0048865599632263185, 0.004857503890991211, 0.004882368087768554, 0.00488489580154419, 0.0048865280151367185, 0.004955808162689209, 0.004842144012451172, 0.004839104175567627, 0.004907008171081543, 0.004870016098022461, 0.004831424236297607, 0.004848959922790527, 0.004829823970794678, 0.004917407989501953, 0.004920447826385498, 0.004898528099060059, 0.004834303855895996, 0.004892672061920166, 0.004888576030731201, 0.004888800144195557, 0.0048678722381591795, 0.0048410558700561525, 0.00493609619140625, 0.004890880107879639, 0.004846464157104493, 0.004957056045532227, 0.004919136047363281, 0.00488259220123291, 0.0048496642112731934, 0.004846911907196045, 0.004860608100891113, 0.004931839942932129, 0.004871712207794189, 0.004841695785522461, 0.004847904205322265, 0.004886240005493164, 0.004916607856750488, 0.004842175960540772, 0.004875199794769287, 0.0048585600852966305, 0.004925471782684326, 0.004946208000183106, 0.004877855777740478, 0.004931200027465821, 0.004750239849090576, 0.004900767803192139, 0.004933248043060303, 0.004868127822875976, 0.004897215843200684, 0.00499619197845459, 0.004928512096405029, 0.004887551784515381, 0.004848671913146973, 0.004904831886291504, 0.004836832046508789, 0.0049526081085205075, 0.0048558077812194825, 0.00491315221786499, 0.004868095874786377, 0.0049090561866760255, 0.004888576030731201, 0.0048492798805236815, 0.004854144096374512, 0.0048510398864746096, 0.004917632102966309, 0.004882719993591309, 0.004857855796813965, 0.004857855796813965, 0.004899007797241211, 0.004889855861663819, 0.00485433578491211, 0.004863264083862305, 0.0048544640541076664, 0.004913375854492187, 0.0048858880996704105, 
0.005234784126281739, 0.004850016117095947, 0.00491315221786499, 0.004904640197753906, 0.005220928192138672, 0.004859519958496094, 0.004954239845275879, 0.004959360122680664, 0.0048213438987731935, 0.004849535942077637, 0.0048559679985046384, 0.004933119773864746, 0.004856927871704102, 0.004837279796600342, 0.0048282241821289065, 0.0048975682258605956, 0.0048846077919006345, 0.004953855991363525, 0.004864287853240967, 0.004865056037902832, 0.004922239780426025, 0.004953184127807617, 0.004851744174957275, 0.005379360198974609, 0.004961984157562256, 0.004875552177429199, 0.00489734411239624, 0.004896927833557129, 0.004964000225067139, 0.004909408092498779, 0.004892672061920166, 0.004976640224456787, 0.004764639854431153, 0.004867551803588867, 0.004960896015167237, 0.00489247989654541, 0.004849535942077637, 0.0048353919982910155, 0.004857151985168457, 0.004875040054321289, 0.004892416000366211, 0.004841792106628418, 0.004847424030303955, 0.004904511928558349, 0.004862720012664795, 0.004859776020050049, 0.004911104202270508, 0.004875487804412841, 0.004926368236541748, 0.004859007835388183, 0.0048504958152771, 0.004827072143554687, 0.004890624046325683, 0.00485094404220581, 0.00486240005493164, 0.004823200225830078, 0.004953375816345215, 0.004897664070129395, 0.004866367816925049, 0.004832575798034668, 0.004877920150756836, 0.0048726720809936525, 0.0048807039260864256, 0.004939775943756103, 0.004833280086517334, 0.004842495918273926, 0.004893695831298828, 0.004867712020874024, 0.004860415935516358, 0.004896639823913574, 0.004890624046325683, 0.004986879825592041, 0.004860064029693603, 0.004841311931610108, 0.004846784114837646, 0.004903359889984131, 0.004857215881347656, 0.004819968223571777, 0.004903039932250977, 0.004892543792724609, 0.004931583881378174, 0.004899871826171875, 0.004827328205108642, 0.004868959903717041, 0.004986815929412842, 0.00486195182800293, 0.004947968006134033, 0.0048603200912475586, 0.004911871910095214, 0.004930079936981201, 0.004872511863708496, 0.0049006080627441405, 0.0048494720458984375, 0.004915872097015381, 0.00487388801574707, 0.004855487823486328, 0.005958879947662353, 0.004990719795227051, 0.005257215976715088, 0.004878335952758789, 0.004888576030731201, 0.004878592014312744, 0.004943615913391113, 0.004931583881378174, 0.004902463912963867, 0.004860032081604004, 0.004934048175811768, 0.004864096164703369, 0.004861760139465332, 0.004868063926696777, 0.004982816219329834, 0.004952064037322998, 0.0048865280151367185, 0.0048558077812194825, 0.004849599838256836, 0.004927231788635254, 0.004894815921783448, 0.004892864227294922, 0.004866079807281494, 0.004853759765625, 0.004974495887756347, 0.004917056083679199, 0.004867775917053223, 0.004866655826568604, 0.0049357438087463375, 0.004879744052886963, 0.00485433578491211, 0.004857855796813965, 0.004859903812408447, 0.004921343803405762, 0.005033984184265137, 0.0049502401351928715, 0.004873792171478272, 0.004922624111175537, 0.00487283182144165, 0.004828800201416015, 0.0048295679092407225, 0.0048429441452026364, 0.0049155521392822265, 0.0048559679985046384, 0.004830783843994141, 0.004849535942077637, 0.0048867201805114745, 0.00485865592956543, 0.00488640022277832, 0.00483955192565918, 0.004830399990081787, 0.004911871910095214, 0.0048642239570617675, 0.00484335994720459, 0.004831232070922851, 0.004984640121459961, 0.00487443208694458, 0.004859903812408447, 0.0048520641326904295, 0.004849599838256836, 0.004912000179290771, 0.004868959903717041, 0.004833439826965332, 0.004857247829437256, 0.0048685441017150876, 
0.004825088024139404, 0.004890624046325683, 0.004898816108703613, 0.004843103885650635, 0.004821407794952393, 0.004828927993774414, 0.0049584641456604005, 0.004863039970397949, 0.004848639965057373, 0.004867072105407715, 0.004924160003662109, 0.0048887357711791994, 0.004886688232421875, 0.004840447902679444, 0.004827712059020996, 0.004900576114654541, 0.004849535942077637, 0.00491593599319458, 0.004839424133300781, 0.004877696037292481, 0.004896512031555176, 0.00489356803894043, 0.004829184055328369, 0.00489625597000122, 0.004882815837860107, 0.004862080097198487, 0.0048438081741333005, 0.004841184139251709, 0.004837376117706299, 0.004911104202270508, 0.004893824100494385, 0.0048475837707519535, 0.004864352226257324, 0.004919871807098388, 0.004889984130859375, 0.0048789758682250975, 0.004851103782653809, 0.0048700799942016605, 0.004954559803009033, 0.0051833920478820805, 0.004998688220977783, 0.004903520107269287, 0.004933824062347412, 0.004966559886932373, 0.0049788479804992675, 0.004851647853851318, 0.004918687820434571, 0.004899136066436768, 0.004849696159362793, 0.00485811185836792, 0.004857600212097168, 0.004946144104003907, 0.004891679763793945, 0.0048830718994140624, 0.004870272159576416, 0.004988704204559326, 0.004925375938415528, 0.0048765759468078615, 0.004846975803375244, 0.004870656013488769, 0.0047511358261108395, 0.0049259839057922365, 0.004946432113647461, 0.004858943939208984, 0.0048514242172241215, 0.004862656116485595, 0.004942368030548096, 0.004863615989685059, 0.004861983776092529, 0.004835328102111816, 0.0049276800155639644, 0.004873856067657471, 0.0048397121429443355, 0.004855167865753174, 0.004842048168182373, 0.004939199924468994, 0.0048808960914611815, 0.004878719806671142, 0.004861599922180175, 0.004921120166778564, 0.004869599819183349, 0.004850431919097901, 0.004835328102111816, 0.004975711822509766, 0.004887104034423828, 0.004880832195281982, 0.004865151882171631, 0.004864607810974121, 0.004903135776519775, 0.004865983963012696, 0.004878719806671142, 0.004869791984558105, 0.0048373441696166995, 0.0049316477775573735, 0.004923359870910644, 0.004851712226867676, 0.0048455681800842285, 0.004880383968353271, 0.004882431983947754, 0.004915200233459473, 0.00486195182800293, 0.004822656154632569, 0.004917439937591553, 0.004862143993377686, 0.004872191905975342, 0.0048447041511535645, 0.0048362560272216795, 0.004953216075897217, 0.0048566398620605465, 0.004963647842407226, 0.004846271991729737, 0.005027743816375732, 0.0048787841796875, 0.0048689918518066404, 0.00484991979598999, 0.004882976055145264, 0.00491315221786499, 0.004882431983947754, 0.0048839359283447265, 0.004860447883605957, 0.004978687763214112, 0.004892096042633056, 0.00485865592956543, 0.004909023761749267, 0.0048475837707519535, 0.0048716158866882325, 0.00492796802520752, 0.004890399932861328, 0.004831679821014404, 0.004849567890167237, 0.004900896072387696, 0.004876287937164306, 0.004872191905975342, 0.004853248119354248, 0.004870399951934814, 0.005066688060760498, 0.004914624214172363, 0.005008255958557129, 0.004863071918487549, 0.005094016075134277, 0.0050301761627197265, 0.004882431983947754, 0.004893887996673584, 0.00505452823638916, 0.004903552055358887, 0.004886655807495117, 0.004850719928741455, 0.004836319923400879, 0.004898272037506104, 0.004863679885864258, 0.00484003210067749, 0.0048559999465942385, 0.004892735958099365, 0.0048518719673156735, 0.004857567787170411, 0.004851840019226074, 0.004824192047119141, 0.00492249584197998, 0.004867712020874024, 0.004822720050811768, 
0.0048393278121948245, 0.004975135803222656, 0.004901088237762451, 0.0048537278175354, 0.0049313921928405766, 0.004834303855895996, 0.004914112091064453, 0.004866112232208252, 0.0048269758224487305, 0.00483958387374878, 0.004835328102111816, 0.004951615810394287, 0.004885119915008545, 0.004865407943725586, 0.004847455978393555, 0.004895328044891357, 0.004879807949066162, 0.00486684799194336, 0.004855584144592285, 0.004853856086730957, 0.0049284157752990726, 0.004860928058624267, 0.004843423843383789, 0.004840832233428955, 0.004958943843841553, 0.00487014389038086, 0.004818175792694092, 0.004866399765014648, 0.004850080013275146, 0.004843935966491699, 0.0048364481925964355, 0.004892799854278564, 0.004862527847290039, 0.004829504013061523, 0.004870016098022461, 0.004907872200012207, 0.0048831038475036625, 0.00488643217086792, 0.004865503787994384, 0.004851967811584473, 0.004920000076293945, 0.004898816108703613, 0.004851712226867676, 0.0048865280151367185, 0.005033984184265137, 0.004937727928161621, 0.0048651199340820316, 0.004891136169433594, 0.004854207992553711, 0.004932640075683593, 0.004854015827178955, 0.004876992225646973, 0.004849696159362793, 0.005011519908905029, 0.004878047943115234, 0.004849376201629639, 0.004841951847076416, 0.00487014389038086, 0.004953887939453125, 0.004868319988250732, 0.004859007835388183, 0.004841375827789307, 0.004895679950714112, 0.00486732816696167, 0.004973343849182129, 0.004840735912322998, 0.004847424030303955, 0.004965087890625, 0.004892831802368164, 0.004852960109710694, 0.004835904121398926, 0.004928031921386719, 0.004926208019256592, 0.004842048168182373, 0.004896480083465576, 0.004853631973266602, 0.004913504123687744, 0.004885151863098145, 0.004844480037689209, 0.004834176063537598, 0.005119967937469483, 0.004868127822875976, 0.004845439910888672, 0.004860032081604004, 0.004859776020050049, 0.0049168958663940426, 0.004858143806457519, 0.004850016117095947, 0.00484335994720459, 0.00473635196685791, 0.004838367938995361, 0.00490783977508545, 0.0048546562194824215, 0.004834688186645507, 0.004835040092468262, 0.00482806396484375, 0.00489404821395874, 0.004854432106018066, 0.004859903812408447, 0.004837376117706299, 0.005025792121887207, 0.004882431983947754, 0.0048784961700439455, 0.004872032165527344, 0.004843264102935791, 0.0050032639503479, 0.004960512161254883, 0.004872384071350097, 0.004854752063751221, 0.004933856010437012, 0.004896512031555176, 0.004850560188293457, 0.004857664108276367, 0.004836991786956787, 0.004899168014526367, 0.004851935863494873, 0.004841472148895264, 0.0048698558807373045, 0.004888864040374756, 0.005224448204040527, 0.004878592014312744, 0.004865056037902832, 0.004909183979034424, 0.004923999786376953, 0.004892672061920166, 0.004881696224212646, 0.004891488075256347, 0.00524889612197876, 0.005791744232177734, 0.005873663902282715, 0.005629951953887939, 0.005095680236816406, 0.005721343994140625, 0.00491161584854126, 0.004867231845855713, 0.0048932480812072755, 0.004991263866424561, 0.0049231362342834475, 0.004882847785949707, 0.004898655891418457, 0.00493126392364502, 0.004998688220977783, 0.004901663780212402, 0.0048447041511535645, 0.004888800144195557, 0.00498089599609375, 0.004891456127166748, 0.0049079999923706055, 0.004881087779998779, 0.004966047763824463, 0.004940127849578858, 0.004888576030731201, 0.005240511894226074, 0.0049015040397644045, 0.004937759876251221, 0.00488259220123291, 0.004908703804016114, 0.004878335952758789, 0.004919104099273682, 0.004912735939025879, 0.004858751773834228, 
0.004853663921356201, 0.004890624046325683, 0.004914400100708008, 0.004856704235076904, 0.004855711936950683, 0.004932767868041992, 0.004908991813659668, 0.005355679988861084, 0.004873983860015869, 0.00487721586227417, 0.004935359954833985, 0.0048644161224365235, 0.0048496642112731934, 0.005135392189025879, 0.0056737599372863766, 0.004878528118133545, 0.004861792087554931, 0.0048596482276916505, 0.004948383808135987, 0.004907008171081543, 0.0048802242279052735, 0.00488486385345459, 0.0049147200584411625, 0.004910624027252198, 0.004870880126953125, 0.004847360134124756, 0.00486630392074585, 0.0049051837921142575, 0.004890399932861328, 0.004863999843597412, 0.00493174409866333, 0.004883391857147217, 0.005004191875457763, 0.004902912139892578, 0.004847616195678711, 0.004857439994812012, 0.004913504123687744, 0.004862016201019287, 0.004855519771575928, 0.004831552028656006, 0.004909023761749267, 0.00486195182800293, 0.004878335952758789, 0.004876287937164306, 0.004863999843597412, 0.004933631896972656, 0.004933407783508301, 0.004857855796813965, 0.0048949441909790035, 0.004943871974945068, 0.005029888153076172, 0.0048577280044555665, 0.004882336139678955, 0.004868319988250732, 0.004787968158721924, 0.004929823875427246, 0.004912447929382324, 0.004878015995025635, 0.004877280235290527, 0.00484716796875, 0.004923840045928955, 0.0048980159759521484, 0.004866911888122559, 0.004841119766235352, 0.004900320053100586, 0.004885312080383301, 0.004854015827178955, 0.004857120037078858, 0.004860383987426758, 0.005024096012115478, 0.00489846420288086, 0.004843391895294189, 0.004841599941253662, 0.004898079872131348, 0.0048666238784790036, 0.0048514242172241215, 0.0048681597709655765, 0.004851456165313721, 0.00493017578125, 0.00488150405883789, 0.004851744174957275, 0.0048447041511535645, 0.004885983943939209, 0.004921599864959717, 0.005138336181640625, 0.005271488189697265, 0.005001311779022217, 0.004997183799743652, 0.0050455999374389646, 0.005534687995910645, 0.004924863815307617, 0.004897119998931884, 0.004861311912536621, 0.0048576960563659665, 0.004924320220947266, 0.004906816005706787, 0.004869823932647705, 0.0048496642112731934, 0.004909120082855225, 0.00489731216430664, 0.00487497615814209, 0.004844096183776856, 0.004839104175567627, 0.004901408195495605, 0.004863264083862305, 0.00482806396484375, 0.004868063926696777, 0.004835360050201416, 0.004966400146484375, 0.00487824010848999, 0.004843008041381836, 0.004837632179260254, 0.004864352226257324, 0.004877984046936035, 0.004856160163879394, 0.004831232070922851, 0.004921343803405762, 0.004834752082824707, 0.004880864143371582, 0.004927231788635254, 0.004890495777130127, 0.004869311809539795, 0.004849376201629639, 0.004947968006134033, 0.0049049282073974606, 0.0048640317916870115, 0.0048455681800842285, 0.004896128177642822, 0.004912831783294678, 0.004865151882171631, 0.004855072021484375, 0.004858208179473877, 0.004947487831115723, 0.004893184185028076, 0.004849823951721191, 0.0048558077812194825, 0.004865024089813232, 0.004963327884674072, 0.004951456069946289, 0.004907423973083496, 0.004903135776519775, 0.00497046422958374, 0.004931359767913818, 0.004882688045501709, 0.004896063804626465, 0.00495900821685791, 0.004910848140716553, 0.00487340784072876, 0.004883135795593262, 0.0048707199096679685, 0.004943552017211914, 0.004866335868835449, 0.004854944229125977, 0.004874815940856933, 0.004925439834594727, 0.004888576030731201, 0.004866047859191895, 0.004839424133300781, 0.004923391819000244, 0.004943647861480713, 0.004909408092498779, 
0.0048715839385986325, 0.00486243200302124, 0.004927487850189209, 0.004915200233459473, 0.004866047859191895, 0.004853759765625, 0.004836959838867188, 0.004917183876037598, 0.004851359844207764, 0.004834239959716797, 0.004878176212310791, 0.00489635181427002, 0.005007808208465576, 0.004873600006103516, 0.004841792106628418, 0.004838719844818116, 0.004899328231811524, 0.004874752044677734, 0.0048876161575317385, 0.004749663829803467, 0.004864128112792969, 0.004868319988250732, 0.0049357438087463375, 0.004865375995635987, 0.004863872051239014, 0.004834080219268799, 0.00481708812713623, 0.0049172801971435545, 0.00485152006149292, 0.004925407886505127, 0.004863999843597412, 0.004910975933074951, 0.004868224143981934, 0.004834688186645507, 0.004841824054718017, 0.004837823867797852, 0.0048802242279052735, 0.004907008171081543, 0.004843520164489746, 0.004839424133300781, 0.004886464118957519, 0.004886591911315918, 0.004863999843597412, 0.004852799892425537, 0.004848576068878173, 0.004906335830688477, 0.004862815856933594, 0.004857120037078858, 0.004860415935516358, 0.004853759765625, 0.004947999954223633, 0.004873824119567871, 0.004929855823516845, 0.004841567993164063, 0.004898208141326904, 0.005237696170806885, 0.005007359981536865, 0.005416416168212891, 0.005245120048522949, 0.005334432125091553, 0.004899424076080322, 0.00506060791015625, 0.005293856143951416, 0.005279967784881592, 0.005105311870574951, 0.004967840194702149, 0.004891583919525147, 0.00487014389038086, 0.004872191905975342, 0.004965472221374512, 0.004920224189758301, 0.004898816108703613, 0.0048635520935058595, 0.004946623802185059, 0.004924543857574463, 0.004887167930603027, 0.004867968082427979, 0.004918943881988525, 0.004938432216644287, 0.004875040054321289, 0.004844543933868409, 0.004871456146240234, 0.004811295986175537, 0.004902751922607422, 0.004927360057830811, 0.004872320175170898, 0.004855231761932373, 0.004833856105804443, 0.004917344093322754, 0.004881728172302246, 0.004884352207183838, 0.004862656116485595, 0.0048537921905517575, 0.004921055793762207, 0.004869664192199707, 0.004834080219268799, 0.004849823951721191, 0.004939008235931397, 0.004901440143585205, 0.0048455681800842285, 0.00483513593673706, 0.004835296154022217, 0.004912384033203125, 0.004887519836425781, 0.004856128215789795, 0.004843391895294189, 0.004849408149719238, 0.0048681597709655765, 0.00484991979598999, 0.0048429441452026364, 0.004937759876251221, 0.004919583797454834, 0.004887807846069336, 0.004852479934692383, 0.0048410239219665524, 0.004856256008148194, 0.004978432178497315, 0.004858272075653076, 0.004846784114837646, 0.0048455362319946285, 0.0049179520606994626, 0.004863999843597412, 0.004854015827178955, 0.0049417920112609865, 0.004895711898803711, 0.004885536193847656, 0.004849760055541992, 0.004929056167602539, 0.0048776321411132815, 0.004891488075256347, 0.004874239921569825, 0.004830304145812988, 0.004830239772796631, 0.004847487926483154, 0.0049309759140014646, 0.004940383911132813, 0.0048455681800842285, 0.004828671932220459, 0.004891136169433594, 0.0048863677978515626, 0.004848896026611328, 0.004830399990081787, 0.004846752166748047, 0.004895455837249756, 0.004924928188323975, 0.0048345279693603515, 0.004856575965881347, 0.0048405442237854, 0.004829440116882324, 0.004909759998321534, 0.004863935947418213, 0.004851456165313721, 0.0048642559051513675, 0.00485811185836792, 0.004933440208435059, 0.0048919677734375, 0.00502239990234375, 0.004892896175384522, 0.0049284157752990726, 0.004883327960968018, 0.0048559679985046384, 
0.004854688167572022, 0.00484438419342041, 0.004926623821258545, 0.004876959800720215, 0.004843520164489746, 0.004858143806457519, 0.0048947839736938475, 0.004883552074432373, 0.004878367900848389, 0.004877120018005371, 0.0048534078598022464, 0.004927840232849121, 0.004884479999542236, 0.004853375911712646, 0.004863455772399903, 0.004916128158569336, 0.004890336036682129, 0.004866335868835449, 0.004866335868835449, 0.004865983963012696, 0.0049376959800720216, 0.005026624202728271, 0.004875199794769287, 0.004860000133514404, 0.004935647964477539, 0.004879392147064209, 0.00487721586227417, 0.004853983879089355, 0.004858719825744629, 0.004966400146484375, 0.004883456230163574, 0.004869408130645752, 0.004893119812011719, 0.004944064140319824, 0.004880767822265625, 0.0048657598495483395, 0.00486569595336914, 0.004892831802368164, 0.004972064018249512, 0.004872159957885742, 0.004863840103149414, 0.004859936237335205, 0.004944447994232178, 0.004902495861053467, 0.004853888034820557, 0.0048830718994140624, 0.0048715839385986325, 0.004759647846221924, 0.004935647964477539, 0.004886176109313965, 0.004858431816101075, 0.004849184036254883, 0.004839871883392334, 0.004912896156311035, 0.004861087799072266, 0.0048297600746154785, 0.004831456184387207, 0.004873439788818359, 0.004898784160614014, 0.004862720012664795, 0.004827328205108642, 0.004824480056762695, 0.004933631896972656, 0.004888192176818847, 0.004840352058410644, 0.004847936153411865, 0.004853119850158691, 0.004896512031555176, 0.004862527847290039, 0.004868288040161133, 0.004847424030303955, 0.004857855796813965, 0.004876512050628662, 0.004918975830078125, 0.004829216003417969, 0.004829247951507568, 0.004902783870697021, 0.004874144077301025, 0.004835552215576172, 0.004826879978179931, 0.004831776142120361, 0.004890111923217774, 0.004843776226043701, 0.004851647853851318, 0.004849696159362793, 0.004904960155487061, 0.004858975887298584, 0.004860288143157959, 0.004907711982727051, 0.004841311931610108, 0.004896768093109131, 0.004824895858764649, 0.004994400024414062, 0.004836160182952881, 0.004856031894683838, 0.004896575927734375, 0.004874239921569825, 0.004825088024139404, 0.004833280086517334, 0.00495036792755127, 0.004875936031341553, 0.004822815895080566, 0.004815072059631348, 0.004820991992950439, 0.004903103828430176, 0.004883840084075928, 0.004843967914581299, 0.004853919982910156, 0.004929408073425293, 0.00490067195892334, 0.00484496021270752, 0.004913760185241699, 0.0049030079841613766, 0.004878560066223145, 0.004906688213348389, 0.004873600006103516, 0.004957888126373291, 0.00490723180770874, 0.00488918399810791, 0.004950143814086914, 0.004906144142150879, 0.00488640022277832, 0.004873119831085205, 0.005005375862121582, 0.0049147200584411625, 0.004866528034210205, 0.004866047859191895, 0.004978687763214112, 0.004930560111999512, 0.0048855037689208985, 0.004859744071960449, 0.004888768196105957, 0.00495305585861206, 0.004919936180114746, 0.004891007900238037, 0.004875391960144043, 0.004932479858398437, 0.004918591976165771, 0.004883135795593262, 0.004865600109100342, 0.0048419198989868165, 0.004924736022949219, 0.004878464221954345, 0.004856383800506592, 0.004849760055541992, 0.004863232135772705, 0.004889503955841064, 0.004873631954193115, 0.004938079833984375, 0.00491315221786499, 0.004950079917907715, 0.004908895969390869, 0.0048529281616210935, 0.0048484477996826175, 0.004877471923828125, 0.004870463848114013, 0.004856448173522949, 0.004869631767272949, 0.004827648162841797, 0.004907008171081543, 0.004896768093109131, 
0.0048661441802978515, 0.0048536000251770016, 0.004861087799072266, 0.0049407038688659664, 0.004857247829437256, 0.004849696159362793, 0.004862815856933594, 0.004921055793762207, 0.004883872032165527, 0.004878943920135498, 0.005155968189239502, 0.00515123176574707, 0.005212192058563232, 0.005036799907684326, 0.004914463996887207, 0.004877344131469726, 0.00493126392364502, 0.004896224021911621, 0.004862463951110839, 0.004853536128997802, 0.004905216217041016, 0.004896768093109131, 0.004882431983947754, 0.0048681597709655765, 0.0048512001037597655, 0.004915647983551026, 0.00489686393737793, 0.004861599922180175, 0.004869376182556152, 0.004903935909271241, 0.004912255764007568, 0.00487824010848999, 0.004850399971008301, 0.00484991979598999, 0.004920671939849854, 0.00513097620010376, 0.004863808155059815, 0.004855936050415039, 0.004900224208831787, 0.004964384078979492, 0.00487388801574707, 0.0048501439094543455, 0.00487062406539917, 0.004911104202270508, 0.004876287937164306, 0.004847263813018799, 0.004848159790039062, 0.004904160022735596, 0.004920000076293945, 0.004968544006347656, 0.004860735893249511, 0.004828159809112549, 0.004909311771392822, 0.004877696037292481, 0.004864575862884521, 0.004894527912139892, 0.0049192957878112795, 0.004902912139892578, 0.004889887809753418, 0.004860640048980713, 0.004868095874786377, 0.0050332479476928715, 0.004897503852844238, 0.0048784961700439455, 0.0048475518226623535, 0.004914080142974854, 0.004891647815704346, 0.004834943771362304, 0.0048559679985046384, 0.004847263813018799, 0.00505299186706543, 0.004911327838897705, 0.004887360095977783, 0.0048846077919006345, 0.004983424186706543, 0.004765727996826172, 0.004900063991546631, 0.004887296199798584, 0.004846879959106445, 0.004855743885040283, 0.004909952163696289, 0.004861408233642578, 0.004839871883392334, 0.00485584020614624, 0.004831200122833252, 0.0048887357711791994, 0.004872032165527344, 0.004843520164489746, 0.004847839832305908, 0.004890560150146484, 0.004912896156311035, 0.0048763837814331055, 0.004860928058624267, 0.004856895923614502, 0.004930784225463867, 0.004878079891204834, 0.004847712039947509, 0.004842368125915528, 0.004855584144592285, 0.004955840110778808, 0.004847936153411865, 0.004851935863494873, 0.004831232070922851, 0.004916959762573242, 0.004888864040374756, 0.004851391792297363, 0.004837696075439453, 0.0048429441452026364, 0.00492409610748291, 0.004880256175994873, 0.004855616092681885, 0.0048848319053649905, 0.004935520172119141, 0.0049183998107910154, 0.004883711814880371, 0.004882048130035401, 0.004904960155487061, 0.004964352130889893, 0.004904096126556397, 0.004850527763366699, 0.004842879772186279, 0.0048830718994140624, 0.004869599819183349, 0.004872255802154541, 0.004854559898376465, 0.004849120140075684, 0.004914463996887207, 0.00489577579498291, 0.005908639907836914, 0.005014272212982178, 0.004927552223205566, 0.004872992038726807, 0.004884640216827393, 0.004934656143188477, 0.004899871826171875, 0.0051233282089233395, 0.00491977596282959, 0.004884736061096191, 0.004759679794311524, 0.004925439834594727, 0.0049060797691345215, 0.004852640151977539, 0.004853568077087402, 0.004839615821838379, 0.004943039894104004, 0.0049795198440551755, 0.0048551359176635745, 0.004854015827178955, 0.004933311939239502, 0.004908063888549805, 0.00487388801574707, 0.004855264186859131, 0.0048707518577575685, 0.004929056167602539, 0.004893119812011719, 0.004887904167175293, 0.004883264064788818, 0.0049576001167297365, 0.004898431777954101, 0.0048587841987609865, 0.004881408214569092, 
0.004854688167572022, 0.004917600154876709, 0.004875936031341553, 0.004852992057800293, 0.004852479934692383, 0.004907008171081543, 0.004874239921569825, 0.004859615802764892, 0.0048540477752685544, 0.004869760036468506, 0.0049565439224243165, 0.004939167976379394, 0.004919680118560791, 0.00488265609741211, 0.004933311939239502, 0.004896543979644776, 0.004852255821228027, 0.004837056159973144, 0.004854080200195313, 0.004927584171295166, 0.004867616176605224, 0.004848000049591065, 0.004839424133300781, 0.004857855796813965, 0.004880640029907226, 0.0048596482276916505, 0.004841087818145752, 0.004848127841949463, 0.004898240089416504, 0.00486240005493164, 0.0048240962028503415, 0.004834271907806396, 0.00483897590637207, 0.004905439853668213, 0.004857823848724365, 0.0048455362319946285, 0.004835360050201416, 0.00486579179763794, 0.004876800060272217, 0.004861695766448975]",tokens/s,204.04037442252584,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1852.481536,2552.102912,0.0,2149.5808,2147.522048,s,1,9.1175341796875,9.1175341796875,0.0,9.1175341796875,9.1175341796875,9.1175341796875,9.1175341796875,[9.1175341796875],,kWh,4.497033175834607e-05,4.9517759981075445e-06,1.3928066698004615e-05,6.385017445445822e-05,,MB,2021.94944,2621.308928,0.0,2204.106752,2190.132736,s,10,0.6501222076416016,0.06501222076416016,0.0002492168138458334,0.06499571228027343,0.06529665832519531,0.06537420120239258,0.06543623550415038,"[0.06545174407958984, 0.064698974609375, 0.06458576202392578, 0.06527942657470703, 0.0651214370727539, 0.06489974212646485, 0.06498336029052734, 0.065202880859375, 0.06489081573486329, 0.06500806427001952]",tokens/s,3937.7212005827573,kWh,1.9536802382221544e-06,2.15455905890454e-07,1.2983917794533173e-06,3.467527923565926e-06,tokens/kWh,73827812.10215475,MB,2024.57088,2621.308928,0.0,2204.106752,2190.135296,s,10,14.465799926757812,1.4465799926757812,0.00940396074750822,1.4462983398437501,1.4545673461914062,1.4613666198730468,1.4668060388183595,"[1.453056396484375, 1.4405614013671875, 1.449715087890625, 1.4474168701171874, 1.4451798095703126, 1.440419921875, 1.44349267578125, 1.4298912353515625, 1.447900634765625, 1.4681658935546875]",tokens/s,43.550996363130295,kWh,4.2382167425111185e-05,4.674394195781826e-06,2.4157434140746196e-05,7.121399576163921e-05,tokens/kWh,884657.5638146703,,s,630,14.463738929748533,0.022958315761505612,0.00035667689412259985,0.022896784782409668,0.023296396255493162,0.02340579662322998,0.024195401458740235,"[0.023746944427490233, 0.023186559677124022, 0.02310438346862793, 0.023181312561035155, 0.02323865509033203, 0.022986751556396484, 0.022978559494018554, 0.023008384704589845, 0.023028608322143554, 0.02291004753112793, 0.023190431594848633, 0.023049407958984375, 0.023380319595336915, 0.02308143997192383, 0.02299273681640625, 0.023103647232055664, 0.023085056304931642, 0.023035903930664063, 0.023004735946655273, 
0.02312031936645508, 0.023098623275756836, 0.0229752311706543, 0.023029056549072266, 0.023001375198364257, 0.023105600357055663, 0.02301372718811035, 0.02302694320678711, 0.022993663787841796, 0.02307276725769043, 0.023041919708251955, 0.022990079879760743, 0.022999616622924806, 0.023041664123535158, 0.023007232666015624, 0.022997119903564452, 0.022977088928222655, 0.02333443260192871, 0.023097728729248045, 0.022982751846313477, 0.022921215057373046, 0.022953216552734374, 0.022926080703735353, 0.022951936721801756, 0.022940927505493165, 0.022940351486206056, 0.023129663467407226, 0.02301388740539551, 0.022912416458129883, 0.023219039916992187, 0.023143648147583008, 0.022952480316162108, 0.02304204750061035, 0.023214080810546874, 0.023138175964355467, 0.023066751480102538, 0.023124223709106446, 0.023060224533081056, 0.02305638313293457, 0.02304115104675293, 0.022981216430664062, 0.023037376403808593, 0.022905696868896486, 0.022984415054321288, 0.0234586238861084, 0.023090431213378906, 0.022993663787841796, 0.023037664413452147, 0.022908287048339845, 0.02285660743713379, 0.022851232528686524, 0.02294828796386719, 0.022908832550048826, 0.02293350410461426, 0.022775136947631835, 0.0228768310546875, 0.022888351440429687, 0.022847583770751953, 0.022849184036254883, 0.02290060806274414, 0.022882783889770508, 0.02294931221008301, 0.0228787841796875, 0.02290278434753418, 0.02263654327392578, 0.022830463409423827, 0.022686592102050783, 0.022803295135498048, 0.02287504005432129, 0.02282700729370117, 0.022955167770385743, 0.022788959503173827, 0.02277497673034668, 0.022754112243652345, 0.022685375213623047, 0.02286617660522461, 0.02321004867553711, 0.023621055603027345, 0.022926111221313477, 0.022943519592285157, 0.02279756736755371, 0.022764287948608398, 0.022829055786132812, 0.022877599716186522, 0.022866527557373048, 0.022831104278564454, 0.02294988822937012, 0.022874111175537108, 0.022806528091430665, 0.02279430389404297, 0.022921152114868164, 0.022727903366088868, 0.022718463897705078, 0.022762079238891602, 0.022869951248168947, 0.02267366409301758, 0.022743040084838868, 0.022874111175537108, 0.02268704032897949, 0.02272467231750488, 0.022758304595947267, 0.022695648193359376, 0.022839040756225587, 0.022824575424194336, 0.022686336517333986, 0.02266316795349121, 0.02288572883605957, 0.02305891227722168, 0.022972511291503905, 0.022829248428344728, 0.02287001609802246, 0.02296019172668457, 0.022665151596069334, 0.025217023849487305, 0.02291916847229004, 0.022708223342895507, 0.022993024826049806, 0.022816640853881836, 0.022924415588378905, 0.023169824600219727, 0.02295408058166504, 0.022784000396728517, 0.02272051239013672, 0.022763296127319334, 0.022806079864501953, 0.022609888076782228, 0.022817663192749024, 0.022803903579711914, 0.02275321578979492, 0.022744991302490233, 0.022968864440917967, 0.022863487243652343, 0.022704511642456054, 0.022786304473876952, 0.022771360397338868, 0.02269398307800293, 0.022777856826782225, 0.022834751129150392, 0.023009727478027344, 0.022838752746582033, 0.022785696029663086, 0.02272960090637207, 0.022722431182861328, 0.022857856750488282, 0.02295769691467285, 0.022831487655639648, 0.022803743362426757, 0.022810495376586915, 0.0227357120513916, 0.02273209571838379, 0.022898752212524413, 0.023011264801025392, 0.022870719909667967, 0.023480159759521484, 0.024137056350708008, 0.02343404769897461, 0.023435136795043946, 0.023296127319335936, 0.023111167907714843, 0.023146047592163085, 0.023221183776855468, 0.02318681526184082, 0.024230144500732423, 
0.023251232147216798, 0.023210079193115234, 0.023159936904907228, 0.023141088485717772, 0.023128223419189454, 0.023033184051513673, 0.02302739143371582, 0.023741600036621093, 0.02325775909423828, 0.023265119552612304, 0.02311142349243164, 0.023081567764282225, 0.023060319900512695, 0.022982816696166992, 0.023064447402954102, 0.02307699203491211, 0.023035903930664063, 0.0228351993560791, 0.022779008865356446, 0.022925664901733398, 0.02335795211791992, 0.022918848037719725, 0.022985088348388673, 0.023091167449951173, 0.022985952377319336, 0.02294406318664551, 0.0229913272857666, 0.02307072067260742, 0.023160863876342773, 0.023076831817626955, 0.023031808853149413, 0.023061952590942382, 0.023198272705078123, 0.023008928298950196, 0.023337312698364258, 0.023017471313476562, 0.023019519805908203, 0.022955808639526367, 0.022846944808959962, 0.02289740753173828, 0.023146495819091797, 0.023164031982421875, 0.022948032379150392, 0.022766271591186524, 0.023027711868286133, 0.02305023956298828, 0.022951711654663087, 0.022980831146240235, 0.022992895126342772, 0.0229171199798584, 0.022879648208618163, 0.02282147216796875, 0.02284492874145508, 0.022856191635131837, 0.022734848022460938, 0.022839263916015626, 0.022733983993530275, 0.022769792556762695, 0.02292313575744629, 0.02311587142944336, 0.022720863342285156, 0.022761056900024414, 0.022798816680908204, 0.022772096633911134, 0.022784000396728517, 0.022798336029052735, 0.022765567779541016, 0.022699968338012695, 0.022738687515258788, 0.02275119972229004, 0.02327756881713867, 0.023218048095703124, 0.02326950454711914, 0.02354697608947754, 0.02328428840637207, 0.02330863952636719, 0.023021535873413088, 0.023303743362426757, 0.02290940856933594, 0.02289459228515625, 0.023047807693481446, 0.02285331153869629, 0.02282566452026367, 0.0240762882232666, 0.02368921661376953, 0.0231014404296875, 0.023029760360717775, 0.022974464416503908, 0.022806528091430665, 0.02291904067993164, 0.02277516746520996, 0.02279091262817383, 0.022806047439575195, 0.022806976318359377, 0.022725664138793945, 0.022786880493164064, 0.02288863945007324, 0.02274246406555176, 0.022784576416015626, 0.022841184616088868, 0.022822944641113282, 0.022687871932983397, 0.022796287536621093, 0.02282700729370117, 0.02288768005371094, 0.022874879837036132, 0.02280243110656738, 0.02271171188354492, 0.02270047950744629, 0.022755168914794923, 0.02353388786315918, 0.02339993667602539, 0.022874624252319335, 0.02295369529724121, 0.022896928787231444, 0.022831104278564454, 0.022785856246948243, 0.022732032775878906, 0.02275833511352539, 0.022680639266967773, 0.02279929542541504, 0.022850847244262694, 0.022786783218383788, 0.022748544692993165, 0.0228351993560791, 0.022852224349975588, 0.02280243110656738, 0.02280022430419922, 0.02279849624633789, 0.022701728820800782, 0.022774112701416015, 0.02289459228515625, 0.02270207977294922, 0.022943552017211915, 0.02290073585510254, 0.02289664077758789, 0.02278390312194824, 0.022784095764160156, 0.022728511810302734, 0.02273865509033203, 0.022853376388549805, 0.022783775329589844, 0.022821279525756837, 0.0226715202331543, 0.02292118453979492, 0.02276393508911133, 0.02276527976989746, 0.022688032150268555, 0.022797887802124023, 0.022718751907348633, 0.022737056732177734, 0.022703392028808594, 0.022755136489868166, 0.022680192947387694, 0.022933792114257813, 0.022726655960083008, 0.022708223342895507, 0.022999040603637694, 0.022834495544433595, 0.02286867141723633, 0.022844512939453124, 0.02303887939453125, 0.02304819107055664, 0.02285158348083496, 
0.022929088592529297, 0.02312953567504883, 0.023024511337280274, 0.022822912216186524, 0.02305824089050293, 0.02286534309387207, 0.022776479721069335, 0.022714239120483398, 0.022996320724487304, 0.022972543716430663, 0.022841344833374022, 0.022833919525146483, 0.02280886459350586, 0.024114912033081054, 0.02321971130371094, 0.022862335205078126, 0.022929407119750975, 0.02289366340637207, 0.022764448165893555, 0.02279612731933594, 0.02298076820373535, 0.022858783721923827, 0.02278704071044922, 0.02275328063964844, 0.02274086380004883, 0.02298099136352539, 0.022789888381958008, 0.02278326416015625, 0.02274924850463867, 0.022671968460083007, 0.022739007949829103, 0.022752511978149415, 0.023201536178588868, 0.02431545639038086, 0.02293600082397461, 0.02268783950805664, 0.022923168182373048, 0.022806079864501953, 0.02270047950744629, 0.022837247848510742, 0.02275702476501465, 0.022919519424438477, 0.022796287536621093, 0.023246112823486327, 0.022860511779785157, 0.023096576690673828, 0.02268560028076172, 0.022709087371826173, 0.022624256134033204, 0.022866111755371094, 0.02281248092651367, 0.022635839462280274, 0.022616960525512694, 0.027565088272094727, 0.025332128524780274, 0.02284351921081543, 0.023312639236450196, 0.02277334403991699, 0.02256118392944336, 0.02269558334350586, 0.02270867156982422, 0.022622112274169923, 0.022568960189819336, 0.022558176040649414, 0.02253673553466797, 0.02271980857849121, 0.022606016159057617, 0.0226431999206543, 0.02269388771057129, 0.022597631454467772, 0.022573055267333983, 0.02268569564819336, 0.022994943618774414, 0.022529151916503905, 0.02256764793395996, 0.02251910400390625, 0.022593952178955077, 0.022779327392578125, 0.02259660720825195, 0.022555776596069336, 0.022712511062622072, 0.023124256134033204, 0.022564512252807617, 0.022686464309692383, 0.022693727493286135, 0.0242192325592041, 0.02264271926879883, 0.02269561576843262, 0.02295894432067871, 0.022742080688476562, 0.02278291130065918, 0.022697696685791014, 0.022608160018920898, 0.022674528121948243, 0.022731039047241212, 0.023072256088256835, 0.02276211166381836, 0.022683647155761717, 0.023136255264282226, 0.023032960891723634, 0.022678112030029295, 0.02264297676086426, 0.02287820816040039, 0.022595584869384764, 0.022724607467651366, 0.022699743270874023, 0.022696224212646485, 0.022755327224731444, 0.022788095474243163, 0.02258310317993164, 0.022632320404052733, 0.02271414375305176, 0.022729248046875, 0.02251910400390625, 0.022747840881347656, 0.02258086395263672, 0.02263488006591797, 0.022681600570678712, 0.022634496688842775, 0.022550527572631835, 0.022701791763305664, 0.02258563232421875, 0.022670623779296874, 0.022678239822387695, 0.022657024383544923, 0.02256399917602539, 0.022698848724365235, 0.022577152252197266, 0.022579200744628908, 0.02274508857727051, 0.022732799530029296, 0.022791584014892577, 0.022776416778564453, 0.022627487182617187, 0.02268191909790039, 0.022724704742431642, 0.022538047790527344, 0.02268332862854004, 0.022694847106933595, 0.022560768127441407, 0.02267136001586914, 0.022611711502075197, 0.02362598419189453, 0.022757375717163086, 0.02264255905151367, 0.0226243839263916, 0.022700031280517577, 0.022537439346313477, 0.02270492744445801, 0.022585344314575196, 0.022591487884521484, 0.022624319076538085, 0.02260780715942383, 0.022605823516845702, 0.022642688751220705, 0.022534143447875975, 0.022684864044189453, 0.022508224487304686, 0.022781951904296875, 0.02264473533630371, 0.02269593620300293, 0.02259049606323242, 0.022645727157592773, 0.022603168487548828, 
0.022630239486694338, 0.022696704864501954, 0.02263654327392578, 0.022898687362670898, 0.0227589111328125, 0.02278860855102539, 0.02271027183532715, 0.02314588737487793, 0.02282147216796875, 0.022975872039794922, 0.02293337631225586, 0.02295680046081543, 0.02303385543823242, 0.022914880752563475, 0.02292495918273926, 0.02294428825378418, 0.02305561637878418, 0.023173887252807616, 0.02304819107055664, 0.023729631423950195, 0.023286304473876952, 0.023541120529174803, 0.023321216583251952, 0.023389440536499023, 0.023122688293457032, 0.02294313621520996, 0.02286470413208008, 0.02334422492980957, 0.023073471069335938, 0.023013376235961915, 0.023113216400146484, 0.022993408203125, 0.022992095947265624, 0.022899232864379882, 0.02296156883239746, 0.022963039398193358, 0.023080543518066408, 0.022953407287597656, 0.022877151489257813, 0.02319500732421875, 0.0231364803314209, 0.02291753578186035, 0.022806367874145507, 0.022864032745361328, 0.022968511581420898, 0.022753087997436524, 0.022832128524780275, 0.023155712127685548, 0.023057920455932617, 0.022884223937988283, 0.023003103256225586, 0.02303385543823242, 0.023104000091552734, 0.02296028709411621, 0.023120927810668945, 0.023215072631835938, 0.0231845760345459, 0.023762304306030272, 0.023347360610961914, 0.023153120040893555, 0.023066047668457032, 0.02316080093383789, 0.023120479583740236, 0.023093248367309572, 0.02305023956298828, 0.023063680648803712, 0.02309619140625, 0.023207935333251953, 0.023027328491210936, 0.023099775314331054, 0.02304614448547363, 0.02310758399963379, 0.023234560012817384, 0.023183359146118163, 0.02308710479736328, 0.023186687469482423, 0.02313907241821289, 0.023039392471313477, 0.023619295120239258, 0.024855424880981445, 0.023555328369140625, 0.023298816680908205, 0.023264991760253907, 0.023138336181640625, 0.023326976776123047, 0.023348224639892577, 0.023124736785888673, 0.023188928604125976, 0.02321696090698242, 0.02322425651550293, 0.023382080078125, 0.023387807846069336, 0.023358976364135742, 0.02331113624572754, 0.02329132843017578, 0.023241344451904296, 0.023410688400268553, 0.023300191879272462, 0.023105024337768554, 0.023246816635131836, 0.02341059112548828, 0.023357471466064452, 0.023341567993164062, 0.023248895645141602, 0.023228416442871092, 0.0232521915435791, 0.023345951080322266, 0.023355392456054686, 0.023444608688354494, 0.023315040588378907, 0.023261472702026366, 0.023842687606811522, 0.023304319381713866, 0.023349248886108398, 0.023387359619140624, 0.023351167678833006, 0.023393312454223634, 0.023449472427368164, 0.023384063720703126, 0.023447551727294923]",tokens/s,43.55720212179971,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,4764.01664,6194.855936,0.0,5792.333824,5786.358272,s,1,11.419783203125,11.419783203125,0.0,11.419783203125,11.419783203125,11.419783203125,11.419783203125,[11.419783203125],,kWh,0.0001238705426583389,1.3656637711665576e-05,4.0497532398001135e-05,0.00017802471276800563,,MB,1781.78048,6396.182528,0.0,5978.980352,5957.812736,s,10,2.280546463012695,0.22805464630126954,0.0002070197309188695,0.22811331176757813,0.22825646514892578,0.2282631736755371,0.22826854049682618,"[0.22824195861816407, 0.22759808349609376, 0.22808787536621095, 0.2279071044921875, 0.22825497436523437, 0.2281387481689453, 0.22783139038085937, 0.2281889953613281, 0.22802745056152343, 0.22826988220214844]",tokens/s,1122.537971279978,kWh,6.67907109545483e-06,7.365773196205096e-07,4.439586885000057e-06,1.1855235300075396e-05,tokens/kWh,21593835.425465737,MB,1791.1808,6408.76544,0.0,5991.563264,5958.339584,s,10,22.024242431640626,2.2024242431640624,0.007916565264960134,2.2014190673828127,2.209132421875,2.215537890625,2.220662265625,"[2.19835986328125, 2.200092529296875, 2.19814208984375, 2.20274560546875, 2.194254638671875, 2.203310302734375, 2.20533154296875, 2.192353515625, 2.207708984375, 2.221943359375]",tokens/s,28.60484313843753,kWh,6.359732127079631e-05,7.0136543484505735e-06,4.226208936519991e-05,0.00011287306498444682,tokens/kWh,558149.1032309701,,s,630,22.021756393432597,0.03495516887846447,0.0004331618536349763,0.03486964797973632,0.0352915023803711,0.035641749191284176,0.036710958557128914,"[0.0364969596862793, 0.035154369354248045, 0.03484592056274414, 0.03490387344360352, 0.03474736022949219, 0.034781185150146485, 0.03527427291870117, 0.03478575897216797, 0.034746368408203124, 0.03487948989868164, 0.03505152130126953, 0.034748416900634765, 0.03465830230712891, 0.03511820983886719, 0.03471859359741211, 0.03476054382324219, 0.034731422424316406, 0.03486521530151367, 0.03463033676147461, 0.034920448303222655, 0.034790687561035157, 0.03465903854370117, 0.03483238220214844, 0.034813343048095705, 0.034662078857421875, 0.034716129302978516, 0.03508006286621094, 0.03488211059570313, 0.034756832122802735, 0.03528835296630859, 0.034638336181640625, 0.03471155166625976, 0.03468288040161133, 0.03486105728149414, 0.034772991180419925, 0.03467468643188477, 0.03457024002075195, 0.03453334426879883, 0.034582561492919925, 0.034625537872314455, 0.03454313659667969, 0.034646495819091794, 0.03461497497558594, 0.034726207733154296, 0.03471318435668945, 0.034810272216796875, 0.034697216033935545, 0.034729984283447264, 0.03471548843383789, 0.035066017150878905, 0.037676864624023435, 0.03534377670288086, 0.034915103912353515, 0.03480780792236328, 0.03471721649169922, 
0.03472022247314453, 0.03464825439453125, 0.03508614349365234, 0.03594604873657226, 0.03484511947631836, 0.0348037109375, 0.034711360931396484, 0.03470150375366211, 0.035647327423095704, 0.03499212646484375, 0.03460831832885742, 0.03467756652832031, 0.03465011215209961, 0.03460505676269531, 0.0346951675415039, 0.0346253433227539, 0.03464787292480469, 0.03442931365966797, 0.03457843017578125, 0.03459078216552734, 0.034637760162353516, 0.034770942687988284, 0.03473769760131836, 0.034670272827148435, 0.03470355224609375, 0.03585836791992188, 0.03496518325805664, 0.03478217697143555, 0.03590348815917969, 0.034852638244628906, 0.034810047149658206, 0.03475833511352539, 0.03484864044189453, 0.03474019241333008, 0.03466700744628906, 0.03521945571899414, 0.034772991180419925, 0.03482828903198242, 0.0377295036315918, 0.03495555114746094, 0.034678783416748044, 0.034740318298339845, 0.034777503967285156, 0.03479497528076172, 0.034711711883544924, 0.03462153625488281, 0.03456335830688476, 0.03453440093994141, 0.03458047866821289, 0.03454140853881836, 0.03469667053222656, 0.0352611198425293, 0.03582489776611328, 0.03490854263305664, 0.03551619338989258, 0.034814655303955076, 0.03463683319091797, 0.03479545593261719, 0.03478384017944336, 0.035119457244873045, 0.03472339248657227, 0.03492499160766602, 0.03494086456298828, 0.035006526947021485, 0.034877086639404295, 0.03505392074584961, 0.03497369766235352, 0.034850624084472655, 0.03595897674560547, 0.03485923385620117, 0.03480348968505859, 0.0357212142944336, 0.03480575942993164, 0.034871551513671876, 0.034724769592285154, 0.03518960189819336, 0.03565692901611328, 0.03472259140014648, 0.034996318817138675, 0.034676353454589845, 0.03467292785644531, 0.03462051010131836, 0.03470409774780273, 0.03492473602294922, 0.034721790313720705, 0.034500606536865236, 0.03556556701660156, 0.03483987045288086, 0.034818302154541014, 0.03463951873779297, 0.03463270568847656, 0.03455158233642578, 0.034686302185058596, 0.0348023681640625, 0.03573961639404297, 0.0346748161315918, 0.03477913665771484, 0.03467977523803711, 0.03472272109985351, 0.03468288040161133, 0.03513897705078125, 0.034468448638916016, 0.03461939239501953, 0.0343900146484375, 0.03466239929199219, 0.03530956649780274, 0.034539329528808595, 0.03467839813232422, 0.03475513458251953, 0.03465216064453125, 0.03524198532104492, 0.03458662414550781, 0.03540943908691406, 0.03477676773071289, 0.03484137725830078, 0.03525734329223633, 0.03476377487182617, 0.03452105712890625, 0.03474179077148438, 0.03461580657958984, 0.037539840698242184, 0.03494911956787109, 0.03482624053955078, 0.03486054229736328, 0.03483494567871094, 0.03456998443603516, 0.03474198532104492, 0.03466460800170899, 0.034939071655273435, 0.034734272003173826, 0.034823551177978515, 0.0348166389465332, 0.035294815063476564, 0.03497929763793945, 0.03599574279785156, 0.0351923828125, 0.0350134391784668, 0.035008670806884766, 0.03500646209716797, 0.0351448974609375, 0.03489465713500976, 0.0348326416015625, 0.03506764984130859, 0.03483238220214844, 0.03489187240600586, 0.03496291351318359, 0.034869152069091795, 0.03498448181152344, 0.03478732681274414, 0.03498188781738281, 0.03517961502075195, 0.03501724624633789, 0.0349409294128418, 0.03471001434326172, 0.034895744323730465, 0.03487744140625, 0.03523788833618164, 0.03465001678466797, 0.03505980682373047, 0.035014686584472654, 0.03487731170654297, 0.035040958404541016, 0.03491436767578125, 0.03496585464477539, 0.03497513580322266, 0.03497974395751953, 0.034699966430664066, 0.03491839981079101, 
0.03483564758300781, 0.03476508712768555, 0.03454188919067383, 0.03476092910766602, 0.03496550369262695, 0.03480166244506836, 0.03498160171508789, 0.034785663604736325, 0.03465615844726563, 0.03464601516723633, 0.034969600677490234, 0.03466239929199219, 0.03453747177124023, 0.03487363052368164, 0.03517804718017578, 0.0351192626953125, 0.035503936767578126, 0.034900161743164064, 0.03487929534912109, 0.03471737670898437, 0.03466086578369141, 0.034869247436523435, 0.03492243194580078, 0.03513286590576172, 0.03515596771240234, 0.034992511749267576, 0.03507353591918945, 0.034854911804199216, 0.036292766571044924, 0.035688449859619144, 0.03485689544677734, 0.03482015991210938, 0.03511705780029297, 0.03475046539306641, 0.03478883361816406, 0.03468956756591797, 0.034557376861572266, 0.03466092681884766, 0.034613056182861326, 0.03480390548706055, 0.03486233520507812, 0.03470822525024414, 0.0346255989074707, 0.03478291320800781, 0.0346827507019043, 0.034886016845703124, 0.034821537017822264, 0.034761249542236326, 0.03469728088378906, 0.03461702346801758, 0.034823646545410154, 0.034886497497558594, 0.034729793548583986, 0.03462339019775391, 0.034431262969970705, 0.03451836776733398, 0.03472563171386719, 0.03469564819335937, 0.0347017936706543, 0.034490081787109376, 0.034738433837890624, 0.034756607055664065, 0.03458591842651367, 0.03466432189941406, 0.03463865661621094, 0.03443609619140625, 0.03460611343383789, 0.035246047973632816, 0.034721790313720705, 0.03470086288452148, 0.034609214782714844, 0.03461772918701172, 0.03480976104736328, 0.03450502395629883, 0.03462531280517578, 0.03471769714355469, 0.03462348937988281, 0.03505904006958008, 0.03482863998413086, 0.03476512145996094, 0.0346317138671875, 0.03472175979614258, 0.03471343994140625, 0.034441375732421876, 0.034770942687988284, 0.035794944763183595, 0.03476019287109375, 0.035883262634277345, 0.038117633819580075, 0.034915359497070315, 0.03495625686645508, 0.03463987350463867, 0.03558348846435547, 0.03515852737426758, 0.03503523254394531, 0.03482614517211914, 0.03472995376586914, 0.034877120971679686, 0.03496099090576172, 0.03487001419067383, 0.03491404724121094, 0.0348408317565918, 0.03511273574829102, 0.03498620986938476, 0.03491609573364258, 0.03497580718994141, 0.03524825668334961, 0.03505340957641601, 0.035005825042724606, 0.03487152099609375, 0.0348985595703125, 0.03504297637939453, 0.03515999984741211, 0.034881950378417965, 0.034915584564208985, 0.03492736053466797, 0.035036670684814454, 0.03499388885498047, 0.03567603302001953, 0.03510879898071289, 0.03510953521728516, 0.03474668884277344, 0.0351391372680664, 0.03492230224609375, 0.034751102447509764, 0.03514572906494141, 0.03514121627807617, 0.035105182647705076, 0.03501670455932617, 0.035044990539550784, 0.035110527038574216, 0.034984703063964846, 0.03484995269775391, 0.03493564987182617, 0.03487696075439453, 0.03466214370727539, 0.03495804977416992, 0.034684383392333984, 0.03451865768432617, 0.0346530876159668, 0.034502273559570314, 0.03452556610107422, 0.03453952026367187, 0.035037185668945314, 0.036206592559814454, 0.03507427215576172, 0.03495030212402344, 0.03543856048583984, 0.0346343994140625, 0.03479504013061523, 0.034631328582763674, 0.03513222503662109, 0.03499008178710938, 0.034705150604248045, 0.034955009460449216, 0.03563622283935547, 0.03511593627929688, 0.03476041412353516, 0.03482243347167969, 0.03474393463134766, 0.03478739166259766, 0.0346640625, 0.034587329864501956, 0.03479347229003906, 0.03483359909057617, 0.03487590408325195, 0.03508780670166016, 
0.03480665588378906, 0.0348993911743164, 0.036255615234375, 0.03500425720214844, 0.035091201782226564, 0.034891456604003904, 0.034817760467529296, 0.03484531021118164, 0.03506118392944336, 0.03500483322143555, 0.034877216339111325, 0.03527737426757813, 0.03507392120361328, 0.03509657669067383, 0.035282463073730466, 0.035018878936767577, 0.0348469123840332, 0.034984062194824216, 0.0350222396850586, 0.035074687957763674, 0.035071006774902345, 0.03496585464477539, 0.03466432189941406, 0.034756385803222656, 0.03483337783813477, 0.034723838806152346, 0.03456204986572266, 0.03480963134765625, 0.0346278076171875, 0.03523088073730469, 0.03460768127441406, 0.03539152145385742, 0.03463398361206055, 0.03463782501220703, 0.03468851089477539, 0.03506659317016601, 0.03490908813476563, 0.03509651184082031, 0.03484134292602539, 0.03503638458251953, 0.03503817749023438, 0.034985984802246094, 0.03511859130859375, 0.035425888061523435, 0.03509910583496094, 0.03679836654663086, 0.03562364959716797, 0.03534214401245117, 0.0349117431640625, 0.034869281768798825, 0.034787361145019534, 0.035657215118408206, 0.03480809783935547, 0.03542416000366211, 0.034654529571533206, 0.03462956619262696, 0.034570304870605466, 0.034435073852539064, 0.03467468643188477, 0.03454102325439453, 0.03451955032348633, 0.034566143035888675, 0.03492979049682617, 0.03448521423339844, 0.03481542587280274, 0.034548225402832033, 0.03464169692993164, 0.03451087951660156, 0.03440864181518555, 0.03471155166625976, 0.034799617767333986, 0.03458457565307617, 0.034492416381835936, 0.03471059036254883, 0.03449132919311523, 0.034592769622802735, 0.03474431991577148, 0.034495616912841795, 0.03448291015625, 0.034597023010253906, 0.03444255828857422, 0.034659008026123046, 0.0352911376953125, 0.03469443130493164, 0.034474720001220704, 0.03466636657714844, 0.035026943206787106, 0.03498160171508789, 0.03462390518188477, 0.03454771041870117, 0.03491430282592774, 0.034797534942626954, 0.03460281753540039, 0.03483776092529297, 0.038601696014404295, 0.03500006484985352, 0.034986240386962894, 0.03495116806030273, 0.03486921691894531, 0.035016735076904296, 0.03483609771728516, 0.034965953826904296, 0.0347168960571289, 0.03474505615234375, 0.03499391937255859, 0.034821407318115234, 0.03477603149414062, 0.03458867263793945, 0.03481603240966797, 0.03458575820922852, 0.03463865661621094, 0.034754558563232424, 0.03472284698486328, 0.03465315246582031, 0.035697792053222654, 0.03640838241577148, 0.03518649673461914, 0.03492816162109375, 0.034946910858154295, 0.03508697509765625, 0.034707584381103516, 0.03469232177734375, 0.03482076644897461, 0.03480985641479492, 0.03466035079956055, 0.03463177490234375, 0.034733310699462894, 0.03473215866088867, 0.03468751907348633, 0.03484444808959961, 0.03491443252563477, 0.03496102523803711, 0.03477347183227539, 0.035356670379638674, 0.034904064178466795, 0.03491385650634766, 0.034734527587890626, 0.03495017623901367, 0.034836639404296876, 0.03476768112182617, 0.03496467208862305, 0.034929473876953124, 0.03490108871459961, 0.03510160064697266, 0.03487539291381836, 0.035151870727539065, 0.035168254852294925, 0.0352421760559082, 0.035299198150634765, 0.035317024230957034, 0.03512918472290039, 0.03496428680419922, 0.03507526397705078, 0.03488351821899414, 0.035066753387451174, 0.03498393630981445, 0.03506175994873047, 0.035055713653564455, 0.03486883163452149, 0.03504140853881836, 0.0350079345703125, 0.035541759490966794, 0.0351723518371582, 0.03527679824829102, 0.03512319946289062, 0.03504127883911133, 0.03484441757202148, 
0.03513520050048828, 0.035022945404052735, 0.03505324935913086, 0.035111614227294925, 0.03519276809692383, 0.03514563369750977, 0.035086559295654296, 0.03529478454589844, 0.035500480651855466, 0.03513043212890625, 0.03586304092407227, 0.03524582290649414, 0.03519247817993164, 0.03531327819824219, 0.035086944580078126, 0.035141185760498045, 0.035646270751953125, 0.03520716857910156, 0.0351148796081543, 0.03507356643676758, 0.03537776184082031, 0.03525187301635742, 0.035299678802490235, 0.0352026252746582, 0.03513766479492188, 0.03572099304199219, 0.0352031021118164, 0.035202816009521486, 0.03512396621704102, 0.03526067352294922, 0.03512294387817383, 0.03523788833618164, 0.03520476913452148, 0.03524441528320312, 0.03521452713012695, 0.03502364730834961, 0.03535257720947266, 0.035350528717041016, 0.03528499221801758, 0.03528006362915039, 0.03521820831298828, 0.03511094284057617, 0.03512115097045899, 0.03510822296142578, 0.03514432144165039, 0.036063232421875, 0.03521641540527344, 0.03503737640380859, 0.03549580764770508, 0.03518502426147461, 0.03502748870849609, 0.034977790832519534, 0.034971649169921876, 0.03508591842651367, 0.03542057418823242, 0.035149726867675785, 0.03501475143432617, 0.03499350357055664, 0.034980510711669924, 0.035173473358154295, 0.03517327880859375, 0.035380577087402346, 0.03543097686767578, 0.03513504028320313, 0.03523638534545898, 0.03522355270385742, 0.03506380844116211, 0.03493593597412109, 0.03518479919433594, 0.03792473602294922, 0.035377952575683595, 0.035086208343505856, 0.03505171203613281]",tokens/s,28.608072341944542,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3900, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,6409.785344,8461.877248,0.0,8059.355136,8042.68544,s,1,14.382447265625,14.382447265625,0.0,14.382447265625,14.382447265625,14.382447265625,14.382447265625,[14.382447265625],,kWh,0.00021150074595419332,2.3322765141045837e-05,6.409421794201231e-05,0.00029891772903725146,,MB,2237.915136,8577.220608,0.0,8160.018432,8135.406592,s,10,3.0637873229980466,0.3063787322998047,0.00039187778093229296,0.30640817260742187,0.3067885864257813,0.30686678161621095,0.3069293377685547,"[0.30595782470703126, 0.3057707214355469, 0.30597088623046875, 0.30608383178710935, 0.3067022399902344, 0.3069449768066406, 0.3064393310546875, 0.3067712097167969, 0.306769287109375, 0.3063770141601562]",tokens/s,835.567136394745,kWh,9.019368110100435e-06,9.94674219594028e-07,5.977117576303094e-06,1.599115990599756e-05,tokens/kWh,16008844.980906358,MB,2240.753664,8608.677888,0.0,8191.475712,8135.409152,s,10,43.204064453125,4.3204064453125,0.004674697733608379,4.3190725097656255,4.32710322265625,4.327339208984375,4.327527998046875,"[4.3222529296875, 4.3162197265625, 4.31194775390625, 4.3182265625, 4.3183408203125, 4.31820556640625, 4.3275751953125, 4.31980419921875, 4.32444091796875, 
4.32705078125]",tokens/s,14.581961395866573,kWh,0.00012554180109156657,1.3847701735797249e-05,7.79561440146957e-05,0.00021734564684205954,tokens/kWh,289860.87789363804,,s,630,43.20147452545167,0.0685737690880185,0.0009696866780491943,0.06851481628417969,0.06936007614135742,0.06981144027709961,0.07203592651367188,"[0.06974018859863282, 0.06857561492919922, 0.06871241760253906, 0.06848054504394531, 0.06833814239501954, 0.06827788543701171, 0.06796320343017578, 0.06894096374511718, 0.06888540649414063, 0.06908108520507812, 0.06871244812011719, 0.06901952362060547, 0.06927372741699218, 0.06923468780517578, 0.06894992065429688, 0.06845654296875, 0.06805913543701173, 0.06857523345947265, 0.06861619567871094, 0.06823321533203125, 0.06760633850097657, 0.06797740936279296, 0.06859776306152343, 0.06817791748046875, 0.06887833404541016, 0.06932685089111328, 0.06855471801757812, 0.06881410980224609, 0.068195068359375, 0.06797516632080078, 0.06744012451171875, 0.06840518188476563, 0.06873308563232422, 0.06820291137695313, 0.06886128234863281, 0.06876016235351562, 0.068720703125, 0.06875667572021485, 0.0680640640258789, 0.0690913314819336, 0.06889228820800782, 0.06858790588378906, 0.06843958282470704, 0.06815586853027343, 0.06833174133300782, 0.07179856109619141, 0.06853427124023438, 0.06877536010742187, 0.06875917053222656, 0.06858345794677734, 0.0682996826171875, 0.06862646484375, 0.06869808197021485, 0.06836019134521484, 0.06981632232666016, 0.06803456115722656, 0.06780921936035156, 0.06817593383789063, 0.06787481689453125, 0.06857027435302734, 0.0686289291381836, 0.06800425720214844, 0.06895616149902344, 0.06901757049560547, 0.06895616149902344, 0.06891929626464843, 0.06859929656982422, 0.06860588836669922, 0.07095878601074218, 0.06818905639648437, 0.06767616271972657, 0.06788438415527344, 0.06859168243408204, 0.06870095825195313, 0.06819107055664063, 0.06860284423828125, 0.06858102416992187, 0.06856719970703125, 0.06915705871582031, 0.068927490234375, 0.06903388977050781, 0.0688575668334961, 0.06858380889892578, 0.06879628753662109, 0.06891273498535157, 0.06865961456298827, 0.06863081359863281, 0.0683918685913086, 0.06815184020996094, 0.06795661163330079, 0.06801769256591797, 0.06812290954589843, 0.06821695709228516, 0.06866575622558593, 0.06884684753417969, 0.06897062683105469, 0.06798140716552735, 0.06849766540527344, 0.06797561645507813, 0.067904541015625, 0.06808576202392579, 0.06798579406738281, 0.06905209350585938, 0.06889155578613282, 0.06881507110595703, 0.06851165008544922, 0.06809353637695313, 0.0675854721069336, 0.06734083557128906, 0.06747283172607423, 0.06762175750732421, 0.0673034210205078, 0.06769049835205078, 0.0677232666015625, 0.06739881896972656, 0.06701715087890625, 0.06841180419921875, 0.068136962890625, 0.07102259063720703, 0.06885785675048828, 0.07206707000732422, 0.06908313751220703, 0.06794179534912109, 0.06870614624023437, 0.06865283203125, 0.06918038177490235, 0.07283715057373047, 0.06878412628173829, 0.06834374237060546, 0.06856710052490235, 0.06837042999267579, 0.06807734680175781, 0.06773894500732422, 0.06870867156982421, 0.06871862030029297, 0.06802694702148437, 0.06904013061523437, 0.06858509063720702, 0.06789158630371094, 0.06781929779052734, 0.06842537689208984, 0.06973471832275391, 0.0684136962890625, 0.06858914947509766, 0.06940509033203125, 0.06861209869384766, 0.06844764709472656, 0.06782832336425781, 0.06859935760498047, 0.0687374725341797, 0.06828009796142578, 0.0680101089477539, 0.06757794952392578, 0.06747545623779297, 0.06701235198974609, 
0.06808191680908203, 0.06702281951904297, 0.06801615905761718, 0.06894358062744141, 0.06807785797119141, 0.06808576202392579, 0.0676495361328125, 0.0678929901123047, 0.06820188903808594, 0.0680538558959961, 0.06926131439208984, 0.06889449310302734, 0.06827574157714844, 0.06829763031005859, 0.06889449310302734, 0.06817791748046875, 0.06782752227783204, 0.06887983703613282, 0.06894461059570313, 0.06898278045654296, 0.0686714859008789, 0.06788710021972656, 0.06879385375976563, 0.06884143829345703, 0.06864950561523438, 0.06749081420898438, 0.06742707061767578, 0.06880201721191406, 0.07153129577636719, 0.06806233978271484, 0.06755007934570313, 0.068384765625, 0.0684189453125, 0.06806134033203125, 0.06955007934570312, 0.0687959976196289, 0.06848700714111328, 0.06873760223388672, 0.06799565124511718, 0.06806304168701172, 0.06741404724121093, 0.06866140747070312, 0.06910915374755859, 0.07057234954833984, 0.0680819549560547, 0.06836803436279297, 0.06892521667480468, 0.06917916870117187, 0.06934812927246094, 0.0696336669921875, 0.07085708618164062, 0.06821887969970702, 0.06875033569335938, 0.0688752670288086, 0.06930841827392578, 0.06900531005859376, 0.06871040344238281, 0.06940451049804687, 0.06844966125488282, 0.06830364990234375, 0.06864425659179688, 0.06805289459228515, 0.06933363342285156, 0.06805117034912109, 0.06778173065185547, 0.06722367858886719, 0.06734502410888672, 0.06777651214599609, 0.06778265380859375, 0.06712525177001953, 0.0669525146484375, 0.06706451416015625, 0.06740534210205078, 0.06781369781494141, 0.06887615966796876, 0.06838658905029296, 0.06862079620361328, 0.06913660430908203, 0.06901510620117188, 0.06865122985839844, 0.06856499481201171, 0.06851583862304687, 0.06915631866455078, 0.06903657531738282, 0.068329345703125, 0.067833984375, 0.06784345245361328, 0.06811507415771484, 0.06848716735839844, 0.0688986587524414, 0.06893174743652344, 0.06911113739013672, 0.06920668792724609, 0.06880255889892578, 0.06883657836914063, 0.06848592376708984, 0.06798336029052734, 0.07347420501708984, 0.06904688262939453, 0.06868800354003907, 0.06784614562988281, 0.06803250885009765, 0.06802754974365234, 0.06823407745361328, 0.06741401672363281, 0.06794191741943359, 0.06965090942382812, 0.06936707305908203, 0.07099775695800781, 0.07047634887695313, 0.06884188842773438, 0.06896844482421875, 0.06849126434326172, 0.06847487640380859, 0.06853427124023438, 0.06836217498779297, 0.06827145385742188, 0.06923951721191406, 0.06854041290283203, 0.06847897338867187, 0.06798540496826172, 0.06777037048339844, 0.06893891143798828, 0.06859203338623047, 0.06870819091796875, 0.06962032318115234, 0.06934957122802735, 0.06848880004882812, 0.06898918151855468, 0.0685219497680664, 0.06822502136230468, 0.06850969696044922, 0.06820214080810547, 0.06740720367431641, 0.06721638488769531, 0.06713753509521485, 0.06685081481933594, 0.06684998321533203, 0.06816162872314453, 0.06815401458740235, 0.06822509002685546, 0.06872268676757813, 0.06802227020263672, 0.06822297668457031, 0.06825574493408203, 0.0685137939453125, 0.06826188659667969, 0.06836838531494141, 0.06847277069091796, 0.06915078735351562, 0.06870220947265625, 0.06903833770751953, 0.06852528381347656, 0.06826223754882813, 0.06796896362304687, 0.06895436859130859, 0.06831657409667968, 0.06793619537353515, 0.06839148712158204, 0.06868386840820312, 0.07402086639404297, 0.06886585235595703, 0.06822303771972656, 0.06752230072021484, 0.06767040252685547, 0.0681533432006836, 0.06766796875, 0.06766387176513672, 0.0677701416015625, 0.06841481781005859, 0.06760761260986328, 
0.06759404754638672, 0.06745702362060547, 0.07195967864990234, 0.06935846710205078, 0.06943743896484375, 0.06949228668212891, 0.06872108459472656, 0.06825122833251954, 0.06939485168457031, 0.06926131439208984, 0.06927273559570313, 0.06859171295166015, 0.068421630859375, 0.06903884887695312, 0.06909500885009766, 0.06917890930175781, 0.06982240295410157, 0.06819731140136719, 0.06868141174316406, 0.06782598114013672, 0.06841343688964843, 0.06854150390625, 0.06831523132324219, 0.06847574615478516, 0.06803453063964844, 0.06783184051513672, 0.06760243225097656, 0.06748159790039063, 0.06740582275390625, 0.06824896240234375, 0.06810483551025391, 0.06803606414794922, 0.06749443054199218, 0.06727474975585937, 0.06886732482910156, 0.06833843231201171, 0.06733618927001953, 0.06771711730957031, 0.06926131439208984, 0.06875135803222657, 0.0685871353149414, 0.06842406463623046, 0.06854962921142578, 0.0692457275390625, 0.06906902313232421, 0.06908460998535157, 0.06914924621582032, 0.06816563415527344, 0.06807756805419922, 0.06906470489501954, 0.06849740600585938, 0.06788301086425781, 0.06900982666015625, 0.0684783706665039, 0.06836211395263672, 0.06815817260742188, 0.06821651458740234, 0.06742374420166015, 0.0668303985595703, 0.06728313446044921, 0.06690425872802734, 0.06679795074462891, 0.06730547332763671, 0.07846092987060548, 0.06898416137695312, 0.06810387420654297, 0.06852435302734375, 0.06862710571289063, 0.06868172454833985, 0.06840431976318359, 0.06828125, 0.06842515563964843, 0.06766000366210938, 0.06711241912841796, 0.07178943634033202, 0.06930973052978516, 0.06957027435302734, 0.06940684509277344, 0.06979615783691406, 0.06972000122070313, 0.07001561737060547, 0.06980547332763672, 0.06982886505126953, 0.06995382690429687, 0.06910777282714843, 0.06851583862304687, 0.06781520080566407, 0.06812796783447266, 0.06846566772460938, 0.06838886260986328, 0.0673276138305664, 0.06800434875488282, 0.0683636474609375, 0.0682044448852539, 0.0692384033203125, 0.06908147430419923, 0.06890918731689454, 0.06905494689941406, 0.06848102569580078, 0.06815740966796875, 0.06830457305908202, 0.06834210968017577, 0.06857113647460937, 0.06846435546875, 0.06839091491699219, 0.06849919891357421, 0.06881913757324219, 0.06848089599609375, 0.06841715240478516, 0.06842454528808593, 0.06868959808349609, 0.06868000030517578, 0.06804889678955078, 0.06808108520507812, 0.0686022720336914, 0.07050252532958984, 0.07033090972900391, 0.07062322998046874, 0.06961151885986328, 0.07002054595947266, 0.06907347106933594, 0.06872835540771484, 0.06885219573974609, 0.06863053131103515, 0.06930636596679687, 0.06838800048828125, 0.06821884918212891, 0.07270591735839843, 0.06849212646484375, 0.06837411499023438, 0.06813343811035157, 0.06753587341308594, 0.06701526641845704, 0.06749606323242187, 0.06885734558105469, 0.0712649917602539, 0.06891494750976562, 0.06889504241943359, 0.06886399841308594, 0.0684290542602539, 0.06881951904296875, 0.06796080017089844, 0.06823753356933594, 0.06763455963134765, 0.06722166442871094, 0.06740214538574218, 0.06719084930419922, 0.06682393646240234, 0.06712140655517577, 0.06709248352050781, 0.0688005142211914, 0.06869606781005859, 0.06942623901367187, 0.06895021057128906, 0.06921702575683594, 0.06880217742919922, 0.0679466552734375, 0.06782697296142579, 0.06872569274902343, 0.06836428833007813, 0.06812572479248047, 0.0686335678100586, 0.06818125152587891, 0.06780582427978515, 0.06785561370849609, 0.06926220703125, 0.06868172454833985, 0.06905149078369141, 0.06827305603027344, 0.06839437103271484, 
0.06886873626708985, 0.06954930877685547, 0.06861081695556641, 0.0683985595703125, 0.06806172943115234, 0.06763286590576172, 0.06728864288330078, 0.06735126495361328, 0.06900531005859376, 0.068504638671875, 0.06833814239501954, 0.06859414672851563, 0.06857750701904297, 0.06884534454345703, 0.06851993560791016, 0.06799359893798829, 0.06750323486328125, 0.06912089538574219, 0.06879641723632812, 0.06815872192382813, 0.06931276702880859, 0.06874364471435547, 0.06868790435791015, 0.06849680328369141, 0.06819286346435546, 0.07369286346435547, 0.06868009948730469, 0.06857308959960938, 0.07090089416503906, 0.06857199859619141, 0.06954307556152343, 0.06834159851074219, 0.06898953247070312, 0.06844204711914062, 0.06856752014160156, 0.06956175994873047, 0.06878678131103516, 0.06829055786132812, 0.06910281372070312, 0.06908182525634765, 0.06891321563720704, 0.07130928039550781, 0.06849874877929688, 0.06805487823486328, 0.0685472640991211, 0.06857337951660156, 0.068136962890625, 0.06787686157226562, 0.06765721893310547, 0.06818867492675781, 0.069328125, 0.06925798034667968, 0.06856294250488282, 0.06852377319335938, 0.06908271789550781, 0.06902413177490234, 0.06836617279052734, 0.06815074920654297, 0.06837757110595703, 0.06916505432128907, 0.06850701141357422, 0.06795123291015626, 0.0677396469116211, 0.06725222778320313, 0.06951526641845703, 0.06751026916503906, 0.06817171478271485, 0.06747756958007813, 0.06771260833740235, 0.06721366119384765, 0.06701881408691407, 0.06941065979003906, 0.0692152328491211, 0.068216064453125, 0.06808243560791015, 0.06799961853027343, 0.06896979522705078, 0.06853826904296875, 0.06787884521484375, 0.06766690826416015, 0.06795468902587891, 0.06911106872558594, 0.06918217468261718, 0.0683985595703125, 0.0689505615234375, 0.06898073577880859, 0.06855872344970704, 0.06924505615234375, 0.06781900787353516, 0.0686735076904297, 0.06858601379394531, 0.06848297882080077, 0.06815676879882812, 0.06801484680175782, 0.07057561492919921, 0.06751078033447265, 0.06737094116210937, 0.06745228576660156, 0.06710924530029297, 0.06829036712646484, 0.06712310028076172, 0.06701731109619141, 0.06875341033935548, 0.06876943969726562, 0.06851763153076172, 0.06840281677246093, 0.06861837005615234, 0.06901641845703126, 0.06947634887695313, 0.06935929870605469, 0.06942137908935547, 0.06892095947265625, 0.06883570861816406, 0.06964019012451172, 0.06885580444335937, 0.0685506591796875, 0.0690851821899414, 0.06916422271728516, 0.06904300689697265, 0.07118643188476563, 0.06943734741210937, 0.06901990509033203, 0.06937181091308593, 0.06919776153564453, 0.06905152130126953, 0.06878912353515625, 0.07030374145507813, 0.06968319702148437, 0.0695640640258789, 0.06860771179199218, 0.06800819396972656, 0.06762534332275391, 0.0678809585571289, 0.06809986877441407]",tokens/s,14.582835584207727,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,7434.821632,8041.463808,0.0,7646.216192,7627.584,s,1,13.1314404296875,13.1314404296875,0.0,13.1314404296875,13.1314404296875,13.1314404296875,13.1314404296875,[13.1314404296875],,kWh,0.00017221434909583307,1.8989107224086042e-05,5.507671072799841e-05,0.0002462801670479175,,MB,1756.71296,8725.13536,0.0,8315.20768,8191.863296,s,10,3.325114532470703,0.33251145324707027,0.00033927029372321884,0.3324805603027344,0.3330387817382812,0.33309107666015625,0.3331329125976562,"[0.33262384033203124, 0.3321598205566406, 0.33248501586914064, 0.3324128112792969, 0.3324761047363281, 0.3325360107421875, 0.33198587036132815, 0.33302716064453125, 0.3322645263671875, 0.3331433715820312]",tokens/s,769.8982922244818,kWh,9.734977347083315e-06,1.0735828873529782e-06,6.439773670333278e-06,1.724833390476957e-05,tokens/kWh,14842013.229417477,MB,1763.782656,9039.70816,0.0,8629.78048,8480.067584,s,10,26.70110595703125,2.6701105957031253,0.0035851400564946515,2.6707076416015623,2.6737568847656252,2.6743437744140626,2.6748132861328124,"[2.663013671875, 2.666642333984375, 2.66614990234375, 2.67047509765625, 2.669856201171875, 2.673041748046875, 2.6724296875, 2.670940185546875, 2.67362646484375, 2.6749306640625]",tokens/s,23.59452829458927,kWh,7.812808360291664e-05,8.617620039500167e-06,5.196985639066686e-05,0.00013871556003308366,tokens/kWh,454166.7855067917,,s,630,26.691571811676045,0.04236757430424766,0.000393671327118452,0.0423721923828125,0.04286623229980469,0.042984235000610355,0.04324060020446777,"[0.041927009582519534, 0.04157680130004883, 0.041426399230957034, 0.0416278076171875, 0.0417143669128418, 0.041723838806152345, 0.04161856079101563, 0.04187599945068359, 0.041927009582519534, 0.04173110580444336, 0.04178969573974609, 0.04169180679321289, 0.04173830413818359, 0.041983039855957034, 0.041898239135742185, 0.041853633880615235, 0.04182342529296875, 0.04184352111816406, 0.04213555145263672, 0.04209664154052734, 0.042259681701660154, 0.04209539031982422, 0.04245913696289062, 0.04239974212646484, 0.04214374542236328, 0.04191641616821289, 0.04220278549194336, 0.041968990325927734, 0.04209151840209961, 0.042178558349609374, 0.042478622436523436, 0.04244102478027344, 0.04234675216674805, 0.04231209564208984, 0.042434558868408204, 0.04232720184326172, 0.042296161651611326, 0.04229683303833008, 0.04227462387084961, 0.04238380813598633, 0.04246268844604492, 0.04268316650390625, 0.04272470474243164, 0.04268304061889648, 0.04268409729003906, 0.04245945739746094, 0.042510337829589843, 0.042387454986572266, 0.04243395233154297, 0.04242639923095703, 0.04258041763305664, 0.04266201782226563, 0.04284995269775391, 0.042826080322265626, 0.042698047637939454, 0.04258272171020508, 0.04248086547851562, 0.04256857681274414, 0.04271503829956055, 0.04291123199462891, 0.0429156494140625, 0.04270560073852539, 0.04282720184326172, 0.04180511856079101, 0.041666366577148437, 0.041623615264892576, 0.04158342361450195, 0.041869312286376956, 0.041850879669189454, 0.041744384765625, 0.041562110900878906, 0.04169254302978516, 0.041775615692138675, 0.04173580932617187, 0.04190188980102539, 0.04201337432861328, 0.04211507034301758, 0.042031265258789065, 0.041944766998291014, 0.04196905517578125, 0.041888160705566405, 0.0421830062866211, 0.04215795135498047, 0.04209382247924805, 0.0421662712097168, 0.042275585174560544, 0.04227459335327149, 0.042252799987792966, 0.04200374221801758, 0.04205001449584961, 0.0421453742980957, 0.04223027038574219, 0.04225830459594727, 
0.042229888916015625, 0.04231516647338867, 0.04233814239501953, 0.04233606338500977, 0.042516864776611325, 0.04276076889038086, 0.04254860687255859, 0.04248579025268555, 0.042396255493164066, 0.04244678497314453, 0.04245100784301758, 0.04271104049682617, 0.04272742462158203, 0.04276224136352539, 0.042575870513916016, 0.042842113494873046, 0.04261068725585938, 0.042487648010253905, 0.04245753479003906, 0.04244438552856445, 0.04295395278930664, 0.042678688049316404, 0.0429349136352539, 0.04279225540161133, 0.042709121704101564, 0.0425968017578125, 0.042665985107421874, 0.04289913558959961, 0.04272364807128906, 0.04276220703125, 0.04272335815429688, 0.04295270538330078, 0.04296633529663086, 0.042206401824951174, 0.04178425598144531, 0.041774528503417965, 0.04158303833007813, 0.04165631866455078, 0.04204748916625976, 0.04200243377685547, 0.041678848266601565, 0.04156415939331055, 0.041772289276123045, 0.041857791900634767, 0.04194918441772461, 0.041744384765625, 0.04239155197143555, 0.042246143341064454, 0.04211916732788086, 0.04206143951416016, 0.04201692962646485, 0.04198806381225586, 0.04207632064819336, 0.04202681732177734, 0.04201910400390625, 0.04217446517944336, 0.04223311996459961, 0.04225059127807617, 0.042113407135009766, 0.042057727813720705, 0.04208838272094727, 0.042196990966796875, 0.04205491256713867, 0.041995006561279295, 0.04205779266357422, 0.04202310562133789, 0.04218828964233398, 0.04222140884399414, 0.04227734375, 0.042681407928466794, 0.04279391860961914, 0.042597824096679685, 0.042528350830078124, 0.04249264144897461, 0.04236854553222656, 0.042638240814208986, 0.042686046600341795, 0.04260067367553711, 0.04252467346191406, 0.043038719177246096, 0.04281756973266602, 0.04265580749511719, 0.04258332824707031, 0.042621566772460935, 0.04274585723876953, 0.04293017578125, 0.042788864135742184, 0.04258377456665039, 0.04250377655029297, 0.04268233489990234, 0.04274454498291016, 0.0427803840637207, 0.04259161758422852, 0.04290876770019531, 0.042936126708984376, 0.04284592056274414, 0.04192160034179687, 0.04171846389770508, 0.041963905334472654, 0.041729759216308594, 0.04205583953857422, 0.04188275146484375, 0.04182720184326172, 0.041839839935302735, 0.04180252838134765, 0.04187136077880859, 0.04187136077880859, 0.041799678802490234, 0.042162174224853514, 0.04217036819458008, 0.04211916732788086, 0.04203519821166992, 0.04244275283813476, 0.04232396697998047, 0.042262527465820314, 0.042074111938476565, 0.04208003234863281, 0.04195145416259766, 0.042280960083007815, 0.04228300857543945, 0.04214169692993164, 0.04222540664672852, 0.04210918426513672, 0.042315006256103516, 0.04231654357910156, 0.04226588821411133, 0.04215267181396484, 0.04217446517944336, 0.042288192749023436, 0.042453567504882814, 0.04263734436035156, 0.0426295051574707, 0.042618846893310545, 0.04255539321899414, 0.042487777709960935, 0.042624095916748046, 0.04264441680908203, 0.0425346565246582, 0.04261814498901367, 0.042820575714111325, 0.042856449127197264, 0.04266368103027344, 0.04261065673828125, 0.0427542724609375, 0.042507392883300785, 0.04242345428466797, 0.042788192749023436, 0.04283843231201172, 0.04267625427246094, 0.04257398223876953, 0.042676063537597654, 0.04284396743774414, 0.042877120971679686, 0.04275404739379883, 0.04267929458618164, 0.04285702514648437, 0.04296748733520508, 0.04285161590576172, 0.043114559173583984, 0.04197580718994141, 0.041880672454833984, 0.041864097595214846, 0.041987808227539065, 0.04195923233032227, 0.04186710357666015, 0.041667198181152346, 0.041678367614746095, 
0.04180748748779297, 0.04172208023071289, 0.04168374252319336, 0.04180774307250976, 0.042082271575927734, 0.04252057647705078, 0.04234204864501953, 0.042207584381103516, 0.0420285758972168, 0.042005985260009766, 0.042017345428466794, 0.04199468612670899, 0.04205344009399414, 0.04207360076904297, 0.04204105758666992, 0.04202364730834961, 0.04209910583496094, 0.042159969329833985, 0.04230963134765625, 0.042364158630371095, 0.04226284790039062, 0.0423328971862793, 0.042569377899169925, 0.04230368041992188, 0.04230131149291992, 0.04230758285522461, 0.04231782531738281, 0.0425241584777832, 0.042608448028564457, 0.04263324737548828, 0.04249196624755859, 0.042472000122070315, 0.04258544158935547, 0.04244140625, 0.042401790618896484, 0.042674175262451174, 0.042611839294433594, 0.042494911193847656, 0.04250137710571289, 0.04238528060913086, 0.042775360107421875, 0.042616832733154295, 0.042536609649658205, 0.04278665542602539, 0.04324169540405273, 0.042938495635986326, 0.04284630584716797, 0.042848033905029295, 0.04286288070678711, 0.04271104049682617, 0.04332748794555664, 0.04276633453369141, 0.04303436660766601, 0.043036415100097654, 0.04294844818115234, 0.041867008209228514, 0.04173574447631836, 0.041955486297607425, 0.04199603271484375, 0.04190902328491211, 0.04210883331298828, 0.04204553604125977, 0.041998336791992184, 0.042057727813720705, 0.04193894577026367, 0.041875583648681644, 0.04183395385742188, 0.04192524719238281, 0.04215727996826172, 0.042248767852783205, 0.042229759216308595, 0.04220723342895508, 0.04214726257324219, 0.042011070251464847, 0.04241420745849609, 0.04222908782958985, 0.042062496185302736, 0.04187347030639649, 0.041766849517822266, 0.0420843505859375, 0.04235059356689453, 0.042278911590576174, 0.04214988708496094, 0.0424898567199707, 0.04251004791259766, 0.04244438552856445, 0.04237107086181641, 0.04236470413208008, 0.042296127319335936, 0.04255267333984375, 0.042574592590332035, 0.042509761810302735, 0.042500606536865236, 0.04255478286743164, 0.04237955093383789, 0.042508544921875, 0.04239369583129883, 0.04259638214111328, 0.04272851181030273, 0.04280825424194336, 0.04259209442138672, 0.04305459213256836, 0.04279119873046875, 0.04281078338623047, 0.04254719924926758, 0.042554622650146486, 0.042774177551269534, 0.04287897491455078, 0.042831745147705075, 0.042870944976806644, 0.042729503631591795, 0.04327328109741211, 0.04318636703491211, 0.04281779098510742, 0.04299020767211914, 0.0431328010559082, 0.04307712173461914, 0.043028640747070315, 0.042335422515869144, 0.042154239654541015, 0.04211974334716797, 0.042049537658691405, 0.04212678527832031, 0.04160528182983399, 0.04165795135498047, 0.04182041549682617, 0.041779777526855466, 0.04172934341430664, 0.04185494232177735, 0.04206460952758789, 0.04210812759399414, 0.04244969558715821, 0.04226047897338867, 0.04216169738769531, 0.042016288757324216, 0.041927009582519534, 0.04183849716186523, 0.041984703063964846, 0.04212268829345703, 0.04195180892944336, 0.04214988708496094, 0.042240001678466796, 0.04215145492553711, 0.042590335845947264, 0.04241852951049805, 0.04220633697509766, 0.04223241424560547, 0.042152225494384764, 0.04214374542236328, 0.04222127914428711, 0.04233833694458008, 0.042467582702636716, 0.04269667053222656, 0.042729633331298825, 0.04262899017333984, 0.0430489616394043, 0.04265155029296875, 0.04245721435546875, 0.04231164932250977, 0.04263731384277344, 0.042659839630126956, 0.04257724761962891, 0.04266870498657226, 0.04298342514038086, 0.04278636932373047, 0.04257583999633789, 0.042498687744140624, 
0.042675262451171876, 0.04284284973144531, 0.04288108825683594, 0.042644992828369144, 0.04269635009765625, 0.04328944015502929, 0.042947742462158205, 0.04269667053222656, 0.042853248596191405, 0.04299980926513672, 0.04298489761352539, 0.04282799911499023, 0.043294910430908204, 0.04306972885131836, 0.04217116928100586, 0.041768959045410156, 0.04166156768798828, 0.04189273452758789, 0.041783294677734374, 0.04159196853637695, 0.04169539260864258, 0.04176761627197265, 0.04173619079589844, 0.041760768890380856, 0.04199423980712891, 0.04200003051757813, 0.042133377075195315, 0.04237564849853516, 0.04209814453125, 0.04211334228515625, 0.0419730224609375, 0.04205049514770508, 0.04205545425415039, 0.042074337005615234, 0.041875232696533204, 0.04217164611816406, 0.042850753784179685, 0.042227745056152344, 0.042342910766601564, 0.042417247772216796, 0.04224911880493164, 0.04230348968505859, 0.04217446517944336, 0.0420906867980957, 0.04194300842285156, 0.04215792083740234, 0.04257318496704102, 0.04254294586181641, 0.04240806579589844, 0.04275820922851563, 0.042740318298339845, 0.04277657699584961, 0.042602497100830077, 0.04245280075073242, 0.04235225677490234, 0.042637630462646486, 0.04275580978393555, 0.04259280014038086, 0.04287078475952148, 0.04274176025390625, 0.0426761589050293, 0.04259436798095703, 0.04246252822875977, 0.04282371139526367, 0.042823806762695316, 0.042777118682861326, 0.042874496459960935, 0.04279715347290039, 0.04269903945922852, 0.04248112106323242, 0.04250672149658203, 0.042549312591552736, 0.04286572647094727, 0.043119552612304685, 0.04332284927368164, 0.043237918853759764, 0.04299980926513672, 0.04216915130615234, 0.04190537643432617, 0.041861919403076174, 0.04169321441650391, 0.042076126098632816, 0.042071361541748044, 0.04181472015380859, 0.04171798324584961, 0.041667903900146484, 0.04170595169067383, 0.041813087463378903, 0.042107807159423825, 0.042426368713378904, 0.04256697463989258, 0.04231647872924805, 0.042156032562255856, 0.04213324737548828, 0.04211328125, 0.04209664154052734, 0.04247065734863281, 0.04213772964477539, 0.04197849655151367, 0.04215750503540039, 0.04227129745483398, 0.04228915023803711, 0.042291393280029295, 0.04321859359741211, 0.04245110321044922, 0.04231167984008789, 0.04234035110473633, 0.04213759994506836, 0.042147838592529296, 0.04227686309814453, 0.042469375610351565, 0.042503646850585934, 0.04244089508056641, 0.04292643356323242, 0.043276287078857424, 0.042648990631103514, 0.0424884147644043, 0.04230144119262695, 0.04263033676147461, 0.04266067123413086, 0.04263724899291992, 0.04262508773803711, 0.042510337829589843, 0.04254848098754883, 0.04268518447875977, 0.04275404739379883, 0.04284620666503906, 0.04272537612915039, 0.04282108688354492, 0.04296707153320312, 0.04277936172485351, 0.042624801635742185, 0.04258998489379883, 0.0425588493347168, 0.04265865707397461, 0.04302438354492188, 0.043235328674316405, 0.04308377456665039, 0.04298342514038086, 0.0429189453125, 0.0421847038269043, 0.04186521530151367, 0.0417628173828125, 0.04191231918334961, 0.041975902557373046, 0.041996192932128903, 0.04210073471069336, 0.041744384765625, 0.04187059020996094, 0.04187827301025391, 0.04204544067382812, 0.04222956848144531, 0.042348735809326174, 0.042284641265869144, 0.04222995376586914, 0.04205590438842773, 0.041992416381835936, 0.04210979080200195, 0.042091457366943356, 0.04237331390380859, 0.04260787200927734, 0.04246291351318359, 0.04239011383056641, 0.04249446487426758, 0.04237443161010742, 0.042430912017822266, 0.04236038589477539, 0.04219084930419922, 
0.042076576232910154, 0.042051681518554686, 0.04219062423706055, 0.042402015686035154, 0.0424898567199707, 0.042456192016601564, 0.04285440063476562, 0.04267433547973633, 0.04248649597167969, 0.042235008239746095, 0.04235968017578125, 0.0427883186340332, 0.042635807037353514, 0.042603679656982425, 0.042655902862548827, 0.04265251159667969, 0.04271638488769531, 0.04279769515991211, 0.0425984001159668, 0.04246112060546875, 0.04268230438232422, 0.042952831268310544, 0.04297299194335937, 0.04288735961914063, 0.04283087921142578, 0.04267827224731445, 0.04261526489257812, 0.04253747177124023, 0.04294246292114258, 0.043157215118408206, 0.04305692672729492, 0.04304470443725586, 0.0429595832824707, 0.04298854446411133, 0.042895870208740236]",tokens/s,23.602956185757904,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,828.788736,551.419904,0.0,148.897792,141.633024,s,1,7.741388671875,7.741388671875,0.0,7.741388671875,7.741388671875,7.741388671875,7.741388671875,[7.741388671875],,kWh,1.3282514583306692e-05,1.4579288838518787e-06,3.4705583319971645e-06,1.8211001799155733e-05,,MB,1286.815744,666.763264,0.0,249.561088,216.900608,s,24,0.2283538246154785,0.009514742692311605,9.247919403711678e-05,0.009495423793792725,0.009665487670898438,0.009699132537841796,0.009717421073913574,"[0.009618335723876954, 0.009569696426391602, 0.00950707244873047, 0.009437824249267578, 0.009430975914001464, 0.009419008255004883, 0.00955884838104248, 0.009485887527465821, 0.009503199577331542, 0.009548128128051758, 0.009722175598144532, 0.009492064476013183, 0.009701503753662109, 0.009566816329956054, 0.009379712104797364, 0.009493439674377442, 0.00968569564819336, 0.009525792121887207, 0.009381952285766602, 0.0094967679977417, 0.009489919662475586, 0.00949407958984375, 0.009414560317993164, 0.009430368423461915]",tokens/s,26905.6146107725,kWh,2.7324391676418566e-07,3.013398259838614e-08,1.5521964144569402e-07,4.585975408082658e-07,tokens/kWh,558223665.0218554,MB,1300.660224,691.929088,0.0,274.726912,216.903168,s,24,9.959627105712892,0.4149844627380371,0.0035268303696430613,0.4136750030517578,0.4200561981201172,0.4223488891601562,0.4229158642578125,"[0.41335665893554685, 0.41289883422851564, 0.4138035888671875, 0.4166886291503906, 0.4123409729003906, 0.4124916687011719, 0.41617205810546876, 0.4125946960449219, 0.42080743408203125, 0.42300396728515627, 0.4162764892578125, 0.41183526611328125, 0.4226209106445312, 0.4123507080078125, 0.41131216430664064, 0.417663330078125, 0.4178253173828125, 0.40979183959960935, 0.4147850341796875, 0.4135464172363281, 0.4157795715332031, 0.41227035522460936, 0.4111078796386719, 
0.41830331420898437]",tokens/s,151.81291266745413,kWh,1.1788113280402638e-05,1.2998630534784228e-06,4.431472916794264e-06,1.7519449250675326e-05,tokens/kWh,3596003.4529950493,,s,1512,9.94787100791931,0.006579279767142401,0.0001778806083774989,0.0065442719459533685,0.006709456157684326,0.006800131249427795,0.007326127719879154,"[0.006300704002380371, 0.006636544227600098, 0.006560704231262207, 0.006696959972381592, 0.0066143999099731446, 0.006555359840393066, 0.006548384189605713, 0.006578112125396728, 0.006514656066894531, 0.0065630397796630855, 0.006534016132354737, 0.006467584133148193, 0.00659171199798584, 0.006660031795501709, 0.006529376029968261, 0.006666880130767822, 0.006636767864227295, 0.006521503925323486, 0.006520671844482422, 0.006576288223266602, 0.006567935943603515, 0.006696383953094482, 0.0065214080810546875, 0.006535168170928955, 0.006536479949951172, 0.006470592021942139, 0.006487840175628662, 0.006498623847961426, 0.006575488090515137, 0.006526495933532715, 0.006470304012298584, 0.00648854398727417, 0.006520480155944824, 0.0064737281799316405, 0.006561791896820069, 0.00650601577758789, 0.006471424102783203, 0.006480607986450195, 0.006444352149963379, 0.006486656188964844, 0.006500256061553955, 0.006459551811218262, 0.0065372161865234375, 0.006774784088134766, 0.00653926420211792, 0.0065064959526062015, 0.006502399921417237, 0.006508543968200684, 0.006520480155944824, 0.006551040172576904, 0.006636544227600098, 0.006624415874481201, 0.006548160076141357, 0.006592512130737305, 0.006633535861968994, 0.0065797758102417, 0.006590623855590821, 0.006620992183685303, 0.006556384086608887, 0.006710976123809814, 0.0065411520004272465, 0.006592415809631348, 0.006656256198883057, 0.0063055682182312014, 0.006614048004150391, 0.006522111892700196, 0.00654307222366333, 0.00659660816192627, 0.00649567985534668, 0.006511072158813477, 0.006530752182006836, 0.006569695949554443, 0.006616896152496338, 0.006564735889434814, 0.006572224140167236, 0.0065103998184204105, 0.006452960014343262, 0.006520895957946777, 0.00650275182723999, 0.006543231964111328, 0.006546527862548828, 0.007154399871826172, 0.006737279891967774, 0.006591296195983887, 0.006773888111114502, 0.0065641279220581055, 0.0065155839920043946, 0.006505216121673584, 0.006440095901489258, 0.006543295860290527, 0.006446976184844971, 0.0065088639259338376, 0.006472991943359375, 0.006447519779205322, 0.006534656047821045, 0.006433568000793457, 0.006481503963470459, 0.006535359859466553, 0.0064386558532714844, 0.006654143810272217, 0.006490111827850342, 0.006532288074493408, 0.0065289921760559085, 0.006502943992614746, 0.006522304058074952, 0.006518655776977539, 0.006504640102386474, 0.006523712158203125, 0.006533120155334473, 0.006518335819244385, 0.006508992195129394, 0.006529024124145508, 0.006643360137939453, 0.006590112209320069, 0.006567903995513916, 0.006535903930664062, 0.006565887928009034, 0.0066408319473266605, 0.006640160083770752, 0.006570271968841553, 0.006518784046173095, 0.006575104236602783, 0.006527999877929688, 0.006508543968200684, 0.006511807918548584, 0.006488383769989014, 0.006248095989227295, 0.006601408004760743, 0.006734911918640137, 0.006483136177062988, 0.006516479969024659, 0.006512639999389648, 0.006462656021118164, 0.006524896144866944, 0.006499392032623291, 0.006506112098693848, 0.006493824005126953, 0.0064839677810668945, 0.006492703914642334, 0.006508736133575439, 0.006893407821655273, 0.006908224105834961, 0.006617792129516601, 0.006523935794830322, 0.006576352119445801, 0.006602464199066162, 
0.006529024124145508, 0.006596640110015869, 0.0065493440628051755, 0.0064718079566955565, 0.006552608013153076, 0.006499263763427734, 0.0064962878227233884, 0.00653334379196167, 0.0065450558662414554, 0.006617119789123535, 0.006547552108764648, 0.006737919807434082, 0.006651103973388672, 0.006595359802246094, 0.006553599834442139, 0.0065188159942626955, 0.006592351913452149, 0.00647321605682373, 0.006574143886566162, 0.006547135829925537, 0.00647052812576294, 0.006619135856628418, 0.006532192230224609, 0.006550271987915039, 0.0065783681869506835, 0.006477791786193848, 0.006666016101837158, 0.006524320125579834, 0.006490464210510254, 0.006578783988952636, 0.006880767822265625, 0.006569568157196045, 0.006586751937866211, 0.006578591823577881, 0.006500351905822754, 0.006575679779052735, 0.006568511962890625, 0.0064899840354919434, 0.006524928092956543, 0.00643891191482544, 0.006666240215301514, 0.006551551818847656, 0.006518784046173095, 0.006548384189605713, 0.006497056007385254, 0.006589983940124512, 0.00645740795135498, 0.0065519680976867676, 0.0064880638122558594, 0.006455071926116943, 0.006540544033050537, 0.006468575954437256, 0.00659660816192627, 0.006451200008392334, 0.006475200176239013, 0.006608767986297608, 0.006623936176300049, 0.006662144184112549, 0.00659660816192627, 0.006680575847625733, 0.006578335762023926, 0.006567296028137207, 0.0065502400398254395, 0.006526303768157959, 0.006558112144470215, 0.006514304161071778, 0.006506847858428955, 0.006645472049713135, 0.0064720001220703125, 0.006514656066894531, 0.0065259838104248045, 0.006482175827026367, 0.006515615940093994, 0.006467423915863037, 0.006621183872222901, 0.007513472080230713, 0.00847935962677002, 0.007329792022705078, 0.0066377601623535155, 0.006679488182067871, 0.006616191864013672, 0.006529920101165772, 0.0065032958984375, 0.00653107213973999, 0.006510591983795166, 0.006489503860473633, 0.006491968154907227, 0.006523680210113525, 0.0065123200416564945, 0.00656934404373169, 0.006538176059722901, 0.006520319938659668, 0.006539008140563965, 0.0065127677917480465, 0.006584799766540527, 0.006496096134185791, 0.006533440113067627, 0.006589695930480957, 0.0065318398475646975, 0.006627359867095947, 0.006597824096679687, 0.006694719791412353, 0.0065937919616699215, 0.006522336006164551, 0.0065409278869628905, 0.006533919811248779, 0.00631388807296753, 0.006461472034454346, 0.006493311882019043, 0.0065363521575927735, 0.006458559989929199, 0.006531744003295898, 0.006547391891479492, 0.006672544002532959, 0.006544672012329102, 0.006637663841247559, 0.006540031909942627, 0.00654691219329834, 0.006529183864593506, 0.006484064102172851, 0.006766784191131592, 0.006555327892303467, 0.006541440010070801, 0.006558015823364258, 0.006513472080230713, 0.006613887786865234, 0.00653926420211792, 0.006483327865600586, 0.006553728103637696, 0.006510752201080322, 0.006537375926971435, 0.006531231880187989, 0.006471712112426758, 0.006501984119415283, 0.006564479827880859, 0.006502208232879639, 0.006531392097473145, 0.006601856231689453, 0.006516704082489014, 0.006552127838134766, 0.006541056156158447, 0.006477920055389405, 0.006512864112854004, 0.006569503784179687, 0.006500895977020264, 0.006503839969635009, 0.006478079795837402, 0.006543871879577637, 0.006471392154693604, 0.006615327835083008, 0.006635231971740723, 0.006561600208282471, 0.00655401611328125, 0.006508319854736328, 0.006589439868927002, 0.00653772783279419, 0.006533631801605225, 0.00651478385925293, 0.006530687808990478, 0.0065920639038085935, 0.006537248134613037, 
0.006562143802642822, 0.006535520076751709, 0.006531231880187989, 0.006600575923919678, 0.006516704082489014, 0.006492224216461182, 0.006614975929260254, 0.0064471039772033695, 0.006230016231536865, 0.0065231361389160155, 0.006542335987091064, 0.006474495887756348, 0.006506720066070557, 0.006463263988494873, 0.007242815971374512, 0.0066080641746520995, 0.006534912109375, 0.006537087917327881, 0.006576064109802246, 0.006524735927581787, 0.006539040088653565, 0.006525087833404541, 0.0065392317771911625, 0.0065803837776184084, 0.006551871776580811, 0.006740064144134522, 0.0064941120147705075, 0.006559455871582031, 0.006436607837677002, 0.00653769588470459, 0.006506559848785401, 0.0064839677810668945, 0.006540351867675781, 0.006447840213775635, 0.006602975845336914, 0.006518784046173095, 0.0064386558532714844, 0.006531328201293945, 0.006445055961608887, 0.0064982080459594724, 0.006518335819244385, 0.0065500478744506836, 0.006543360233306885, 0.006495584011077881, 0.0064951682090759275, 0.006493728160858154, 0.006481823921203614, 0.006508096218109131, 0.0065127677917480465, 0.006467840194702148, 0.006530432224273681, 0.0065586881637573246, 0.006504447937011719, 0.00656931209564209, 0.0067133760452270505, 0.006492767810821533, 0.00653878402709961, 0.006586271762847901, 0.006513408184051514, 0.006579520225524903, 0.00646608018875122, 0.00650601577758789, 0.0065008320808410645, 0.006482240200042725, 0.006659776210784912, 0.0065998401641845705, 0.006675424098968506, 0.006588255882263184, 0.006557727813720703, 0.006510176181793213, 0.006531551837921142, 0.006299424171447754, 0.007609695911407471, 0.006534304141998291, 0.006902912139892578, 0.006554207801818848, 0.006581471920013428, 0.006511392116546631, 0.00654863977432251, 0.006482175827026367, 0.006552159786224365, 0.006526112079620361, 0.006464000225067139, 0.006539072036743164, 0.0065848641395568844, 0.006516928195953369, 0.006586048126220703, 0.0064618239402770995, 0.0065699200630187984, 0.0064982080459594724, 0.006507808208465576, 0.006482560157775879, 0.006461440086364746, 0.006479872226715088, 0.006480991840362549, 0.00659548807144165, 0.006475232124328613, 0.006507040023803711, 0.006576128005981445, 0.006481247901916504, 0.006475999832153321, 0.006493855953216552, 0.006820032119750977, 0.00656057596206665, 0.006566815853118897, 0.006607744216918945, 0.0065064959526062015, 0.006557663917541504, 0.006588319778442383, 0.006456704139709473, 0.006722047805786133, 0.007395840167999267, 0.006537248134613037, 0.007165279865264893, 0.006572512149810791, 0.006566815853118897, 0.0071241922378540035, 0.006594272136688233, 0.006549888134002686, 0.006524608135223388, 0.006667551994323731, 0.006529759883880615, 0.0065716800689697265, 0.006685023784637451, 0.006516479969024659, 0.006623487949371338, 0.006559648036956787, 0.006531167984008789, 0.006532159805297852, 0.00654636812210083, 0.006493535995483399, 0.006646783828735352, 0.006581024169921875, 0.006456128120422363, 0.0062575039863586425, 0.006522111892700196, 0.006482687950134278, 0.006582335948944092, 0.006587647914886475, 0.006619840145111084, 0.0065218877792358396, 0.006546624183654785, 0.006481023788452148, 0.006550015926361084, 0.006651679992675782, 0.006517119884490967, 0.006566976070404053, 0.00648419189453125, 0.006514560222625732, 0.006593376159667969, 0.006475647926330566, 0.006529151916503906, 0.006504191875457764, 0.006483200073242187, 0.006534016132354737, 0.006619264125823974, 0.006467775821685791, 0.0064748477935791015, 0.006488800048828125, 0.006461152076721191, 0.006499872207641601, 
0.006468063831329346, 0.006496895790100097, 0.006510240077972412, 0.0064432001113891605, 0.0065730881690979, 0.006799295902252197, 0.006497119903564453, 0.006591487884521485, 0.006534143924713135, 0.006509952068328858, 0.006628159999847412, 0.006483776092529297, 0.006623456001281738, 0.006643487930297852, 0.0065205440521240235, 0.006533408164978027, 0.0064839677810668945, 0.00655347204208374, 0.006522336006164551, 0.006509151935577393, 0.006514336109161377, 0.0064925761222839355, 0.0066111359596252445, 0.0065185918807983394, 0.0066409921646118164, 0.0066109437942504885, 0.006613791942596436, 0.006692736148834228, 0.006490111827850342, 0.006621183872222901, 0.006615231990814209, 0.006549312114715576, 0.006608255863189697, 0.00646617603302002, 0.006590464115142822, 0.006534463882446289, 0.006338784217834473, 0.0064960322380065915, 0.006543360233306885, 0.006635231971740723, 0.0064552001953125, 0.006574560165405274, 0.006544640064239502, 0.006592959880828858, 0.006594783782958984, 0.006457344055175781, 0.006586368083953857, 0.006520832061767578, 0.006559391975402832, 0.0065764799118041995, 0.006579296112060547, 0.006603424072265625, 0.006472064018249512, 0.0065903358459472655, 0.0064880638122558594, 0.006561791896820069, 0.006600607872009277, 0.007827328205108643, 0.007993567943572998, 0.007356351852416993, 0.006551616191864014, 0.006571040153503418, 0.007392320156097412, 0.006946720123291015, 0.006848159790039062, 0.00722979211807251, 0.00684438419342041, 0.006606880187988281, 0.006665823936462402, 0.006563392162322998, 0.006592864036560058, 0.006569664001464844, 0.00651961612701416, 0.006596127986907959, 0.006666207790374756, 0.00652239990234375, 0.006618080139160157, 0.006582272052764892, 0.006434815883636475, 0.0066007041931152345, 0.0064757437705993655, 0.0067190399169921875, 0.006705599784851074, 0.00679744005203247, 0.006578080177307129, 0.006550816059112549, 0.006574207782745361, 0.006622911930084229, 0.006605247974395752, 0.0066943998336792, 0.006543456077575683, 0.006611392021179199, 0.0066212801933288575, 0.00660422420501709, 0.006630208015441895, 0.006483200073242187, 0.006699872016906738, 0.0066244797706604, 0.00669532823562622, 0.0063450241088867185, 0.006606527805328369, 0.006694911956787109, 0.006559743881225586, 0.006638944149017334, 0.006785696029663086, 0.0066911039352416995, 0.0067972798347473145, 0.006761216163635254, 0.0067983360290527345, 0.006797567844390869, 0.006801152229309082, 0.006844319820404053, 0.00675164794921875, 0.0068635520935058595, 0.006795263767242431, 0.006563519954681397, 0.006686495780944824, 0.00682038402557373, 0.006615039825439453, 0.006682079792022705, 0.006605343818664551, 0.006603007793426514, 0.006598400115966797, 0.0066109437942504885, 0.006649312019348145, 0.006603295803070068, 0.0066641921997070315, 0.006934368133544922, 0.006801568031311035, 0.006785344123840332, 0.006667168140411377, 0.006736832141876221, 0.006682144165039062, 0.006674047946929932, 0.006701600074768066, 0.006652063846588134, 0.006674176216125489, 0.0066613759994506834, 0.006640416145324707, 0.006697184085845948, 0.006590464115142822, 0.006899424076080322, 0.00680790376663208, 0.006664351940155029, 0.006896671772003174, 0.006634367942810059, 0.006669600009918213, 0.00667903995513916, 0.006690720081329346, 0.006690752029418945, 0.006696959972381592, 0.00670911979675293, 0.006640063762664795, 0.0066754879951477055, 0.007074719905853272, 0.006972991943359375, 0.0066399359703063964, 0.00673967981338501, 0.006596511840820313, 0.006644224166870117, 0.006617087841033936, 
0.006706463813781738, 0.006353119850158692, 0.006581791877746582, 0.006676735877990722, 0.006806015968322754, 0.006578944206237793, 0.006624224185943603, 0.006569983959197998, 0.006606847763061524, 0.006746111869812011, 0.0066232957839965825, 0.006494143962860108, 0.006746111869812011, 0.006609951972961426, 0.006531231880187989, 0.006664063930511475, 0.00655398416519165, 0.006574656009674072, 0.0065920639038085935, 0.006484767913818359, 0.006609951972961426, 0.006527616024017334, 0.006557087898254394, 0.006677087783813477, 0.006642848014831543, 0.006630239963531494, 0.006511839866638183, 0.006533919811248779, 0.006512639999389648, 0.006786208152770996, 0.006564703941345215, 0.006459392070770263, 0.006576128005981445, 0.006604800224304199, 0.006532288074493408, 0.006672192096710205, 0.006724607944488525, 0.006674335956573486, 0.0067094721794128415, 0.006612736225128174, 0.006706528186798096, 0.006569024085998535, 0.006557439804077149, 0.006519040107727051, 0.00658787202835083, 0.006658048152923584, 0.006822144031524658, 0.006590464115142822, 0.006473279953002929, 0.006844863891601562, 0.006573184013366699, 0.0066097922325134275, 0.006725632190704346, 0.00657203197479248, 0.006438464164733887, 0.0067257599830627445, 0.006574399948120117, 0.006512639999389648, 0.006551551818847656, 0.006475776195526123, 0.006498271942138672, 0.006620672225952149, 0.00650108814239502, 0.006553376197814941, 0.006280223846435547, 0.006496448040008545, 0.006460192203521728, 0.006483903884887695, 0.006451488018035889, 0.006598688125610351, 0.006517920017242431, 0.006482528209686279, 0.006475776195526123, 0.006522463798522949, 0.006631872177124023, 0.0065327038764953615, 0.006535808086395264, 0.006717184066772461, 0.006573311805725098, 0.006526815891265869, 0.006511136054992676, 0.006619103908538818, 0.006620768070220947, 0.006572319984436035, 0.006528639793395996, 0.006458399772644043, 0.0064858880043029785, 0.006463295936584472, 0.006465760231018067, 0.006503392219543457, 0.00648089599609375, 0.006463136196136475, 0.006482272148132324, 0.006502111911773681, 0.006502687931060791, 0.006471936225891113, 0.006489247798919678, 0.0065049281120300294, 0.0065272321701049804, 0.006463615894317627, 0.006712384223937988, 0.006434656143188476, 0.0065483198165893555, 0.00652185583114624, 0.006485343933105469, 0.006566751956939698, 0.006450016021728516, 0.006567903995513916, 0.006518655776977539, 0.006479008197784424, 0.006552544116973877, 0.006501984119415283, 0.0065253438949584965, 0.006507872104644775, 0.0065411200523376465, 0.0065606398582458494, 0.006512608051300049, 0.0067656002044677735, 0.006539584159851074, 0.006571839809417725, 0.0065684161186218264, 0.006524511814117432, 0.0066301760673522945, 0.006508831977844238, 0.0066434240341186526, 0.006606847763061524, 0.0065773119926452635, 0.006355199813842773, 0.006614719867706299, 0.006731616020202637, 0.006616864204406738, 0.006633408069610595, 0.006660575866699219, 0.006560800075531006, 0.006638527870178223, 0.006589983940124512, 0.006587903976440429, 0.006631968021392823, 0.00670470380783081, 0.006634367942810059, 0.0066744318008422855, 0.006653567790985107, 0.006855040073394775, 0.0066375679969787596, 0.006653439998626709, 0.006596127986907959, 0.006688831806182861, 0.006692863941192627, 0.006560991764068604, 0.0066861119270324705, 0.006595136165618897, 0.006604576110839843, 0.006685887813568115, 0.006712063789367676, 0.006850560188293457, 0.006977663993835449, 0.006823808193206787, 0.006997375965118408, 0.006774464130401611, 0.006748767852783203, 
0.006742591857910156, 0.00664521598815918, 0.006701375961303711, 0.0066826238632202144, 0.006639616012573242, 0.006712863922119141, 0.0067422399520874025, 0.00666374397277832, 0.006703807830810547, 0.006763807773590088, 0.0067325758934021, 0.006917632102966308, 0.006837887763977051, 0.0067012162208557125, 0.006826208114624023, 0.0067478718757629395, 0.006715648174285889, 0.006625279903411865, 0.006703551769256592, 0.006749983787536621, 0.006797567844390869, 0.006709311962127686, 0.006704448223114013, 0.006677343845367432, 0.006698783874511719, 0.006923711776733398, 0.006642240047454834, 0.006713119983673096, 0.006672063827514649, 0.006588960170745849, 0.006313951969146728, 0.00654969596862793, 0.0065985918045043945, 0.006541024208068848, 0.006558432102203369, 0.00653657579421997, 0.006736127853393554, 0.006534719944000244, 0.006567391872406006, 0.0065033278465271, 0.006561855792999268, 0.006491199970245361, 0.006678976058959961, 0.006570496082305908, 0.006500351905822754, 0.006580063819885254, 0.007224512100219726, 0.006534111976623535, 0.006516736030578613, 0.006586080074310303, 0.006505887985229492, 0.006552095890045166, 0.0065447998046875, 0.006453728199005127, 0.00664358377456665, 0.00657263994216919, 0.006481696128845215, 0.0065927357673645015, 0.006481535911560058, 0.006480480194091797, 0.006522848129272461, 0.006448095798492432, 0.006543903827667236, 0.006516543865203858, 0.006455808162689209, 0.006576128005981445, 0.006532447814941406, 0.006473504066467285, 0.006558591842651367, 0.006592512130737305, 0.006517024040222168, 0.006563199996948242, 0.006383647918701172, 0.0065327038764953615, 0.006505343914031982, 0.0064691839218139644, 0.006541408061981201, 0.006470911979675293, 0.006451648235321045, 0.0064783358573913576, 0.006453248023986816, 0.006682784080505371, 0.00656774377822876, 0.00645308780670166, 0.006567488193511963, 0.006490752220153808, 0.006497920036315918, 0.0065437440872192386, 0.0064287037849426265, 0.006516704082489014, 0.006479872226715088, 0.006535168170928955, 0.006529376029968261, 0.006196512222290039, 0.006441472053527832, 0.006492095947265625, 0.006508831977844238, 0.006446176052093506, 0.006467904090881348, 0.006525536060333252, 0.006479936122894287, 0.006517856121063232, 0.0065483198165893555, 0.0065413122177124024, 0.006586368083953857, 0.006520832061767578, 0.006561791896820069, 0.006561952114105225, 0.006502240180969238, 0.006549503803253174, 0.006550784111022949, 0.00666710376739502, 0.006645664215087891, 0.006555168151855468, 0.006515168190002442, 0.006582272052764892, 0.006549503803253174, 0.006589759826660156, 0.0066126718521118165, 0.006523903846740723, 0.006514368057250977, 0.0065797438621521, 0.00649126386642456, 0.00655679988861084, 0.006539999961853027, 0.006455103874206543, 0.0065354561805725095, 0.0064980478286743165, 0.006526944160461426, 0.006737055778503418, 0.006556511878967285, 0.00646998405456543, 0.006532351970672607, 0.006436927795410156, 0.006492512226104736, 0.0064626879692077635, 0.006529823780059814, 0.006510591983795166, 0.006495232105255127, 0.006464511871337891, 0.006503615856170654, 0.006460224151611328, 0.006514111995697022, 0.006488639831542969, 0.006563839912414551, 0.0065146880149841305, 0.0065920639038085935, 0.0065205121040344234, 0.006502304077148437, 0.006458208084106445, 0.00652288007736206, 0.006465536117553711, 0.006479872226715088, 0.00658787202835083, 0.006529727935791016, 0.006500192165374756, 0.006217120170593261, 0.0066830401420593264, 0.00651804780960083, 0.0065414719581604, 0.006540192127227783, 
0.006458847999572754, 0.006568352222442627, 0.00654041576385498, 0.006510752201080322, 0.0065131521224975585, 0.006522848129272461, 0.006535359859466553, 0.006583871841430664, 0.0064776320457458495, 0.006508512020111084, 0.0067707200050354, 0.006586847782135009, 0.00657747220993042, 0.006621503829956054, 0.006529344081878662, 0.006557759761810303, 0.006696959972381592, 0.00658841609954834, 0.006598656177520752, 0.006605088233947754, 0.00667414379119873, 0.006767712116241455, 0.0066570878028869625, 0.006651743888854981, 0.00667468786239624, 0.007067391872406006, 0.006619135856628418, 0.006619423866271973, 0.006627039909362793, 0.006553855895996094, 0.006594304084777832, 0.006594367980957031, 0.006702911853790283, 0.006587808132171631, 0.006532256126403809, 0.006598464012145996, 0.00656166410446167, 0.006550687789916992, 0.0065953278541564945, 0.006822112083435058, 0.006551551818847656, 0.00658841609954834, 0.006701119899749756, 0.006624639987945557, 0.006683199882507324, 0.006711616039276123, 0.00665772819519043, 0.006772799968719483, 0.006712959766387939, 0.006641568183898926, 0.006640160083770752, 0.006736800193786621, 0.006607744216918945, 0.0067964158058166505, 0.006752287864685059, 0.00663808012008667, 0.006670271873474121, 0.006748320102691651, 0.006454368114471435, 0.006703584194183349, 0.006970880031585694, 0.006902719974517822, 0.006721536159515381, 0.006704512119293213, 0.006695807933807373, 0.006729472160339356, 0.006670335769653321, 0.006696544170379639, 0.006686495780944824, 0.006595104217529297, 0.006854752063751221, 0.006690815925598144, 0.006616223812103272, 0.006599520206451416, 0.0066632962226867674, 0.006694943904876709, 0.006812704086303711, 0.006874559879302978, 0.0067907838821411135, 0.006629663944244385, 0.006654208183288574, 0.0066416640281677245, 0.00672380781173706, 0.006649824142456054, 0.0066191678047180175, 0.006602752208709717, 0.006571231842041015, 0.006609695911407471, 0.006531392097473145, 0.006559423923492432, 0.006531199932098389, 0.00666815996170044, 0.0065799040794372555, 0.006588319778442383, 0.006705599784851074, 0.00662275218963623, 0.006528512001037597, 0.006624192237854004, 0.006567647933959961, 0.006487648010253907, 0.0065194878578186035, 0.006567935943603515, 0.006516992092132568, 0.006539103984832764, 0.006444287776947021, 0.006597280025482178, 0.006655007839202881, 0.006484960079193115, 0.006500351905822754, 0.006574079990386963, 0.00645027208328247, 0.0064841599464416505, 0.006537951946258545, 0.006502399921417237, 0.006872159957885742, 0.006612031936645508, 0.006487904071807861, 0.006557695865631104, 0.006571167945861816, 0.006521791934967041, 0.0065229439735412596, 0.006260000228881836, 0.006463327884674072, 0.006468480110168457, 0.0064880638122558594, 0.00643891191482544, 0.006513696193695068, 0.006552063941955567, 0.006457824230194092, 0.006502399921417237, 0.00652288007736206, 0.006450623989105225, 0.006447679996490478, 0.006446208000183105, 0.006468480110168457, 0.006576128005981445, 0.006486144065856934, 0.006474656105041504, 0.006421472072601318, 0.006558847904205322, 0.006511168003082275, 0.006447328090667725, 0.006502399921417237, 0.006467008113861084, 0.006455967903137207, 0.0064737281799316405, 0.006428671836853027, 0.006526976108551025, 0.0066109437942504885, 0.006456448078155518, 0.006482528209686279, 0.006599967956542969, 0.006451615810394287, 0.00656220817565918, 0.006424992084503174, 0.006480832099914551, 0.006546271800994873, 0.00641868782043457, 0.006444799900054932, 0.00645468807220459, 0.006480639934539795, 
0.0065083842277526855, 0.0064033279418945314, 0.006451551914215088, 0.006553120136260986, 0.00643775987625122, 0.006752255916595459, 0.006731776237487793, 0.006492159843444824, 0.006518303871154785, 0.006490560054779053, 0.006485599994659424, 0.006570432186126709, 0.006459136009216309, 0.006733376026153565, 0.006503104209899902, 0.006449151992797852, 0.006491775989532471, 0.006468992233276367, 0.006558176040649414, 0.0064941120147705075, 0.006399871826171875, 0.006605440139770507, 0.006524479866027832, 0.00618617582321167, 0.0064397120475769044, 0.006489344120025635, 0.006473375797271729, 0.006447328090667725, 0.006487711906433106, 0.006502848148345947, 0.0065548157691955565, 0.006770592212677002, 0.006539968013763428, 0.006512576103210449, 0.0065064640045166015, 0.00660694408416748, 0.006516736030578613, 0.006502399921417237, 0.006520703792572021, 0.0066993279457092285, 0.00647756814956665, 0.006483808040618897, 0.006572159767150879, 0.006444543838500977, 0.006516511917114258, 0.0064783039093017575, 0.006473631858825684, 0.0065391998291015625, 0.006442527770996093, 0.006515456199645996, 0.00659443187713623, 0.006754240036010742, 0.007624288082122802, 0.006537824153900146, 0.0064651517868042, 0.006635744094848633, 0.006563744068145752, 0.006457824230194092, 0.006508512020111084, 0.006570015907287598, 0.006737215995788574, 0.007338880062103271, 0.006655807971954346, 0.006514527797698975, 0.006582431793212891, 0.006689023971557617, 0.006535935878753662, 0.006626560211181641, 0.006536960124969482, 0.006516767978668213, 0.006641536235809326, 0.006547711849212647, 0.006530144214630127, 0.006515200138092041, 0.006646016120910645, 0.006446944236755371, 0.006551712036132812, 0.006569471836090088, 0.006474239826202393, 0.006553311824798584, 0.006518080234527588, 0.006581056118011475, 0.006510752201080322, 0.006772736072540283, 0.006631423950195312, 0.006672383785247803, 0.006302175998687744, 0.006456511974334717, 0.006543903827667236, 0.006564383983612061, 0.006455423831939697, 0.006581952095031738, 0.006473855972290039, 0.006552703857421875, 0.007182720184326172, 0.006494719982147217, 0.006571104049682617, 0.00648095989227295, 0.006537119865417481, 0.00659603214263916, 0.006622111797332764, 0.006594336032867432, 0.0064711041450500485, 0.006555488109588623, 0.006463615894317627, 0.006560031890869141, 0.006520768165588379, 0.006473919868469238, 0.006563839912414551, 0.006559743881225586, 0.006563839912414551, 0.006516831874847412, 0.0066431999206542965, 0.006610847949981689, 0.006527520179748535, 0.006514656066894531, 0.006547167778015137, 0.006502528190612793, 0.006583712100982666, 0.006498176097869873, 0.006529920101165772, 0.006458559989929199, 0.006588479995727539, 0.0065352959632873535, 0.006574304103851319, 0.006772448062896728, 0.006513279914855957, 0.006587711811065674, 0.006507264137268066, 0.0065272641181945805, 0.006604512214660644, 0.0064778242111206055, 0.00656771183013916, 0.006559872150421143, 0.006510303974151611, 0.006571424007415771, 0.006525728225708008, 0.0066479997634887695, 0.006518080234527588, 0.0065420160293579105, 0.006659647941589355, 0.006504640102386474, 0.006616543769836425, 0.006492959976196289, 0.0065699520111083985, 0.006549727916717529, 0.006704864025115967, 0.006594816207885742, 0.006563007831573486, 0.006252031803131103, 0.0064910402297973634, 0.006594816207885742, 0.00658128023147583, 0.006480127811431885, 0.006664735794067383, 0.006471519947052002, 0.006579584121704102, 0.006846367835998535, 0.006521664142608643, 0.006610752105712891, 0.006502528190612793, 
0.006813727855682373, 0.00658025598526001, 0.006582272052764892, 0.0068106880187988285, 0.0065914239883422855, 0.00660422420501709, 0.006603328227996827, 0.006550655841827392, 0.006843264102935791, 0.00648524808883667, 0.006601471900939942, 0.006460639953613281, 0.00656217622756958, 0.006560160160064697, 0.00646073579788208, 0.006578879833221436, 0.006582208156585693, 0.006705215930938721, 0.006717440128326416, 0.00658739185333252, 0.006611392021179199, 0.006523744106292724, 0.006575839996337891, 0.006562943935394287, 0.006584544181823731, 0.006562464237213135, 0.006493279933929444, 0.006607776165008545, 0.0067276802062988285, 0.00659276819229126, 0.00663702392578125, 0.006492447853088379, 0.006624447822570801, 0.0068206720352172855, 0.006481919765472412, 0.006894783973693848, 0.006687551975250244, 0.0065168957710266115, 0.00661897611618042, 0.006707200050354004, 0.006469632148742676, 0.006589632034301757, 0.0065316162109375, 0.006452608108520508, 0.0065504322052001955, 0.006502399921417237, 0.006463615894317627, 0.006691872119903564, 0.006607967853546143, 0.006529888153076172, 0.006665120124816895, 0.006238207817077636, 0.006483583927154541, 0.006508927822113037, 0.006504447937011719, 0.006426208019256592, 0.006463903903961182, 0.006506336212158203, 0.006455455780029297, 0.006529024124145508, 0.0064223999977111815, 0.00648748779296875, 0.006456319808959961, 0.006437920093536377, 0.00647756814956665, 0.006445151805877685, 0.0064666237831115725, 0.0064386558532714844, 0.006518208026885987, 0.006487936019897461, 0.006425280094146729, 0.00643891191482544, 0.006459392070770263, 0.006412191867828369, 0.0064115519523620604, 0.006439743995666504, 0.006401247978210449, 0.0064765758514404295, 0.006415520191192627, 0.006431583881378174, 0.00652288007736206, 0.006432767868041992, 0.006538879871368408, 0.006441535949707031, 0.006450623989105225, 0.006732160091400146, 0.006688767910003662, 0.00659660816192627, 0.006520256042480469, 0.006447679996490478, 0.006916096210479736, 0.006536960124969482, 0.006456736087799072, 0.0064989118576049805, 0.0064637441635131835, 0.006516384124755859, 0.006529376029968261, 0.006423840045928955, 0.0064232640266418456, 0.00654531192779541, 0.006501728057861328, 0.006603519916534423, 0.006453248023986816, 0.006696671962738037, 0.006705440044403076, 0.008413023948669433, 0.007256224155426026, 0.006558911800384521, 0.006488800048828125, 0.0065148801803588865, 0.0064635519981384275, 0.006477503776550293, 0.006477248191833496, 0.006437600135803223, 0.006148064136505127, 0.006466911792755127, 0.006460063934326172, 0.006458720207214355, 0.006446784019470215, 0.006431712150573731, 0.007444479942321777, 0.006547488212585449, 0.006457119941711426, 0.006594079971313477, 0.006525599956512451, 0.00646454381942749, 0.00648086404800415, 0.006442912101745605, 0.006584415912628174, 0.006517983913421631, 0.006459712028503418, 0.006452960014343262, 0.006560512065887451, 0.0064737281799316405, 0.006463232040405273, 0.006428927898406983, 0.006436607837677002, 0.00643503999710083, 0.006416416168212891, 0.006660223960876465, 0.006471551895141601, 0.006402048110961914, 0.006607071876525879, 0.006457119941711426, 0.00642412805557251, 0.006422976016998291, 0.0063975038528442385, 0.0069202880859375, 0.006507008075714111, 0.00648140811920166, 0.0067259840965271, 0.006470719814300537, 0.006454239845275879, 0.006468671798706055, 0.006415264129638672, 0.006561791896820069, 0.006475840091705322, 0.0064113597869873045, 0.006451456069946289, 0.006577951908111572, 0.006617919921875, 0.0064759359359741215, 
0.0064570879936218266, 0.006469727993011475, 0.006461440086364746, 0.006448703765869141, 0.006522336006164551, 0.00654640007019043, 0.006952928066253662, 0.006610527992248535, 0.006482367992401123, 0.006512256145477295, 0.006517119884490967, 0.0064440321922302245, 0.006568960189819336, 0.00662662410736084, 0.006537568092346191, 0.006239136219024658, 0.006510335922241211, 0.006588672161102295, 0.008937472343444825, 0.008212703704833985, 0.006536992073059082, 0.006491648197174072, 0.0065351037979125975, 0.006769216060638428, 0.006637279987335205, 0.0065296320915222165, 0.00773526382446289, 0.006626368045806885, 0.006460063934326172, 0.006502111911773681, 0.006463808059692383, 0.006512864112854004, 0.0064652800559997555, 0.006475423812866211, 0.006500895977020264, 0.006807199954986572, 0.006615200042724609, 0.006653600215911866, 0.006451519966125488, 0.006475808143615723, 0.006435967922210693, 0.006546527862548828, 0.0064694080352783205, 0.006450655937194824, 0.006439455986022949, 0.006471680164337158, 0.006499392032623291, 0.006484928131103515, 0.006596479892730713, 0.006448287963867188, 0.006492447853088379, 0.006446784019470215, 0.006525631904602051, 0.006494624137878418, 0.006425856113433838, 0.00648576021194458, 0.0069838080406188965, 0.006519328117370606, 0.0065924482345581055, 0.006459199905395508, 0.006431519985198975, 0.006450911998748779, 0.006502592086791993, 0.006471487998962402, 0.006496255874633789, 0.006446656227111817, 0.006510464191436768, 0.006554175853729248, 0.006457344055175781, 0.006721183776855468, 0.007296480178833008, 0.007084447860717774, 0.006650335788726806, 0.006579520225524903, 0.00649286413192749, 0.0065203518867492675, 0.006650271892547607, 0.006504032135009765]",tokens/s,151.9923206479382,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1044.66432,904.855552,0.0,509.607936,491.434496,s,1,7.832982421875,7.832982421875,0.0,7.832982421875,7.832982421875,7.832982421875,7.832982421875,[7.832982421875],,kWh,2.3440985341665054e-05,2.5785837086470675e-06,7.252228023994778e-06,3.32717970743069e-05,,MB,1364.054016,1018.10176,0.0,608.17408,592.24832,s,10,0.19564134216308593,0.019564134216308596,0.0001360158079278712,0.019515311241149903,0.019726105117797852,0.01979231662750244,0.019845285835266114,"[0.01948374366760254, 0.01985852813720703, 0.01954537582397461, 0.019519615173339843, 0.019469696044921873, 0.019433792114257813, 0.019511007308959962, 0.019693599700927735, 0.01971139144897461, 0.01941459274291992]",tokens/s,13085.168869195308,kWh,5.661638348514217e-07,6.243810454996636e-08,3.5134718030232953e-07,9.799491197037176e-07,tokens/kWh,261238052.9280951,MB,1377.755136,1032.781824,0.0,622.854144,605.085696,s,10,10.322046630859376,1.0322046630859376,0.0030857666669471323,1.031349365234375,1.036057580566406,1.0378003967285157,1.0391946496582032,"[1.0313995361328125, 1.0314732666015625, 1.039543212890625, 
1.029095947265625, 1.03034423828125, 1.0333880615234374, 1.0356702880859374, 1.0310013427734375, 1.0312991943359375, 1.02883154296875]",tokens/s,61.034407470754516,kWh,2.9547575426814806e-05,3.2583578491944143e-06,1.1555023455897596e-05,4.436095673190681e-05,tokens/kWh,1420167.747524863,,s,630,10.316964745521545,0.016376134516700867,0.00027373448692247595,0.01631972789764404,0.01657235870361328,0.01676669759750366,0.017506927680969238,"[0.01589008045196533, 0.01655571174621582, 0.016296735763549806, 0.016491935729980468, 0.016374048233032228, 0.016637279510498048, 0.016554975509643556, 0.01639423942565918, 0.016363519668579102, 0.016556032180786134, 0.0162938232421875, 0.01633286476135254, 0.016242687225341796, 0.016315616607666016, 0.016407487869262695, 0.01624662399291992, 0.016274656295776367, 0.016319263458251954, 0.016244735717773438, 0.016373760223388673, 0.016338943481445312, 0.016291839599609375, 0.01638387107849121, 0.01637366485595703, 0.01640470314025879, 0.016252927780151367, 0.016507999420166015, 0.016323680877685546, 0.016408479690551758, 0.016320415496826172, 0.01641881561279297, 0.016322208404541017, 0.016439008712768554, 0.016444032669067382, 0.016453632354736326, 0.016314367294311523, 0.01636092758178711, 0.016400800704956055, 0.016386016845703125, 0.01629427146911621, 0.016303903579711915, 0.01630780792236328, 0.016297664642333985, 0.016306655883789063, 0.016320512771606444, 0.01627996826171875, 0.01625657653808594, 0.016433120727539063, 0.016367679595947267, 0.016238847732543946, 0.01624678421020508, 0.01624678421020508, 0.016477279663085938, 0.016282527923583985, 0.01660211181640625, 0.016473087310791015, 0.016373760223388673, 0.0163155517578125, 0.016337696075439452, 0.016558143615722658, 0.01658608055114746, 0.016371488571166992, 0.01631558418273926, 0.01604812812805176, 0.01625075149536133, 0.016340511322021484, 0.01664044761657715, 0.016416927337646485, 0.016283647537231445, 0.016572128295898436, 0.016339231491088867, 0.01641414451599121, 0.016333375930786133, 0.01642518424987793, 0.016690975189208986, 0.016390016555786133, 0.016395488739013673, 0.016222240447998047, 0.016234912872314454, 0.016248319625854494, 0.016312768936157226, 0.01622275161743164, 0.016349184036254884, 0.016513023376464844, 0.016289791107177733, 0.01628329658508301, 0.016264896392822265, 0.016220832824707033, 0.016242687225341796, 0.01626300811767578, 0.016438688278198242, 0.01634377670288086, 0.01638198471069336, 0.016310272216796876, 0.016472095489501952, 0.01649843215942383, 0.01700009536743164, 0.016464448928833007, 0.016457120895385743, 0.016357152938842774, 0.01650281524658203, 0.016400800704956055, 0.016320863723754884, 0.016377344131469726, 0.016329216003417968, 0.016230432510375977, 0.016280895233154298, 0.01630009651184082, 0.01632057571411133, 0.016351808547973634, 0.016259071350097656, 0.016240640640258788, 0.016497888565063477, 0.01622505569458008, 0.016316415786743164, 0.01655129623413086, 0.01645427131652832, 0.016324031829833986, 0.01633951950073242, 0.01627136039733887, 0.016347040176391603, 0.01628988838195801, 0.016404127120971678, 0.016284000396728514, 0.016482080459594727, 0.0163350715637207, 0.01602457618713379, 0.016837631225585938, 0.01623859214782715, 0.01637990379333496, 0.016441503524780274, 0.016617311477661132, 0.016373151779174804, 0.016709503173828126, 0.016227039337158203, 0.016326656341552736, 0.016254655838012694, 0.016330463409423828, 0.01614499282836914, 0.01639958381652832, 0.01623734474182129, 0.016191488265991212, 0.01618943977355957, 
0.016175104141235352, 0.01630975914001465, 0.016402944564819336, 0.0162795524597168, 0.017154048919677735, 0.016134143829345703, 0.017587839126586916, 0.016257408142089844, 0.01620732879638672, 0.016146976470947264, 0.01619558334350586, 0.016267263412475585, 0.01652236747741699, 0.016947967529296875, 0.01635545539855957, 0.016216064453125, 0.01618943977355957, 0.016186656951904296, 0.016251615524291992, 0.016574432373046875, 0.016181280136108398, 0.016242111206054687, 0.01633296012878418, 0.016350784301757813, 0.01618627166748047, 0.01646169662475586, 0.01634716796875, 0.016249887466430663, 0.016274431228637695, 0.01694099235534668, 0.017860832214355468, 0.016742240905761718, 0.016536703109741212, 0.01646860885620117, 0.016277631759643554, 0.01640665626525879, 0.016351232528686522, 0.016695295333862305, 0.017180192947387696, 0.01777302360534668, 0.017038976669311524, 0.016998783111572265, 0.016712959289550782, 0.01658470344543457, 0.01649126434326172, 0.016997983932495117, 0.016075679779052734, 0.01642214393615723, 0.016253440856933594, 0.01625894355773926, 0.01627564811706543, 0.01630182456970215, 0.016217920303344728, 0.016203296661376952, 0.016157503128051757, 0.016092992782592772, 0.016193824768066405, 0.0164388484954834, 0.016941215515136717, 0.016179487228393553, 0.016238176345825195, 0.01634739112854004, 0.0167794246673584, 0.016582592010498047, 0.01636070442199707, 0.016284032821655272, 0.01641721534729004, 0.016424959182739257, 0.01640233612060547, 0.016323936462402343, 0.016380672454833985, 0.01658880043029785, 0.016479711532592773, 0.016226783752441406, 0.016345151901245115, 0.016363264083862305, 0.01624892807006836, 0.01633296012878418, 0.016220159530639648, 0.016326528549194336, 0.016232576370239258, 0.016265216827392577, 0.016289535522460936, 0.01620992088317871, 0.016119487762451173, 0.016193376541137696, 0.01623520088195801, 0.016377887725830077, 0.016318464279174806, 0.01638604736328125, 0.016280736923217774, 0.01627440071105957, 0.016353151321411134, 0.016216064453125, 0.016242687225341796, 0.016270751953125, 0.01631702423095703, 0.01628483200073242, 0.016503648757934572, 0.016469856262207032, 0.016246208190917967, 0.0162390079498291, 0.016218431472778322, 0.01627136039733887, 0.016295936584472655, 0.0164138240814209, 0.016575359344482423, 0.016571487426757812, 0.0162224006652832, 0.015857919692993164, 0.01627369689941406, 0.016307296752929686, 0.0163110408782959, 0.01635305595397949, 0.016271743774414062, 0.016253183364868164, 0.016262912750244142, 0.016203647613525392, 0.016303903579711915, 0.016375200271606445, 0.01637606430053711, 0.016388799667358397, 0.01636944007873535, 0.016313568115234375, 0.016256000518798826, 0.016252607345581056, 0.016214111328125, 0.01622345542907715, 0.01620889663696289, 0.016166912078857423, 0.016326656341552736, 0.016154783248901367, 0.016232288360595704, 0.01617919921875, 0.01615667152404785, 0.016347135543823242, 0.01663599967956543, 0.01656412887573242, 0.016348224639892578, 0.01626755142211914, 0.01622697639465332, 0.016316255569458007, 0.01626128005981445, 0.016248832702636717, 0.016269088745117188, 0.016187360763549805, 0.01617945671081543, 0.01634819221496582, 0.01655894470214844, 0.016606592178344728, 0.017510528564453124, 0.016335487365722656, 0.016556032180786134, 0.016702816009521483, 0.016214687347412108, 0.016189407348632813, 0.016252639770507813, 0.016312128067016603, 0.016271871566772463, 0.016887807846069337, 0.016408512115478516, 0.016545055389404296, 0.016268064498901367, 0.016332799911499024, 0.016250879287719726, 
0.01618636894226074, 0.01657980728149414, 0.016766239166259765, 0.016314495086669923, 0.01639846420288086, 0.016381343841552733, 0.016244895935058595, 0.018520992279052736, 0.01722947120666504, 0.016412832260131835, 0.016259008407592774, 0.016239999771118164, 0.01622515106201172, 0.01634662437438965, 0.01620774459838867, 0.01629644775390625, 0.016306655883789063, 0.016248096466064454, 0.016199071884155272, 0.016337888717651367, 0.016306175231933593, 0.016447200775146484, 0.016314655303955077, 0.016250879287719726, 0.016244735717773438, 0.016303936004638673, 0.016203968048095704, 0.016262880325317385, 0.016266592025756837, 0.016364479064941408, 0.016258655548095705, 0.01655049514770508, 0.016504287719726562, 0.016623968124389647, 0.01660495948791504, 0.016408159255981446, 0.016162464141845703, 0.01623334312438965, 0.016248672485351563, 0.016337247848510743, 0.016368671417236327, 0.016246816635131837, 0.016353311538696288, 0.016419647216796875, 0.016324607849121094, 0.016305856704711914, 0.016437568664550782, 0.016519168853759765, 0.01631023979187012, 0.016250911712646483, 0.016211360931396485, 0.01629654312133789, 0.01633452796936035, 0.016767072677612304, 0.01632896041870117, 0.016334815979003905, 0.01635327911376953, 0.01632383918762207, 0.01625369644165039, 0.016264863967895508, 0.016615776062011717, 0.01641267204284668, 0.016892032623291017, 0.016344959259033204, 0.01645903968811035, 0.01633875274658203, 0.016511903762817384, 0.01617683219909668, 0.016224479675292967, 0.01620591926574707, 0.0163852481842041, 0.01625894355773926, 0.016591808319091798, 0.016443359375, 0.016144384384155275, 0.01630745506286621, 0.016548608779907225, 0.01622220802307129, 0.016281600952148437, 0.016416959762573242, 0.016254751205444336, 0.01635740852355957, 0.016512960433959962, 0.016254688262939455, 0.016246463775634764, 0.016423583984375, 0.016416671752929688, 0.016226400375366212, 0.016375423431396485, 0.0163056640625, 0.0162938232421875, 0.016248895645141603, 0.01622105598449707, 0.016354400634765624, 0.016357376098632814, 0.016382879257202148, 0.016231712341308595, 0.016202207565307616, 0.01618092727661133, 0.016265024185180665, 0.016388864517211915, 0.016293600082397462, 0.01627779197692871, 0.016590848922729492, 0.016390144348144533, 0.01621196746826172, 0.016416160583496094, 0.016392383575439453, 0.01628816032409668, 0.016332799911499024, 0.016311391830444336, 0.016442144393920898, 0.016467136383056642, 0.016359359741210937, 0.016570783615112303, 0.016382015228271485, 0.016247360229492188, 0.016204992294311524, 0.016526016235351562, 0.016527456283569338, 0.01643929672241211, 0.01626316833496094, 0.016924127578735352, 0.016529951095581055, 0.01961369514465332, 0.017346559524536134, 0.01659459114074707, 0.016368095397949218, 0.016355039596557618, 0.01629404830932617, 0.01638105583190918, 0.016532352447509766, 0.01622220802307129, 0.016263391494750975, 0.016357919692993165, 0.016408575057983397, 0.016643743515014648, 0.016209375381469725, 0.016196479797363283, 0.01621401596069336, 0.016199680328369142, 0.016236543655395508, 0.01635308837890625, 0.01636751937866211, 0.016435232162475586, 0.0163371524810791, 0.016259071350097656, 0.016373760223388673, 0.016330623626708986, 0.016523391723632812, 0.016474111557006836, 0.01640243148803711, 0.016467903137207033, 0.01656972885131836, 0.01637984085083008, 0.016373855590820312, 0.01620240020751953, 0.016184640884399415, 0.01617990493774414, 0.01625212860107422, 0.016125823974609373, 0.016212352752685545, 0.016275999069213867, 0.016324480056762694, 
0.0162346248626709, 0.016244735717773438, 0.01637580871582031, 0.016385440826416017, 0.01636751937866211, 0.016353343963623045, 0.01625766372680664, 0.016248863220214845, 0.01627280044555664, 0.01637843132019043, 0.016358400344848634, 0.016325632095336915, 0.01625823974609375, 0.016172096252441405, 0.016190240859985352, 0.016317216873168946, 0.016234111785888673, 0.01633683204650879, 0.0163571834564209, 0.01659782409667969, 0.0164003849029541, 0.016320512771606444, 0.016383392333984375, 0.01626576042175293, 0.016162879943847658, 0.016193119049072266, 0.016302431106567383, 0.01624275207519531, 0.0162729606628418, 0.016296384811401367, 0.01695088005065918, 0.018268928527832032, 0.01609004783630371, 0.016308223724365235, 0.016156063079833985, 0.01612041664123535, 0.01613350486755371, 0.016205759048461915, 0.01619811248779297, 0.016141632080078124, 0.01615555191040039, 0.01615683174133301, 0.016176511764526367, 0.016169439315795897, 0.016236543655395508, 0.016541696548461913, 0.01623619270324707, 0.01618569564819336, 0.01621811294555664, 0.016250656127929686, 0.016176671981811525, 0.01622604751586914, 0.016236959457397462, 0.016587295532226563, 0.016570367813110352, 0.016358816146850585, 0.016244512557983398, 0.016318399429321288, 0.016292192459106444, 0.016290336608886718, 0.0163154239654541, 0.01647305679321289, 0.01642291259765625, 0.016312288284301757, 0.016246816635131837, 0.016232448577880858, 0.016268800735473633, 0.016189952850341797, 0.016320192337036132, 0.017247936248779298, 0.016769664764404297, 0.016990207672119142, 0.016428800582885743, 0.016400224685668947, 0.01636761665344238, 0.016562271118164062, 0.016336223602294923, 0.016427999496459962, 0.016953344345092772, 0.017498111724853514, 0.01648988723754883, 0.016499296188354492, 0.0162093448638916, 0.01622038459777832, 0.01622256088256836, 0.016285696029663087, 0.0162857608795166, 0.016400096893310546, 0.01634867286682129, 0.016214303970336914, 0.016326271057128906, 0.016264320373535156, 0.01639516830444336, 0.01653651237487793, 0.01633059120178223, 0.016312896728515627, 0.016328256607055665, 0.01617910385131836, 0.016161216735839843, 0.016201183319091796, 0.0162289924621582, 0.01623859214782715, 0.016260448455810546, 0.01638262367248535, 0.016347135543823242, 0.016467872619628905, 0.016500255584716798, 0.016267744064331055, 0.01633417510986328, 0.01645792007446289, 0.016740928649902342, 0.016668415069580077, 0.01642288017272949, 0.0163056640625, 0.01615542411804199, 0.01616022491455078, 0.016254720687866212, 0.016337696075439452, 0.01641372871398926, 0.016260032653808595, 0.016267295837402343, 0.016277503967285157, 0.01619126319885254, 0.016209888458251952, 0.01616307258605957, 0.016166208267211914, 0.016222911834716795, 0.016313600540161132, 0.017076128005981444, 0.01637977600097656, 0.01621295928955078, 0.01621196746826172, 0.016236223220825196, 0.016322080612182616, 0.016201663970947265, 0.016188255310058595, 0.016215999603271483, 0.0163656005859375, 0.016230432510375977, 0.01618534469604492, 0.016867136001586912, 0.01631007957458496, 0.016283615112304688, 0.016465599060058594, 0.016384960174560547, 0.016362560272216796, 0.01633353614807129, 0.0162478084564209, 0.016260095596313476, 0.016390144348144533, 0.016504831314086914, 0.016248512268066406, 0.016345407485961912, 0.01637171173095703, 0.01635647964477539, 0.016089984893798828, 0.016295679092407228, 0.016170751571655272]",tokens/s,61.064471532043804,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1018.90048,1105.068032,0.0,702.54592,681.468416,s,1,9.9671279296875,9.9671279296875,0.0,9.9671279296875,9.9671279296875,9.9671279296875,9.9671279296875,[9.9671279296875],,kWh,2.4330175333335543e-05,2.6765279656981036e-06,7.3230614139998054e-06,3.432976471303345e-05,,MB,1434.25536,1425.932288,0.0,1008.730112,988.14208,s,10,0.23876870346069337,0.023876870346069338,0.00015100028902400719,0.02386571216583252,0.024063635444641112,0.024112681674957274,0.024151918659210205,"[0.023960895538330078, 0.023762399673461915, 0.024052736282348632, 0.02382912063598633, 0.02370534324645996, 0.023954912185668944, 0.02365683174133301, 0.023782432556152342, 0.023902303695678712, 0.024161727905273437]",tokens/s,10721.673162753646,kWh,7.044943914658517e-07,7.766771774161019e-08,4.653611487277107e-07,1.2475232579351725e-06,tokens/kWh,205206595.04474187,MB,1491.566592,1463.681024,0.0,1046.478848,988.14464,s,10,12.475591918945312,1.2475591918945312,0.0029883921850468904,1.2470914916992188,1.2519677734375,1.2519743652343749,1.251979638671875,"[1.24542822265625, 1.24469482421875, 1.242593017578125, 1.246116455078125, 1.2458607177734375, 1.248892578125, 1.25198095703125, 1.25196630859375, 1.2480665283203125, 1.2499923095703125]",tokens/s,50.498605925325926,kWh,3.64463082689509e-05,4.019587348747565e-06,1.605831873687225e-05,5.6524214354570724e-05,tokens/kWh,1114566.5750399877,,s,630,12.473087081909187,0.019798550923665368,0.0002567107898953006,0.019755440711975096,0.0199639835357666,0.020084584617614747,0.02100777183532715,"[0.019884607315063477, 0.02002943992614746, 0.01980620765686035, 0.019850271224975586, 0.019741056442260742, 0.019730655670166016, 0.019713727951049805, 0.020048576354980467, 0.01964371109008789, 0.019664896011352538, 0.019917503356933593, 0.019748832702636717, 0.019784927368164062, 0.019726720809936524, 0.019658655166625977, 0.019775711059570312, 0.019763200759887696, 0.019733919143676757, 0.019710496902465822, 0.019704191207885743, 0.01970790481567383, 0.019765119552612304, 0.01984444808959961, 0.019741472244262696, 0.019596607208251952, 0.019737279891967774, 0.019757055282592775, 0.019685056686401366, 0.019689184188842773, 0.019623584747314453, 0.019719104766845703, 0.01967513656616211, 0.019759103775024413, 0.019970176696777343, 0.019746175765991213, 0.019829248428344725, 0.019711999893188475, 0.019769344329833984, 0.01979132843017578, 0.019796512603759767, 0.01984111976623535, 0.01980406379699707, 0.019673088073730468, 0.01960140800476074, 0.019697664260864257, 0.019602943420410156, 0.019685056686401366, 0.020081472396850587, 0.01997209548950195, 0.019602848052978517, 0.019597919464111328, 0.019782943725585936, 0.019808319091796874, 0.019779935836791992, 0.01971868705749512, 0.019699487686157226, 0.019752960205078125, 0.01970278358459473, 0.019743743896484374, 0.020164608001708984, 
0.019695615768432616, 0.019647903442382812, 0.019979904174804688, 0.01970195198059082, 0.019671136856079102, 0.019713951110839845, 0.01981644821166992, 0.019914560317993164, 0.01966304016113281, 0.019645856857299804, 0.019624095916748047, 0.019738208770751952, 0.019649375915527345, 0.019661951065063476, 0.01968012809753418, 0.019737951278686522, 0.019608224868774414, 0.019605344772338867, 0.019646623611450195, 0.019592416763305663, 0.019549055099487303, 0.0196648006439209, 0.01986297607421875, 0.01990713691711426, 0.019763103485107424, 0.019708000183105468, 0.019611648559570313, 0.01955366325378418, 0.019640960693359376, 0.01962188720703125, 0.019677183151245118, 0.020017152786254884, 0.02005606460571289, 0.02037555122375488, 0.019791872024536132, 0.019707359313964844, 0.019814655303955077, 0.019961952209472656, 0.01971219253540039, 0.019818496704101563, 0.019914751052856446, 0.02015577507019043, 0.020025983810424804, 0.019728384017944335, 0.019742752075195314, 0.019644384384155274, 0.019686880111694335, 0.019873983383178712, 0.019918527603149414, 0.019711936950683594, 0.01976972770690918, 0.019682880401611327, 0.01985811233520508, 0.019793119430541992, 0.019618688583374025, 0.01961347198486328, 0.019800512313842774, 0.019703584671020506, 0.019927040100097656, 0.019664896011352538, 0.019800064086914062, 0.019699104309082033, 0.019734912872314454, 0.019521760940551757, 0.019681215286254883, 0.01966217613220215, 0.019900415420532228, 0.019803327560424806, 0.01968012809753418, 0.019621824264526366, 0.01962598419189453, 0.01971609687805176, 0.019591167449951173, 0.0196911678314209, 0.019734527587890623, 0.019574880599975586, 0.019673343658447265, 0.019516576766967775, 0.01959343910217285, 0.019554111480712892, 0.019618879318237303, 0.019584575653076173, 0.019603647232055665, 0.01948467254638672, 0.01965875244140625, 0.019595264434814453, 0.019965951919555663, 0.01965260887145996, 0.0196496639251709, 0.019657184600830078, 0.019617567062377928, 0.01959385681152344, 0.01965875244140625, 0.019673088073730468, 0.019603456497192383, 0.01965056037902832, 0.019850784301757813, 0.020123327255249023, 0.019839775085449218, 0.019789056777954103, 0.01976192092895508, 0.019983680725097656, 0.01979667282104492, 0.019774656295776367, 0.019632959365844728, 0.019561567306518556, 0.019567520141601562, 0.019816064834594728, 0.019634559631347658, 0.019588800430297853, 0.020059999465942384, 0.019747295379638673, 0.019564544677734375, 0.019547807693481446, 0.019556224822998045, 0.019754528045654297, 0.0196364803314209, 0.01961414337158203, 0.019631584167480468, 0.019559200286865235, 0.019566207885742187, 0.019550207138061524, 0.02170044708251953, 0.019933727264404295, 0.02000486373901367, 0.019770431518554687, 0.019660959243774413, 0.01960425567626953, 0.01961372756958008, 0.01981715202331543, 0.01953513526916504, 0.01958310317993164, 0.01965884780883789, 0.019696096420288085, 0.019562496185302734, 0.020127424240112303, 0.019564031600952148, 0.01954694366455078, 0.019490943908691407, 0.01965068817138672, 0.019971391677856446, 0.019841472625732423, 0.01979132843017578, 0.019577375411987306, 0.019921119689941407, 0.020071903228759767, 0.020164960861206054, 0.01961884880065918, 0.019626527786254882, 0.019610015869140626, 0.01960483169555664, 0.019579519271850587, 0.019488800048828126, 0.019582624435424804, 0.020910432815551758, 0.021008384704589843, 0.019821887969970704, 0.019786432266235353, 0.019732128143310546, 0.019661151885986328, 0.019628032684326172, 0.019611648559570313, 0.01962518310546875, 
0.019642656326293945, 0.020060672760009765, 0.019654176712036134, 0.019562976837158203, 0.0196997127532959, 0.019810304641723633, 0.01973628807067871, 0.01968751907348633, 0.019599552154541015, 0.019664896011352538, 0.019668703079223634, 0.019679519653320314, 0.01987583923339844, 0.019689472198486328, 0.019728384017944335, 0.019836511611938477, 0.019839391708374024, 0.019963903427124022, 0.01985945510864258, 0.019729663848876953, 0.019682239532470704, 0.019842111587524414, 0.0197578239440918, 0.02014959907531738, 0.019927711486816407, 0.019844736099243164, 0.019726720809936524, 0.019812351226806642, 0.01966454315185547, 0.019875360488891602, 0.019734432220458984, 0.021331584930419922, 0.01966556739807129, 0.019740768432617187, 0.019785120010375978, 0.019701759338378907, 0.019830656051635743, 0.02008892822265625, 0.019794559478759764, 0.01979916763305664, 0.01968182373046875, 0.019935583114624022, 0.019627552032470703, 0.019748607635498048, 0.019816768646240233, 0.019581344604492186, 0.019544063568115236, 0.019727712631225587, 0.019616416931152344, 0.01976108741760254, 0.019653791427612304, 0.019723167419433595, 0.01966217613220215, 0.019749536514282226, 0.019770624160766602, 0.01983945655822754, 0.019751136779785156, 0.01975712013244629, 0.020160512924194338, 0.019734399795532227, 0.019705984115600587, 0.019813472747802735, 0.019667871475219728, 0.019587072372436523, 0.019694623947143556, 0.019628095626831054, 0.01969174385070801, 0.019546239852905274, 0.01961836814880371, 0.01962166404724121, 0.019577024459838867, 0.019619871139526367, 0.01964591979980469, 0.019777088165283202, 0.019578847885131836, 0.019606527328491212, 0.019728128433227538, 0.019919103622436523, 0.01999667167663574, 0.019886079788208007, 0.019728384017944335, 0.019911712646484375, 0.019866048812866213, 0.019862047195434572, 0.019744768142700195, 0.019922271728515625, 0.01989241600036621, 0.019697696685791015, 0.019715551376342774, 0.019713024139404296, 0.01976316833496094, 0.01969308853149414, 0.019964704513549803, 0.019615743637084963, 0.019783679962158202, 0.019729984283447265, 0.019786176681518556, 0.019693567276000978, 0.019658559799194335, 0.01964028739929199, 0.019693792343139647, 0.019623008728027344, 0.019637151718139647, 0.01970755195617676, 0.019700063705444335, 0.019607551574707033, 0.01973017692565918, 0.01985152053833008, 0.019787776947021486, 0.019719295501708985, 0.019788448333740234, 0.01986172866821289, 0.019715328216552735, 0.019707679748535156, 0.019775808334350584, 0.019696287155151367, 0.019677183151245118, 0.01960697555541992, 0.019718719482421876, 0.01972208023071289, 0.019768831253051757, 0.01978812789916992, 0.019918399810791014, 0.01972915267944336, 0.019742719650268553, 0.01973356819152832, 0.019882144927978514, 0.020734752655029297, 0.019851264953613282, 0.019777151107788087, 0.019708192825317383, 0.02069923210144043, 0.019933183670043944, 0.020077728271484376, 0.019762016296386718, 0.019836544036865234, 0.019927295684814453, 0.01981452751159668, 0.01973967933654785, 0.019751136779785156, 0.01967180824279785, 0.019780704498291016, 0.021560224533081054, 0.020149343490600585, 0.01978166389465332, 0.019815296173095704, 0.01977494430541992, 0.019822463989257813, 0.019708576202392577, 0.019727712631225587, 0.01965328025817871, 0.019755008697509766, 0.019644416809082032, 0.019693344116210938, 0.019730560302734373, 0.019918367385864257, 0.020056543350219728, 0.019963903427124022, 0.019967487335205078, 0.019830272674560546, 0.019984479904174804, 0.019954591751098632, 0.0198144645690918, 
0.01972012710571289, 0.019752960205078125, 0.019787776947021486, 0.019804159164428712, 0.019742944717407226, 0.019740224838256836, 0.019849407196044923, 0.01980348777770996, 0.020110015869140626, 0.019729631423950195, 0.019683584213256836, 0.019677312850952148, 0.02010358428955078, 0.019817663192749024, 0.01984707260131836, 0.019774368286132812, 0.019804159164428712, 0.019810304641723633, 0.01978950309753418, 0.01979964828491211, 0.019843807220458986, 0.01984649658203125, 0.019852127075195312, 0.019844064712524413, 0.01978454399108887, 0.01988812828063965, 0.01989017677307129, 0.019769344329833984, 0.019734527587890623, 0.01987174415588379, 0.019965951919555663, 0.022304672241210938, 0.01983395195007324, 0.019984960556030273, 0.019857696533203125, 0.019878047943115235, 0.019778783798217774, 0.019815040588378907, 0.019736703872680665, 0.019728416442871093, 0.019763200759887696, 0.019732479095458985, 0.019834815979003908, 0.019765216827392577, 0.019881696701049806, 0.019775583267211915, 0.01977987289428711, 0.019949567794799804, 0.01992246437072754, 0.019683296203613282, 0.019741184234619142, 0.01975279998779297, 0.019709375381469725, 0.019821279525756835, 0.019775487899780272, 0.019905216217041017, 0.01965465545654297, 0.01990435218811035, 0.020091039657592773, 0.019802112579345704, 0.019885568618774413, 0.019870208740234374, 0.019773439407348634, 0.0196997127532959, 0.019846752166748048, 0.019787296295166016, 0.019845823287963867, 0.0198590087890625, 0.019863296508789062, 0.019786624908447265, 0.019901599884033203, 0.01975587272644043, 0.019876064300537108, 0.019791648864746093, 0.019709407806396486, 0.01992144012451172, 0.019838943481445312, 0.019854623794555663, 0.01987990379333496, 0.019909215927124024, 0.020928064346313478, 0.021013120651245117, 0.02008678436279297, 0.0199117431640625, 0.019805120468139648, 0.020014751434326173, 0.019835391998291017, 0.019756895065307617, 0.019783679962158202, 0.019855039596557617, 0.019893791198730467, 0.01973023986816406, 0.019768543243408203, 0.020024831771850587, 0.0197508487701416, 0.0198305606842041, 0.019761695861816406, 0.019774879455566406, 0.0199215030670166, 0.01983283233642578, 0.019904512405395508, 0.019732288360595703, 0.01978780746459961, 0.019744928359985352, 0.019771392822265626, 0.01992086410522461, 0.0198492488861084, 0.019950687408447267, 0.02009529685974121, 0.019812959671020508, 0.01978495979309082, 0.01988684844970703, 0.01970502471923828, 0.019719135284423827, 0.019658527374267577, 0.019802175521850585, 0.01962188720703125, 0.019672895431518556, 0.019877887725830077, 0.019777376174926756, 0.019744928359985352, 0.01985945510864258, 0.01983283233642578, 0.01985945510864258, 0.019756511688232423, 0.01967977523803711, 0.019718143463134767, 0.019894271850585937, 0.01976927947998047, 0.019674911499023437, 0.019819807052612305, 0.019923967361450197, 0.019750911712646483, 0.019681280136108398, 0.019714048385620117, 0.01983897590637207, 0.019761247634887694, 0.01981020736694336, 0.019678752899169923, 0.019784160614013672, 0.019762943267822266, 0.019732736587524415, 0.0200130558013916, 0.019830495834350585, 0.01976963233947754, 0.019705856323242187, 0.01981644821166992, 0.019701759338378907, 0.01982080078125, 0.019732223510742188, 0.019804224014282227, 0.01976927947998047, 0.01984511947631836, 0.019748672485351563, 0.019760351181030273, 0.01970479965209961, 0.020015104293823242, 0.02020351982116699, 0.019797183990478515, 0.01978041648864746, 0.019812351226806642, 0.019685087203979493, 0.01971824073791504, 0.01971968078613281, 
0.020263359069824217, 0.019822847366333007, 0.019742143630981444, 0.019978815078735352, 0.01985945510864258, 0.01971968078613281, 0.01974233627319336, 0.02008291244506836, 0.019882656097412108, 0.019896320343017578, 0.019725631713867188, 0.01968403244018555, 0.01967513656616211, 0.01971311950683594, 0.019917728424072266, 0.019752960205078125, 0.01989788818359375, 0.019843711853027343, 0.01967103958129883, 0.019783071517944336, 0.019896928787231444, 0.019718048095703124, 0.019736671447753908, 0.019650527954101563, 0.019934463500976562, 0.019714752197265626, 0.019597408294677734, 0.01987379264831543, 0.019904096603393553, 0.01973289680480957, 0.019668256759643555, 0.019681631088256837, 0.019718528747558594, 0.019853311538696287, 0.019779232025146483, 0.019773792266845704, 0.019721664428710938, 0.021006271362304686, 0.022251232147216797, 0.019907487869262695, 0.019965951919555663, 0.01980620765686035, 0.020279296875, 0.01989360046386719, 0.019763872146606444, 0.019908447265625, 0.020097312927246095, 0.020085952758789063, 0.019886783599853516, 0.01975497627258301, 0.01988764762878418, 0.019773248672485352, 0.01987596893310547, 0.019720767974853514, 0.019789535522460936, 0.019806367874145508, 0.019867008209228515, 0.019759872436523437, 0.019730432510375977, 0.01975036811828613, 0.019782175064086915, 0.019718143463134767, 0.019674272537231446, 0.019961856842041017, 0.019594079971313478, 0.019576736450195312, 0.0195828800201416, 0.01972172737121582, 0.019669696807861327, 0.019722240447998047, 0.0196997127532959, 0.01972428894042969, 0.019777536392211914, 0.019678783416748048, 0.019597888946533203, 0.019509120941162108, 0.01968070411682129, 0.019651199340820314, 0.019894048690795897, 0.019702943801879882]",tokens/s,50.50874702171723,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: BloomForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: BloomForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpz_gyg_if/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,1019.293696,1105.068032,0.0,702.54592,681.468416,s,1,9.910955078125,9.910955078125,0.0,9.910955078125,9.910955078125,9.910955078125,9.910955078125,[9.910955078125],,kWh,2.4414848762503044e-05,2.683985283765858e-06,7.480005983999567e-06,3.457884003026847e-05,,MB,1428.955136,1425.932288,0.0,1008.730112,988.14208,s,10,0.24117251205444337,0.024117251205444334,0.00021888919232726647,0.024027359962463378,0.024370783424377443,0.024486992168426515,0.024579959163665774,"[0.024603200912475587, 0.02428486442565918, 0.024028703689575194, 0.024344959259033204, 0.023893024444580076, 0.023920095443725586, 0.024011007308959963, 0.024026016235351562, 0.02415545654296875, 0.023905183792114256]",tokens/s,10614.808371785315,kWh,7.096975643305015e-07,7.82665342287443e-08,4.695951707135917e-07,1.2575592692728373e-06,tokens/kWh,203568934.08930755,MB,1486.266368,1463.681024,0.0,1046.478848,988.14464,s,10,12.606211059570313,1.2606211059570314,0.005413026921327183,1.2599550170898437,1.2684440795898437,1.2699958435058594,1.2712372546386719,"[1.2680992431640625, 1.257679443359375, 1.2617115478515626, 1.2522152099609376, 1.2550252685546874, 1.2614691162109375, 1.2585535888671875, 1.271547607421875, 1.2599266357421874, 1.2599833984375]",tokens/s,49.97536508178007,kWh,3.6800596544835976e-05,4.058631156372879e-06,1.5772890045486434e-05,5.66321177466953e-05,tokens/kWh,1112442.947688925,,s,630,12.603734987258916,0.02000592855120462,0.0002878657330783589,0.019946928024291995,0.02018754253387451,0.020391998481750488,0.021370219631195068,"[0.02011231994628906, 0.0201595516204834, 0.02015737533569336, 0.022726463317871093, 0.02218351936340332, 0.02017465591430664, 0.02029190444946289, 0.020217920303344728, 0.020253087997436522, 0.02025267219543457, 0.0201909122467041, 0.020013120651245116, 0.01997216033935547, 0.02011974334716797, 0.020067647933959962, 0.0201080322265625, 0.020031423568725587, 0.01994473648071289, 0.01994620704650879, 0.02011654472351074, 0.020284223556518554, 0.02000089645385742, 0.019914751052856446, 0.019969568252563477, 0.02002911949157715, 0.020071199417114258, 0.020105472564697267, 0.019990272521972656, 0.019974143981933593, 0.020002208709716796, 0.020130399703979493, 0.01986672019958496, 0.019911584854125978, 0.019939327239990236, 0.020053983688354492, 0.01998441505432129, 0.01993427276611328, 0.01999488067626953, 0.020127456665039064, 0.02007049560546875, 0.019972959518432618, 0.02018716812133789, 0.019834848403930665, 0.019914720535278322, 0.020045888900756835, 
0.01995782470703125, 0.020009056091308593, 0.01999446487426758, 0.019959808349609375, 0.0202608642578125, 0.019984384536743165, 0.019959808349609375, 0.019939327239990236, 0.02005727958679199, 0.020003488540649414, 0.01998361587524414, 0.020041664123535155, 0.02008367919921875, 0.020174848556518556, 0.020121631622314454, 0.020000736236572267, 0.01997209548950195, 0.019986431121826173, 0.020378976821899413, 0.020211551666259767, 0.020095487594604493, 0.019992416381835937, 0.02010323143005371, 0.02009744071960449, 0.020140159606933595, 0.019980159759521485, 0.019899679183959962, 0.01999110412597656, 0.020526687622070314, 0.019964479446411134, 0.019929183959960937, 0.019914655685424804, 0.01997792053222656, 0.019928863525390625, 0.0198286075592041, 0.019869600296020508, 0.0199237117767334, 0.019779584884643556, 0.019965696334838866, 0.019905824661254883, 0.02006934356689453, 0.02013385581970215, 0.02040630340576172, 0.019979488372802733, 0.019817472457885742, 0.02018409538269043, 0.01983951950073242, 0.01987401580810547, 0.019955488204956056, 0.01988630485534668, 0.019801631927490234, 0.019888544082641603, 0.019920703887939453, 0.019957216262817382, 0.019782432556152342, 0.01979801559448242, 0.01993427276611328, 0.019895231246948242, 0.01991222381591797, 0.019907039642333986, 0.01988812828063965, 0.019826656341552736, 0.01981648063659668, 0.020031488418579102, 0.019993759155273436, 0.01999523162841797, 0.019877311706542968, 0.019860288619995118, 0.01985068893432617, 0.019928672790527343, 0.019784671783447266, 0.019895776748657227, 0.01992963218688965, 0.019908607482910155, 0.019918304443359375, 0.019890047073364257, 0.019927711486816407, 0.020002815246582033, 0.01992211151123047, 0.019958303451538085, 0.01987993621826172, 0.02024275207519531, 0.019971967697143555, 0.01993756866455078, 0.01996995162963867, 0.019980192184448242, 0.01992927932739258, 0.02006447982788086, 0.019953439712524414, 0.01985536003112793, 0.02045952033996582, 0.020479839324951173, 0.020117727279663086, 0.020381631851196288, 0.020149568557739257, 0.02004243278503418, 0.0198154239654541, 0.019803136825561524, 0.01972377586364746, 0.01983033561706543, 0.019897279739379884, 0.020233951568603515, 0.019895584106445312, 0.019870559692382814, 0.019826591491699217, 0.01985971260070801, 0.019844480514526367, 0.019775392532348633, 0.019796064376831055, 0.01984364891052246, 0.019855424880981444, 0.019804096221923827, 0.019857023239135744, 0.019808704376220704, 0.01987174415588379, 0.01981622314453125, 0.019794143676757813, 0.01982259178161621, 0.01984499168395996, 0.019894399642944337, 0.019787712097167967, 0.019795263290405273, 0.021068544387817384, 0.022208288192749025, 0.019957855224609376, 0.01984320068359375, 0.01985536003112793, 0.01989151954650879, 0.01997862434387207, 0.019848928451538086, 0.019828927993774413, 0.019985984802246094, 0.01996476745605469, 0.02008064079284668, 0.019990528106689453, 0.019885120391845704, 0.019900863647460937, 0.01996031951904297, 0.019971807479858397, 0.019898656845092774, 0.02030364799499512, 0.021373151779174804, 0.020051967620849608, 0.02011248016357422, 0.020153312683105468, 0.019959808349609375, 0.019857311248779298, 0.01993107223510742, 0.01993846321105957, 0.019893247604370116, 0.019949567794799804, 0.019953887939453126, 0.019910432815551757, 0.019834880828857423, 0.01982464027404785, 0.019795967102050782, 0.01984022331237793, 0.019868032455444336, 0.019888063430786133, 0.0197739200592041, 0.01975632095336914, 0.01975177574157715, 0.019773311614990234, 0.019795967102050782, 
0.019808256149291992, 0.019877824783325195, 0.019935264587402343, 0.0198852481842041, 0.019911008834838866, 0.02002115249633789, 0.019874399185180663, 0.0200534725189209, 0.020003360748291017, 0.019965951919555663, 0.020017152786254884, 0.019931135177612306, 0.019916799545288084, 0.01998028755187988, 0.019961856842041017, 0.019924032211303712, 0.019895231246948242, 0.01981564712524414, 0.01988444709777832, 0.019892608642578125, 0.019901920318603515, 0.019896127700805663, 0.01980076789855957, 0.019709663391113283, 0.019832799911499024, 0.01982089614868164, 0.020357120513916017, 0.019846303939819336, 0.019798879623413087, 0.019793920516967774, 0.019762783050537108, 0.01983235168457031, 0.019775392532348633, 0.019837184906005858, 0.019696479797363282, 0.019746688842773436, 0.01971958351135254, 0.019843679428100586, 0.019705856323242187, 0.019844127655029298, 0.019785791397094726, 0.019864479064941407, 0.019805471420288087, 0.02003264045715332, 0.019843296051025392, 0.019837600708007812, 0.019840095520019533, 0.019760032653808594, 0.019802112579345704, 0.019744768142700195, 0.0198056640625, 0.019806432723999023, 0.01983110427856445, 0.01985852813720703, 0.01988700866699219, 0.01986355209350586, 0.01981439971923828, 0.019914751052856446, 0.019933183670043944, 0.019926240921020508, 0.019897119522094726, 0.019812351226806642, 0.019760255813598634, 0.019776384353637697, 0.01984716796875, 0.01988812828063965, 0.019826688766479493, 0.01981439971923828, 0.019767295837402343, 0.019888320922851564, 0.01992291259765625, 0.02010220718383789, 0.02003228759765625, 0.019950719833374025, 0.020077440261840822, 0.020290752410888672, 0.020403007507324218, 0.02003558349609375, 0.020061471939086913, 0.020313024520874023, 0.020184959411621094, 0.02002873611450195, 0.01986796760559082, 0.019920448303222656, 0.019821279525756835, 0.019952640533447266, 0.02005299186706543, 0.019963903427124022, 0.01986355209350586, 0.019965856552124024, 0.019882080078125, 0.01980124855041504, 0.019837791442871094, 0.01984716796875, 0.01980143928527832, 0.020136159896850588, 0.01991110420227051, 0.01986457633972168, 0.019833856582641602, 0.01982259178161621, 0.019922367095947267, 0.019921152114868165, 0.01989459228515625, 0.01990656089782715, 0.019887327194213867, 0.019907264709472655, 0.020166688919067383, 0.019955615997314453, 0.019955808639526368, 0.019910816192626954, 0.019811967849731445, 0.019937503814697267, 0.019982336044311523, 0.019900415420532228, 0.019820159912109375, 0.019935071945190428, 0.019904447555541993, 0.019893856048583985, 0.019986976623535158, 0.019876319885253905, 0.01996518325805664, 0.019937152862548827, 0.019975040435791017, 0.01990003204345703, 0.019902847290039063, 0.020245920181274413, 0.020143936157226563, 0.02021183967590332, 0.020122272491455078, 0.01996918487548828, 0.01994633674621582, 0.020002815246582033, 0.02002457618713379, 0.0198983039855957, 0.020095808029174805, 0.01992723274230957, 0.019885887145996095, 0.019871423721313477, 0.01991302490234375, 0.020039680480957032, 0.01992038345336914, 0.01991321563720703, 0.019982336044311523, 0.019974239349365236, 0.020059776306152344, 0.019902463912963866, 0.01993670463562012, 0.019871904373168946, 0.019888736724853515, 0.019873695373535158, 0.01990880012512207, 0.020047679901123047, 0.01990470314025879, 0.019865024566650392, 0.019925056457519533, 0.019863616943359374, 0.02002787208557129, 0.02001443290710449, 0.0200894718170166, 0.020073919296264647, 0.019989055633544923, 0.019941375732421874, 0.019977855682373046, 0.020046207427978517, 
0.020008928298950197, 0.02061039924621582, 0.022488895416259765, 0.02007276725769043, 0.020034112930297852, 0.020094303131103514, 0.020132352828979492, 0.019971872329711916, 0.020162464141845703, 0.020512863159179686, 0.020545984268188478, 0.020428991317749022, 0.020750080108642578, 0.020112703323364258, 0.020056543350219728, 0.019947839736938477, 0.0200600643157959, 0.019879808425903322, 0.01981248092651367, 0.019953664779663087, 0.019900415420532228, 0.02002252769470215, 0.019877792358398438, 0.019850080490112304, 0.019838783264160158, 0.019864992141723634, 0.019971904754638673, 0.019850208282470704, 0.019997823715209962, 0.0199136962890625, 0.01984707260131836, 0.01989580726623535, 0.019775840759277345, 0.01984476852416992, 0.019793600082397462, 0.019864383697509765, 0.019850784301757813, 0.019873695373535158, 0.019847007751464845, 0.019875648498535157, 0.01998508834838867, 0.019877664566040038, 0.019859903335571288, 0.019901472091674803, 0.019962848663330077, 0.019857568740844725, 0.01986297607421875, 0.019900575637817382, 0.019915008544921876, 0.019902463912963866, 0.019930944442749024, 0.019979904174804688, 0.019960384368896484, 0.020017152786254884, 0.019980031967163085, 0.01986147117614746, 0.01989455986022949, 0.019886240005493164, 0.019855199813842775, 0.019887231826782228, 0.019954559326171874, 0.0202478084564209, 0.02001696014404297, 0.019989696502685547, 0.01999007987976074, 0.019970304489135744, 0.019881919860839845, 0.019986431121826173, 0.020961503982543945, 0.02044476890563965, 0.01991516876220703, 0.01988921546936035, 0.019923551559448242, 0.02002364730834961, 0.019860576629638672, 0.020895999908447267, 0.019872255325317383, 0.019881343841552733, 0.01997657585144043, 0.01987343978881836, 0.01993916893005371, 0.019953887939453126, 0.019864255905151368, 0.01986764717102051, 0.0198287353515625, 0.01976851272583008, 0.02060358428955078, 0.01980601692199707, 0.020400480270385744, 0.021295072555541993, 0.02178643226623535, 0.020225568771362303, 0.02007107162475586, 0.019949567794799804, 0.019825855255126954, 0.019855295181274414, 0.019899263381958007, 0.020121023178100585, 0.020140159606933595, 0.019988319396972657, 0.01997635269165039, 0.019943872451782228, 0.020111263275146483, 0.020027488708496095, 0.01995913505554199, 0.020008640289306642, 0.02004275131225586, 0.02053321647644043, 0.02005174446105957, 0.020173023223876953, 0.020402175903320312, 0.02045747184753418, 0.02003580856323242, 0.019993696212768555, 0.021475839614868163, 0.02005574417114258, 0.020044288635253905, 0.0200581111907959, 0.02008678436279297, 0.02031430435180664, 0.020106527328491212, 0.020451295852661134, 0.021363040924072266, 0.020153696060180665, 0.020020095825195313, 0.02044313621520996, 0.020092767715454103, 0.020033695220947265, 0.020207616806030275, 0.019961568832397462, 0.020003103256225587, 0.020144287109375, 0.02001686477661133, 0.020048479080200195, 0.02012067222595215, 0.02001519966125488, 0.020200384140014647, 0.01993926429748535, 0.020045759201049805, 0.02002124786376953, 0.019978239059448243, 0.019969152450561523, 0.019960704803466796, 0.01995142364501953, 0.019963520050048828, 0.019962432861328126, 0.020024896621704102, 0.019920768737792968, 0.01987436866760254, 0.019992576599121094, 0.019886079788208007, 0.019834880828857423, 0.020010400772094726, 0.019923551559448242, 0.02004582405090332, 0.01995942306518555, 0.01997657585144043, 0.019922304153442382, 0.019918752670288087, 0.020286176681518556, 0.020264959335327147, 0.02012566375732422, 0.02002694320678711, 0.02002992057800293, 
0.019963199615478516, 0.019968191146850587, 0.020236799240112305, 0.019891359329223632, 0.0199401912689209, 0.01984832000732422, 0.019964639663696288, 0.020060319900512696, 0.019929088592529298, 0.01988803291320801, 0.019968095779418944, 0.019947519302368166, 0.019836320877075195, 0.020025951385498047, 0.01988812828063965, 0.02021990394592285, 0.01988377571105957, 0.019920576095581056, 0.019919391632080077, 0.01988937568664551, 0.0203702392578125, 0.02000796890258789, 0.02006524848937988, 0.020168703079223634, 0.020031488418579102, 0.019875360488891602, 0.019914783477783204, 0.019927488327026368, 0.01987993621826172, 0.019895776748657227, 0.01991689682006836, 0.01977065658569336, 0.01987276840209961, 0.019943424224853516, 0.019916191101074218, 0.01989638328552246, 0.02000079917907715, 0.01992140769958496, 0.019783679962158202, 0.019738624572753907, 0.020017152786254884, 0.020158559799194335, 0.019894176483154297, 0.019924991607666014, 0.02006787109375, 0.02037958335876465, 0.02062131118774414, 0.020125728607177734, 0.020070911407470703, 0.01984102439880371, 0.019957759857177734, 0.019985887527465822, 0.019982879638671874, 0.019907840728759764, 0.019856128692626953, 0.02001049613952637, 0.019906240463256834, 0.019810880661010742, 0.019809823989868164, 0.019905248641967774, 0.020158464431762696, 0.01990553665161133, 0.019983455657958983, 0.019941280364990235, 0.019957344055175782, 0.020167072296142577, 0.020125823974609373, 0.019961408615112305, 0.019951040267944337, 0.019938175201416015, 0.020016544342041014, 0.02012758445739746, 0.020244319915771483, 0.020043840408325197, 0.0200034236907959, 0.020166912078857423, 0.019994592666625975, 0.0200020809173584, 0.019966720581054687, 0.019933183670043944, 0.019976192474365235, 0.01994108772277832, 0.02054582405090332, 0.019977695465087892, 0.019898880004882814, 0.019961055755615235, 0.020015935897827148, 0.01987379264831543, 0.01984921646118164, 0.01988812828063965, 0.019924991607666014, 0.0203407039642334, 0.01995369529724121]",tokens/s,49.98518301415142,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in 
run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: BloomForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: BloomForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp51nor_q1/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, 
in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1171.64032,1109.262336,0.0,706.740224,681.6384,s,1,8.11124267578125,8.11124267578125,0.0,8.11124267578125,8.11124267578125,8.11124267578125,8.11124267578125,[8.11124267578125],,kWh,3.0492379691653087e-05,3.3563227701532543e-06,1.0048341371998726e-05,4.3897043833805065e-05,,MB,1471.512576,1413.349376,0.0,996.1472,949.238272,s,10,0.27525939369201663,0.02752593936920166,0.002131410169120802,0.02677203178405762,0.027904111862182612,0.030904280662536615,0.03330441570281983,"[0.033904449462890625, 0.02671004867553711, 0.026694944381713867, 0.026753408432006836, 0.026736160278320313, 0.026794815063476564, 0.026786304473876952, 0.02723740768432617, 0.026884096145629883, 0.026757759094238283]",tokens/s,9300.318385734525,kWh,7.888891874437568e-07,8.700050791072737e-08,5.028667686594466e-07,1.3787564640139308e-06,tokens/kWh,185674560.14292413,MB,1491.873792,1421.737984,0.0,1004.535808,949.240832,s,10,13.042000366210939,1.3042000366210937,0.08030272311483772,1.278968017578125,1.3109853759765624,1.427837170410156,1.5213186059570314,"[1.54468896484375, 1.285018310546875, 1.2828385009765626, 1.2757518310546876, 1.270318603515625, 1.277839111328125, 1.27610498046875, 1.269203857421875, 1.280096923828125, 1.2801392822265625]",tokens/s,48.30547326407049,kWh,3.715347994506116e-05,4.097577592889477e-06,1.5384284829941432e-05,5.663534236789207e-05,tokens/kWh,1112379.609021596,,s,630,13.039928680419937,0.020698299492730034,0.0016840006295560596,0.02020580768585205,0.020875152778625488,0.02679142370223999,0.0272004386138916,"[0.026969024658203125, 0.031070207595825194, 0.027200735092163086, 0.027015840530395508, 0.02678131294250488, 0.026876415252685547, 0.026869632720947265, 0.026756479263305665, 0.02700364875793457, 0.02683616065979004, 0.026823488235473633, 0.027015008926391602, 0.026839199066162108, 0.02675916862487793, 0.027100799560546875, 0.026810752868652345, 0.028041215896606447, 0.02686476707458496, 0.026909311294555663, 0.0270830078125, 0.026796031951904296, 0.026785791397094725, 0.02696396827697754, 0.027219968795776366, 0.027828224182128908, 0.0270882568359375, 0.02706496047973633, 0.02691276741027832, 0.026918912887573244, 0.02676940727233887, 0.02692300796508789, 0.026870847702026367, 0.02683180809020996, 0.02670796775817871, 0.027031328201293944, 0.0271997127532959, 0.027322368621826174, 0.02752102470397949, 0.020911584854125975, 0.020648479461669922, 0.020833759307861327, 0.020515296936035158, 0.02022310447692871, 0.02031715202331543, 0.020701152801513672, 0.020357120513916017, 0.02022604751586914, 0.020595808029174805, 0.02023891258239746, 0.020373760223388673, 0.02039756774902344, 0.02053977584838867, 0.02027337646484375, 0.020194816589355468, 0.02020604705810547, 0.020301151275634765, 0.020371904373168947, 0.02304435157775879, 0.021390399932861327, 0.020444095611572264, 0.020325759887695312, 
0.02019571113586426, 0.020463872909545898, 0.020435552597045898, 0.020365312576293947, 0.020152320861816408, 0.020289535522460937, 0.021549184799194335, 0.020514688491821288, 0.020336448669433595, 0.02035430335998535, 0.020369472503662108, 0.020192127227783203, 0.02025881576538086, 0.020301599502563477, 0.0202324161529541, 0.020178943634033202, 0.020172639846801756, 0.020186975479125978, 0.02010963249206543, 0.02022604751586914, 0.02023219108581543, 0.02020351982116699, 0.020213727951049806, 0.02017283248901367, 0.020333984375, 0.020248575210571287, 0.020322912216186522, 0.020610048294067384, 0.02087424087524414, 0.02123161506652832, 0.02045747184753418, 0.020402368545532228, 0.020533056259155275, 0.020305919647216796, 0.020546911239624023, 0.02016489601135254, 0.02074179267883301, 0.020107999801635742, 0.020025312423706056, 0.02022403144836426, 0.020109312057495117, 0.020147552490234377, 0.02027996826171875, 0.02060492706298828, 0.02044108772277832, 0.02248899269104004, 0.020410400390625, 0.020383424758911133, 0.020383583068847657, 0.020185375213623048, 0.02014028739929199, 0.020369375228881836, 0.020285472869873047, 0.020172800064086914, 0.02013520050048828, 0.02164201545715332, 0.02019321632385254, 0.020397056579589845, 0.02024880027770996, 0.02041116714477539, 0.02022585678100586, 0.02026710319519043, 0.020164703369140623, 0.020322303771972656, 0.020231935501098634, 0.020520256042480468, 0.020148128509521485, 0.02027190399169922, 0.020168703079223634, 0.02028544044494629, 0.02024038314819336, 0.020183040618896485, 0.020368896484375, 0.020080480575561523, 0.0201181755065918, 0.020179040908813478, 0.020206655502319336, 0.02028371238708496, 0.02037980842590332, 0.02071180725097656, 0.02015011215209961, 0.02015043258666992, 0.020460832595825196, 0.020246240615844728, 0.020127840042114258, 0.02019139289855957, 0.020160480499267577, 0.020318784713745118, 0.02039193534851074, 0.020355295181274415, 0.02035660743713379, 0.020234752655029296, 0.02022604751586914, 0.02019660758972168, 0.020165376663208008, 0.020164159774780272, 0.02026540756225586, 0.020323360443115234, 0.02029257583618164, 0.020479808807373046, 0.023942399978637695, 0.021115840911865233, 0.0209039363861084, 0.020852703094482422, 0.020989984512329102, 0.020545536041259766, 0.020883359909057618, 0.0205515193939209, 0.020510976791381835, 0.020406047821044923, 0.02037196731567383, 0.020210975646972655, 0.020357568740844725, 0.020496383666992187, 0.02019327926635742, 0.02004377555847168, 0.02006947135925293, 0.01999555206298828, 0.020010784149169923, 0.020166879653930665, 0.020033536911010744, 0.020058143615722657, 0.020028768539428712, 0.020040319442749022, 0.020053375244140626, 0.020334400177001954, 0.02002617645263672, 0.02004377555847168, 0.020256351470947266, 0.020071136474609376, 0.02017043113708496, 0.021268735885620116, 0.020029312133789064, 0.02014361572265625, 0.020142719268798827, 0.020275136947631837, 0.02052511978149414, 0.023498687744140625, 0.020352319717407228, 0.02022230339050293, 0.02092073631286621, 0.020180320739746092, 0.0200731201171875, 0.02000230407714844, 0.019991039276123047, 0.02019705581665039, 0.020018720626831056, 0.02035081672668457, 0.020033632278442383, 0.020043712615966797, 0.020032543182373047, 0.020397951126098633, 0.02003558349609375, 0.020321599960327147, 0.020782943725585937, 0.02026358413696289, 0.020109504699707032, 0.02025257682800293, 0.02026915168762207, 0.020191232681274415, 0.020190271377563476, 0.02010412788391113, 0.020083808898925783, 0.020131839752197265, 0.020006015777587892, 
0.02006403160095215, 0.019992576599121094, 0.019986431121826173, 0.020079999923706054, 0.020031808853149414, 0.0200392951965332, 0.020045888900756835, 0.020144287109375, 0.020273632049560546, 0.02013132858276367, 0.020247039794921876, 0.020060159683227538, 0.020119487762451174, 0.020080320358276366, 0.020399744033813477, 0.02013465690612793, 0.02003468894958496, 0.020210559844970704, 0.020059423446655275, 0.02012233543395996, 0.02015382385253906, 0.020099615097045897, 0.020156415939331054, 0.020146175384521483, 0.020268608093261718, 0.02051251220703125, 0.020311391830444336, 0.02019596862792969, 0.020146207809448244, 0.02046156883239746, 0.020205568313598633, 0.020197023391723634, 0.020113759994506836, 0.020164575576782227, 0.020158496856689453, 0.02006982421875, 0.02015622329711914, 0.021019392013549805, 0.020352672576904297, 0.020363616943359374, 0.020314111709594726, 0.02020886421203613, 0.02018511962890625, 0.020185440063476563, 0.020138656616210938, 0.020086528778076172, 0.02020147132873535, 0.020068351745605468, 0.020305248260498048, 0.020138656616210938, 0.02004991912841797, 0.02005606460571289, 0.02004582405090332, 0.020135936737060548, 0.02008406448364258, 0.020012928009033204, 0.02003228759765625, 0.020125696182250977, 0.0201744327545166, 0.020081056594848632, 0.02004400062561035, 0.02005574417114258, 0.020055231094360353, 0.020335519790649414, 0.02026905632019043, 0.020164159774780272, 0.020168672561645506, 0.02022038459777832, 0.020184160232543946, 0.02020025634765625, 0.020082015991210938, 0.02008483123779297, 0.020055904388427734, 0.020101951599121093, 0.02002252769470215, 0.020069183349609374, 0.019998655319213868, 0.019991968154907228, 0.020032096862792968, 0.02000214385986328, 0.019980960845947266, 0.02015180778503418, 0.019990591049194335, 0.020120000839233397, 0.020471647262573243, 0.020099231719970703, 0.02020694351196289, 0.020195999145507813, 0.02020911979675293, 0.020174848556518556, 0.020167903900146486, 0.020009759902954102, 0.020185375213623048, 0.0201312313079834, 0.020127359390258788, 0.02004652786254883, 0.02000806427001953, 0.020017696380615235, 0.020097503662109373, 0.020116992950439453, 0.020045536041259766, 0.019974048614501954, 0.02007526397705078, 0.020279296875, 0.02020966339111328, 0.020268608093261718, 0.020144575119018553, 0.020145471572875977, 0.02019603157043457, 0.020209760665893556, 0.020224191665649413, 0.020405824661254884, 0.020227584838867187, 0.020275680541992188, 0.020369472503662108, 0.02031420707702637, 0.02023632049560547, 0.02024038314819336, 0.020248575210571287, 0.0202794246673584, 0.020300895690917968, 0.020330463409423828, 0.0203143367767334, 0.020191167831420897, 0.020295551300048828, 0.020187936782836913, 0.02026905632019043, 0.0202324161529541, 0.020227872848510742, 0.020178943634033202, 0.02026700782775879, 0.020250335693359375, 0.02016694450378418, 0.020147327423095704, 0.02008153533935547, 0.020329952239990234, 0.025277984619140624, 0.020311040878295897, 0.02079539108276367, 0.020238336563110353, 0.02008883285522461, 0.020142080307006836, 0.020154239654541016, 0.020221439361572266, 0.020075136184692383, 0.020139455795288086, 0.02020924758911133, 0.020065120697021484, 0.020185216903686524, 0.020116832733154295, 0.020203424453735352, 0.020170944213867188, 0.02008304023742676, 0.022324800491333008, 0.020322751998901368, 0.020438079833984376, 0.020267967224121095, 0.020348800659179687, 0.020122976303100587, 0.020427072525024414, 0.020176671981811525, 0.020181695938110353, 0.020183040618896485, 0.02048409652709961, 
0.020274911880493164, 0.02032467269897461, 0.020186687469482423, 0.020227519989013672, 0.021617631912231445, 0.020353023529052734, 0.020137407302856444, 0.020121440887451172, 0.02013257598876953, 0.020271104812622072, 0.020142080307006836, 0.020076543807983398, 0.020377599716186523, 0.020178943634033202, 0.02022400093078613, 0.02031545639038086, 0.020195072174072265, 0.02013279914855957, 0.020254432678222658, 0.02012598419189453, 0.020113407135009767, 0.0202445125579834, 0.020188928604125977, 0.020195552825927734, 0.020000383377075194, 0.0201046085357666, 0.020087776184082032, 0.019998720169067383, 0.020092927932739257, 0.020053983688354492, 0.020042816162109376, 0.020097280502319338, 0.01999945640563965, 0.02014224052429199, 0.02010095977783203, 0.0200581111907959, 0.020165664672851562, 0.020048864364624025, 0.02004694366455078, 0.02012828826904297, 0.020103551864624022, 0.020202911376953125, 0.020259424209594725, 0.02020966339111328, 0.020440479278564454, 0.020212320327758788, 0.020305919647216796, 0.02021174430847168, 0.020165792465209963, 0.020521760940551758, 0.02031184005737305, 0.02025644874572754, 0.020724767684936522, 0.020133184432983398, 0.020453279495239257, 0.020113279342651367, 0.020074752807617186, 0.02012611198425293, 0.020180864334106444, 0.02018707275390625, 0.020365503311157225, 0.020148223876953125, 0.020214944839477538, 0.02005193519592285, 0.02010316848754883, 0.020044864654541014, 0.02019945526123047, 0.020073759078979493, 0.020081151962280275, 0.0200949764251709, 0.02026278305053711, 0.020309823989868164, 0.020175167083740234, 0.020068351745605468, 0.020164608001708984, 0.020066015243530272, 0.020023679733276366, 0.019989471435546875, 0.01997091293334961, 0.020044895172119142, 0.02006118392944336, 0.020121440887451172, 0.02010691261291504, 0.020029951095581054, 0.020033151626586913, 0.020021631240844728, 0.02002467155456543, 0.020010879516601562, 0.02017132759094238, 0.020031904220581053, 0.020000064849853515, 0.02011801528930664, 0.02004582405090332, 0.020082687377929686, 0.020105279922485352, 0.02003046417236328, 0.019999904632568358, 0.020051071166992188, 0.020019519805908204, 0.02003388786315918, 0.020113407135009767, 0.02026905632019043, 0.02046940803527832, 0.020199392318725588, 0.020172607421875, 0.020177024841308594, 0.02033679962158203, 0.02029939270019531, 0.02013599967956543, 0.0201693115234375, 0.020160512924194338, 0.02016364860534668, 0.020118463516235353, 0.020411968231201172, 0.02010643196105957, 0.0201592960357666, 0.02003558349609375, 0.020207168579101563, 0.020425151824951172, 0.020516864776611327, 0.020312063217163084, 0.020175872802734376, 0.02016972732543945, 0.020100223541259767, 0.020102016448974608, 0.020082015991210938, 0.02028982353210449, 0.020460960388183593, 0.020362207412719727, 0.02031830406188965, 0.020389440536499024, 0.020395519256591797, 0.02018931198120117, 0.020077375411987303, 0.020207359313964845, 0.020099231719970703, 0.020164352416992187, 0.020121856689453124, 0.02027270317077637, 0.020392383575439453, 0.020230144500732423, 0.020246528625488282, 0.02037555122375488, 0.02025267219543457, 0.02067251205444336, 0.02025881576538086, 0.02031001663208008, 0.020295679092407228, 0.020361215591430663, 0.02050662422180176, 0.022425600051879883, 0.02103500747680664, 0.020445024490356446, 0.02034499168395996, 0.020174848556518556, 0.020227167129516603, 0.020177824020385742, 0.020570112228393556, 0.020246528625488282, 0.020202880859375, 0.02105196762084961, 0.020237951278686522, 0.02027769660949707, 0.020319807052612306, 
0.02017695999145508, 0.02022028732299805, 0.020154144287109373, 0.02026927947998047, 0.020180992126464844, 0.020317567825317382, 0.0201177921295166, 0.02025712013244629, 0.020178783416748048, 0.020174400329589844, 0.02012015914916992, 0.020178016662597657, 0.020170879364013673, 0.020103424072265626, 0.020619264602661135, 0.020092832565307618, 0.020173919677734374, 0.02047488021850586, 0.020442432403564453, 0.020374143600463867, 0.020158527374267578, 0.020381696701049806, 0.020514368057250976, 0.02064633560180664, 0.02059040069580078, 0.020496576309204102, 0.02011244773864746, 0.02017375946044922, 0.020215423583984374, 0.02017523193359375, 0.020133407592773437, 0.020134368896484376, 0.02012876892089844, 0.02100022315979004, 0.021195743560791017, 0.02171062469482422, 0.020363487243652344, 0.020186624526977538, 0.020117631912231447, 0.02011788749694824, 0.020180608749389647, 0.020515199661254882, 0.020307647705078126, 0.020686527252197266, 0.02038438415527344, 0.02081702423095703, 0.020246944427490234, 0.020165088653564454, 0.020238336563110353, 0.020200639724731444, 0.020107295989990233, 0.020263200759887696, 0.02024323272705078, 0.020356832504272462, 0.020316160202026368, 0.020073759078979493, 0.020397951126098633, 0.02015318489074707, 0.02022400093078613, 0.020163999557495118, 0.02013654327392578, 0.020213760375976563, 0.02014806365966797, 0.02015452766418457, 0.020132095336914062, 0.020094720840454102, 0.020213375091552733, 0.02022345542907715, 0.02018556785583496, 0.020171199798583984, 0.020125696182250977, 0.02015785598754883, 0.020277856826782226, 0.020158239364624023, 0.020403776168823242, 0.02026767921447754]",tokens/s,48.31314767434083,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1172.39808,1109.262336,0.0,706.740224,681.6384,s,1,8.467322265625,8.467322265625,0.0,8.467322265625,8.467322265625,8.467322265625,8.467322265625,[8.467322265625],,kWh,3.163328099167302e-05,3.4820551652054004e-06,1.003139691399535e-05,4.514673307087377e-05,,MB,1511.624704,1413.349376,0.0,996.1472,949.238272,s,10,0.36979677009582523,0.036979677009582525,0.0037964822348549284,0.03911412811279297,0.03977917366027832,0.04022387447357177,0.040579635124206544,"[0.03968035125732422, 0.039476638793945314, 0.039167713165283204, 0.04066857528686523, 0.039113121032714845, 0.039115135192871096, 0.038862945556640625, 0.031132608413696288, 0.030735904693603516, 0.03184377670288086]",tokens/s,6922.721362159622,kWh,9.00259352461571e-07,9.92822468878701e-08,5.695483188860297e-07,1.5690899182354707e-06,tokens/kWh,163151899.0880308,MB,1545.0112,1423.835136,0.0,1006.63296,949.240832,s,10,18.18659765625,1.818659765625,0.23351107890337508,2.003505126953125,2.0106150268554686,2.0155365173339845,2.019473709716797,"[2.0204580078125, 2.0069830322265627, 2.0095213623046875, 2.00326171875, 2.00374853515625, 2.009217041015625, 1.5757489013671875, 1.519130126953125, 1.5216429443359376, 
1.516885986328125]",tokens/s,34.64089391032931,kWh,4.400386707379863e-05,4.853239254597283e-06,1.762668846031665e-05,6.648379478871257e-05,tokens/kWh,947599.3390602302,,s,630,18.184453041076672,0.028864211176312157,0.003827032328238246,0.031614912033081054,0.03209693832397461,0.03231874008178711,0.033272937164306654,"[0.03209875106811524, 0.032194561004638675, 0.032043041229248045, 0.03238220977783203, 0.03228067016601562, 0.03224822235107422, 0.032309505462646486, 0.03233542251586914, 0.03234838485717773, 0.03219068908691406, 0.03229004669189453, 0.032537345886230466, 0.03229216003417969, 0.03238982391357422, 0.032412990570068356, 0.03234201431274414, 0.03254281616210938, 0.032440929412841796, 0.032118175506591795, 0.0346399040222168, 0.03242860794067383, 0.03239084625244141, 0.03253884887695312, 0.032327777862548826, 0.03239251327514649, 0.03237139129638672, 0.03216300964355469, 0.03207673645019531, 0.03212275314331055, 0.03218950271606445, 0.03212963104248047, 0.03216419219970703, 0.032036865234375, 0.03183171272277832, 0.03183651161193848, 0.03196879959106445, 0.031772703170776365, 0.031846847534179684, 0.03241708755493164, 0.031702976226806644, 0.031677120208740236, 0.031580223083496096, 0.03158566474914551, 0.0315068473815918, 0.03148822402954102, 0.03153676795959472, 0.03159049606323242, 0.031559968948364256, 0.031643648147583005, 0.03182131195068359, 0.0316441593170166, 0.03157206344604492, 0.031508384704589845, 0.03147983932495117, 0.03200201416015625, 0.031565471649169924, 0.03177507209777832, 0.032679935455322266, 0.031660032272338864, 0.031934240341186526, 0.03172735977172852, 0.03185094451904297, 0.03170483207702637, 0.031739776611328124, 0.03201935958862305, 0.03208396911621094, 0.03205734252929687, 0.03277619171142578, 0.03187455940246582, 0.03180390357971191, 0.03187001609802246, 0.031777727127075196, 0.03180339241027832, 0.0317255687713623, 0.03169075202941894, 0.03197452735900879, 0.03174652862548828, 0.03174646377563477, 0.031733760833740236, 0.031925952911376954, 0.03200188827514648, 0.03193302345275879, 0.03189132881164551, 0.03177382469177246, 0.03177712059020996, 0.03189929580688477, 0.031752479553222655, 0.032317825317382816, 0.0320714225769043, 0.03224009704589844, 0.031781951904296876, 0.03187808036804199, 0.03172352027893066, 0.031674335479736325, 0.031805471420288084, 0.03171891212463379, 0.031649599075317385, 0.03160883140563965, 0.03170995140075684, 0.0319008960723877, 0.03204169464111328, 0.03216332626342774, 0.031746847152709964, 0.03179484748840332, 0.03180668830871582, 0.03153215980529785, 0.03169593620300293, 0.03180611228942871, 0.03167212867736816, 0.031815391540527344, 0.031725343704223634, 0.03167708778381348, 0.03164777565002441, 0.032008190155029294, 0.03171750450134277, 0.03178073692321777, 0.03176243209838867, 0.03173785591125488, 0.031682559967041016, 0.03179644775390625, 0.03273603057861328, 0.03160883140563965, 0.03183369636535645, 0.03180175971984863, 0.031779903411865235, 0.031941568374633786, 0.03199590492248535, 0.03168412780761719, 0.03158473587036133, 0.03167436790466309, 0.03182284736633301, 0.03177103996276855, 0.03189411163330078, 0.03171039962768555, 0.03208480072021484, 0.032276702880859376, 0.03218204879760742, 0.032349281311035157, 0.03233679962158203, 0.03220479965209961, 0.03200204849243164, 0.031866880416870115, 0.031676416397094724, 0.03182355117797851, 0.03171155166625977, 0.03184220886230469, 0.03210659027099609, 0.031757984161376956, 0.03176278305053711, 0.031784959793090824, 0.035009632110595705, 0.03204143905639648, 
0.03172127914428711, 0.031695487976074216, 0.031605791091918946, 0.031689695358276364, 0.03157606315612793, 0.031631359100341795, 0.031859935760498045, 0.031689504623413085, 0.031645599365234374, 0.031587936401367187, 0.03169945526123047, 0.031647743225097655, 0.0316231689453125, 0.0315043830871582, 0.03148524856567383, 0.03164368057250976, 0.03146575927734375, 0.031674144744873046, 0.03158012771606445, 0.0317957763671875, 0.03164752006530762, 0.03177068710327149, 0.031714752197265626, 0.031587104797363284, 0.03166399955749512, 0.03184227180480957, 0.03181702423095703, 0.031729631423950196, 0.03165056037902832, 0.033814655303955075, 0.03337571334838867, 0.03207171249389648, 0.031770751953125, 0.031684991836547854, 0.03181977653503418, 0.03176819229125977, 0.03179523277282715, 0.03160678482055664, 0.031532703399658205, 0.0315316162109375, 0.03165888023376465, 0.03164454460144043, 0.03221417617797852, 0.03159708786010742, 0.031627264022827145, 0.03149456024169922, 0.03151823997497558, 0.03170518493652344, 0.03156732749938965, 0.03150543975830078, 0.03168579292297363, 0.03154803276062012, 0.03151872062683105, 0.031680511474609374, 0.03155254364013672, 0.03167136001586914, 0.03220387268066406, 0.032072513580322266, 0.031888608932495115, 0.03175708770751953, 0.03158582305908203, 0.03161532783508301, 0.03168092727661133, 0.031629024505615236, 0.03193795204162598, 0.031759967803955076, 0.031681535720825195, 0.03183375930786133, 0.03169657516479492, 0.03183888053894043, 0.031663263320922855, 0.0319719352722168, 0.03177312088012695, 0.03172742462158203, 0.032400928497314456, 0.03210636901855469, 0.03195171165466309, 0.03173107147216797, 0.03168294334411621, 0.03157401657104492, 0.03156377601623535, 0.03167231941223145, 0.03157401657104492, 0.03166358375549316, 0.0316912956237793, 0.03180748748779297, 0.03163750457763672, 0.03155907249450684, 0.03168012809753418, 0.03486796951293945, 0.032030113220214845, 0.03222940826416015, 0.03168544006347656, 0.03169481658935547, 0.03172761535644531, 0.03179315185546875, 0.03182803153991699, 0.031757823944091795, 0.032159614562988284, 0.03179311943054199, 0.031690080642700194, 0.0317138557434082, 0.031719423294067385, 0.03166108894348144, 0.031760799407958985, 0.03178348731994629, 0.031792543411254884, 0.03163811111450195, 0.0315513916015625, 0.03163324737548828, 0.031606208801269534, 0.03168924713134766, 0.0315283203125, 0.03164393615722656, 0.03160128021240234, 0.03164512062072754, 0.03165267181396485, 0.03162291145324707, 0.03167436790466309, 0.03160000038146973, 0.03168646430969238, 0.03167315292358398, 0.031616031646728514, 0.03158320045471191, 0.03161257553100586, 0.03165539169311524, 0.03261695861816406, 0.032280864715576174, 0.031929567337036134, 0.03176499176025391, 0.03192665672302246, 0.03210444641113281, 0.03198310470581055, 0.03183689689636231, 0.03170281600952148, 0.03172966384887695, 0.03160054397583008, 0.03209574508666992, 0.03200640106201172, 0.03188489532470703, 0.03193318367004395, 0.0315863037109375, 0.03182937622070312, 0.032142047882080076, 0.03207107162475586, 0.03195750427246094, 0.03195084762573242, 0.03184639930725098, 0.031662080764770506, 0.03206480026245117, 0.03190585517883301, 0.031875743865966796, 0.031749120712280275, 0.032096736907958986, 0.03175068855285645, 0.03177459144592285, 0.03164777565002441, 0.03177628707885742, 0.031666751861572265, 0.031698944091796875, 0.03194470405578613, 0.031784959793090824, 0.03231948852539063, 0.032055393218994144, 0.031726367950439455, 0.03159459114074707, 0.03144409561157226, 
0.03157843208312988, 0.03147423934936523, 0.03167814445495606, 0.03175356864929199, 0.03166511917114258, 0.03174502372741699, 0.0318342399597168, 0.03168550491333008, 0.03224576187133789, 0.040529918670654294, 0.031737119674682616, 0.03147776031494141, 0.031646432876586916, 0.03164768028259277, 0.031514656066894534, 0.03168054389953613, 0.032290206909179685, 0.031717952728271485, 0.031651872634887696, 0.03180102348327637, 0.031826240539550785, 0.03169004821777344, 0.03166073608398438, 0.031634944915771485, 0.03157423973083496, 0.03167465591430664, 0.031488000869750975, 0.03150028800964356, 0.03180748748779297, 0.031686016082763675, 0.031629695892333984, 0.03164521598815918, 0.03197942352294922, 0.03158095932006836, 0.03179136085510254, 0.031844127655029295, 0.03171036720275879, 0.03178377532958984, 0.0316231689453125, 0.03152239990234375, 0.031576511383056644, 0.03167814445495606, 0.03162092781066895, 0.03164137649536133, 0.031715072631835935, 0.031671232223510745, 0.03302131271362305, 0.0315350399017334, 0.03175699234008789, 0.03150233650207519, 0.03155353546142578, 0.03168460845947266, 0.0316144962310791, 0.03159087944030762, 0.03197747230529785, 0.03155695915222168, 0.03396470260620117, 0.03184982490539551, 0.03165855979919434, 0.03161311912536621, 0.03159696006774902, 0.031641504287719724, 0.03176761627197266, 0.03147372817993164, 0.031600927352905275, 0.03145379257202149, 0.031676416397094724, 0.024221696853637696, 0.02404662322998047, 0.023991296768188477, 0.024186847686767578, 0.02386720085144043, 0.02388559913635254, 0.023844255447387695, 0.023931264877319336, 0.0238384952545166, 0.023841632843017577, 0.024132768630981447, 0.02398294448852539, 0.024030752182006836, 0.02388755226135254, 0.023849407196044923, 0.02410531234741211, 0.02393836784362793, 0.024000192642211916, 0.02391756820678711, 0.024130880355834963, 0.024099519729614258, 0.024050880432128906, 0.023978815078735352, 0.024129535675048826, 0.023973888397216796, 0.023900224685668946, 0.02387727928161621, 0.023875871658325196, 0.023812095642089845, 0.02481939125061035, 0.024087072372436524, 0.024213279724121094, 0.024045568466186523, 0.023932928085327147, 0.023908351898193358, 0.02473369598388672, 0.02396099281311035, 0.023806560516357423, 0.024024543762207032, 0.023996448516845702, 0.024002687454223633, 0.023990655899047853, 0.024250368118286132, 0.024055168151855467, 0.024095359802246093, 0.02404751968383789, 0.0239268798828125, 0.024084672927856446, 0.023942975997924804, 0.024029184341430664, 0.024012224197387695, 0.024664224624633788, 0.024119712829589843, 0.02433856010437012, 0.024284383773803712, 0.02449068832397461, 0.024059104919433593, 0.023964672088623046, 0.023914079666137695, 0.024023231506347657, 0.02404351997375488, 0.02400217628479004, 0.023935359954833986, 0.02421958351135254, 0.024168512344360352, 0.023994367599487306, 0.02415782356262207, 0.02385923194885254, 0.023922655105590822, 0.023816576004028322, 0.023829631805419922, 0.023906368255615235, 0.023869535446166993, 0.02401968002319336, 0.024092416763305664, 0.024344703674316407, 0.024653024673461914, 0.024386463165283204, 0.02444857597351074, 0.024426368713378905, 0.02453116798400879, 0.024369375228881836, 0.02431398391723633, 0.02429952049255371, 0.024344415664672853, 0.024264448165893553, 0.023904191970825196, 0.023818399429321287, 0.023839040756225584, 0.02395484733581543, 0.024003168106079102, 0.02388374328613281, 0.023887903213500976, 0.023997631072998047, 0.023861503601074217, 0.02462774467468262, 0.025712671279907225, 0.024462911605834962, 
0.02415782356262207, 0.024289247512817382, 0.02395222473144531, 0.023975263595581053, 0.02434662437438965, 0.02397772789001465, 0.02400089645385742, 0.02400284767150879, 0.024119712829589843, 0.02390399932861328, 0.023916543960571288, 0.023902496337890624, 0.023898208618164062, 0.02384252738952637, 0.02385081672668457, 0.023881919860839845, 0.02421286392211914, 0.023964000701904298, 0.023929119110107422, 0.024174591064453126, 0.023918367385864257, 0.023791616439819335, 0.023764991760253908, 0.02380803108215332, 0.023938175201416015, 0.023888736724853515, 0.023879680633544922, 0.02415398406982422, 0.02397177505493164, 0.023871007919311522, 0.023765920639038086, 0.023828224182128908, 0.023799072265625, 0.023861024856567385, 0.024033376693725586, 0.023857696533203125, 0.02394463920593262, 0.024848575592041015, 0.02424083137512207, 0.026019327163696288, 0.024252511978149413, 0.024139808654785155, 0.024060447692871093, 0.024686431884765624, 0.02404761505126953, 0.023960927963256835, 0.024046239852905275, 0.02422761535644531, 0.024234207153320312, 0.02408038330078125, 0.024029184341430664, 0.02397337532043457, 0.024043167114257812, 0.024093183517456054, 0.024006399154663086, 0.024453279495239257, 0.02472991943359375, 0.024395904541015624, 0.024407615661621095, 0.02436966323852539, 0.024093727111816406, 0.023984479904174804, 0.024005184173583983, 0.02389414405822754, 0.02404118347167969, 0.024329631805419923, 0.024888063430786134, 0.024577823638916016, 0.024501535415649416, 0.02432713508605957, 0.0241561279296875, 0.024116735458374023, 0.02403955268859863, 0.02402547264099121, 0.02400611114501953, 0.024195167541503908, 0.02406172752380371, 0.024246047973632813, 0.023892864227294922, 0.02408857536315918, 0.02445516777038574, 0.024115360260009766, 0.02396726417541504, 0.02407846450805664, 0.024120384216308594, 0.024098976135253906, 0.02396022415161133, 0.024510751724243163, 0.02412646484375, 0.023889856338500978, 0.023961984634399414, 0.023984128952026368, 0.024005151748657225, 0.023797760009765623, 0.023805696487426756, 0.023939327239990236, 0.02390630340576172, 0.023996416091918944, 0.02388742446899414, 0.02428767967224121, 0.02397804832458496, 0.023934783935546874, 0.023920415878295898, 0.02390457534790039, 0.02379747200012207, 0.023985952377319337, 0.023955936431884765, 0.023997888565063477, 0.02396019172668457, 0.024194751739501953, 0.02392915153503418, 0.02394688034057617, 0.024117631912231444, 0.02409062385559082, 0.023883775711059572, 0.02405580711364746, 0.024092159271240234, 0.028621120452880858, 0.024277183532714845, 0.02413680076599121, 0.024257440567016602, 0.023973888397216796, 0.023774368286132812, 0.023955904006958007, 0.02384118461608887, 0.02389360046386719, 0.023951776504516603, 0.02384390449523926, 0.023923648834228515, 0.024807424545288087, 0.0238919677734375, 0.023872512817382813, 0.023838815689086915, 0.023822975158691407, 0.024374719619750976, 0.02384899139404297, 0.023818944931030272, 0.02392630386352539, 0.024074079513549805, 0.023933120727539062, 0.024078847885131836, 0.024212959289550782, 0.023927391052246092, 0.023949216842651368, 0.023840864181518553, 0.02390425682067871]",tokens/s,34.644979344547785,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU 
@ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4972.498944,7346.192384,0.0,6943.670272,6539.1744,s,1,11.7941640625,11.7941640625,0.0,11.7941640625,11.7941640625,11.7941640625,11.7941640625,[11.7941640625],,kWh,0.00013259486651253003,1.4618991521099659e-05,4.208364477800708e-05,0.00018929750281163678,,MB,4797.804544,7641.890816,0.0,7224.68864,6917.39904,s,10,2.0134595489501956,0.20134595489501955,0.00051771705743223,0.20138206481933596,0.2017876007080078,0.20210072479248048,0.2023512240600586,"[0.2003088684082031, 0.20104896545410156, 0.20091384887695313, 0.20132870483398438, 0.20151091003417967, 0.2014354248046875, 0.201718017578125, 0.20131452941894531, 0.2014664306640625, 0.20241384887695313]",tokens/s,1271.4434721744308,kWh,5.904086264416644e-06,6.511160666742724e-07,3.9020475660794805e-06,1.0457249897170397e-05,tokens/kWh,24480623.731605615,MB,4797.804544,7643.987968,0.0,7226.785792,6917.4016,s,10,18.930143188476563,1.8930143188476563,0.014283110508362238,1.8886383666992188,1.9184749267578125,1.92010947265625,1.9214171093749999,"[1.8801324462890625, 1.885781982421875, 1.8883280029296876, 1.8926431884765624, 1.8868673095703126, 1.8760447998046874, 1.88894873046875, 1.891541015625, 1.9217440185546875, 1.9181116943359375]",tokens/s,33.28025539624565,kWh,5.4961760128917076e-05,6.0621266488289e-06,3.6594512608919554e-05,9.761839938666552e-05,tokens/kWh,645370.1391933054,,s,630,18.92765817832947,0.030043901870364233,0.0005085053205969684,0.02993129634857178,0.03053910484313965,0.03077317123413086,0.03214753639221192,"[0.030493183135986326, 0.03035545539855957, 0.030366912841796875, 0.030017440795898437, 0.030159679412841797, 0.030275232315063478, 0.030035808563232423, 0.030014047622680663, 0.030494047164916993, 0.029865760803222657, 0.029780864715576172, 0.029750816345214842, 0.02958790397644043, 0.029610015869140624, 0.02954444885253906, 0.029655040740966795, 0.029544416427612304, 0.02955267143249512, 0.02958131217956543, 0.02997200012207031, 0.02998524856567383, 0.0297574405670166, 0.029954048156738283, 0.029576992034912108, 0.029720064163208007, 0.029856000900268555, 0.02970467185974121, 0.02974515151977539, 0.029816831588745117, 0.02993561553955078, 0.03064793586730957, 0.029877952575683594, 0.029772480010986327, 0.029815807342529296, 0.02993040084838867, 0.029843168258666994, 0.029827455520629882, 0.029879936218261717, 0.029612415313720702, 0.029684768676757813, 0.029610719680786133, 0.0296507511138916, 0.03015920066833496, 0.029795808792114256, 0.029815423965454103, 0.0295765438079834, 0.029581247329711916, 0.029538368225097655, 0.029542144775390626, 0.029680543899536133, 0.02958745574951172, 0.029577215194702147, 0.02966032028198242, 0.029647712707519532, 0.029702144622802733, 0.02993078422546387, 0.029727455139160155, 0.02999488067626953, 0.030124160766601564, 0.029971616744995117, 0.02986684799194336, 0.02970377540588379, 0.0298438720703125, 0.03011177635192871, 0.029791200637817383, 0.02949331283569336, 0.029543359756469725, 0.029795360565185548, 0.029754335403442383, 0.030302207946777345, 0.030217599868774415, 0.02985843276977539, 0.029933055877685546, 0.02962483215332031, 0.02978755187988281, 0.02972732734680176, 0.029777536392211913, 0.029792640686035158, 0.02977791976928711, 0.02977561569213867, 0.029745407104492187, 0.029672447204589843, 0.029880672454833983, 0.030077600479125978, 0.030117887496948242, 0.02994175910949707, 
0.029867040634155274, 0.029726720809936522, 0.029903839111328125, 0.029997055053710937, 0.030627744674682617, 0.02990812873840332, 0.02976249694824219, 0.02964672088623047, 0.02993984031677246, 0.029918943405151367, 0.03004572868347168, 0.02982067108154297, 0.02981171226501465, 0.029661184310913087, 0.03010755157470703, 0.029871807098388672, 0.02979596710205078, 0.030241567611694335, 0.02985753631591797, 0.029827327728271485, 0.029667327880859375, 0.030085119247436523, 0.03022960090637207, 0.030000032424926756, 0.02995574378967285, 0.029800800323486327, 0.029829120635986327, 0.029925376892089843, 0.029836416244506836, 0.029671808242797852, 0.02995609664916992, 0.0298438720703125, 0.030248992919921874, 0.030352928161621093, 0.03022435188293457, 0.030024255752563477, 0.030736383438110353, 0.029831167221069335, 0.030738143920898436, 0.029753631591796875, 0.030517696380615234, 0.030015039443969726, 0.029956960678100587, 0.029785600662231446, 0.03006924819946289, 0.029984607696533203, 0.02985331153869629, 0.029700639724731446, 0.02972572708129883, 0.02989676856994629, 0.030077407836914063, 0.03019615936279297, 0.029970016479492188, 0.02987254333496094, 0.029853439331054686, 0.029677215576171874, 0.029805152893066407, 0.02965862464904785, 0.029616479873657228, 0.029747360229492186, 0.029677536010742186, 0.030015520095825195, 0.030135616302490235, 0.030853824615478517, 0.03060086441040039, 0.030261600494384765, 0.02976348876953125, 0.02981395149230957, 0.02973695945739746, 0.030126848220825196, 0.029989023208618164, 0.030005247116088866, 0.030119935989379884, 0.029906944274902345, 0.029999103546142578, 0.029949951171875, 0.029851648330688478, 0.0303308162689209, 0.030219903945922853, 0.029905344009399416, 0.02953011131286621, 0.029779264450073242, 0.029883392333984377, 0.029975744247436525, 0.030079488754272462, 0.03017692756652832, 0.029929536819458008, 0.030008895874023438, 0.029915327072143554, 0.030251167297363282, 0.02989094352722168, 0.029890560150146486, 0.029869152069091798, 0.0297271671295166, 0.02971900749206543, 0.029752416610717772, 0.030515552520751953, 0.029948480606079103, 0.03060860824584961, 0.03014735984802246, 0.02972991943359375, 0.029825920104980468, 0.029693056106567382, 0.031082496643066407, 0.030416032791137696, 0.030864063262939452, 0.03056656074523926, 0.030199392318725586, 0.030131872177124024, 0.029950176239013672, 0.030140960693359375, 0.029789728164672853, 0.029788127899169924, 0.029712896347045898, 0.029982048034667967, 0.029883039474487304, 0.0297227840423584, 0.0296343994140625, 0.029908992767333983, 0.02940108871459961, 0.029691680908203125, 0.030107872009277344, 0.032718849182128903, 0.030158624649047852, 0.02980998420715332, 0.029811136245727538, 0.029921760559082033, 0.029742559432983397, 0.0297989444732666, 0.02999091148376465, 0.030263296127319338, 0.029929471969604493, 0.02974515151977539, 0.02951763153076172, 0.032061630249023435, 0.029995008468627928, 0.030220287322998047, 0.030062591552734375, 0.030517248153686522, 0.030105600357055663, 0.030212095260620117, 0.030050304412841795, 0.02996633529663086, 0.029742176055908204, 0.030178207397460938, 0.02950547218322754, 0.029509248733520507, 0.02945430374145508, 0.029510112762451173, 0.029539424896240233, 0.029668256759643553, 0.029326847076416016, 0.029675392150878905, 0.02940787124633789, 0.029648895263671874, 0.031909887313842776, 0.03117251205444336, 0.029927263259887694, 0.030468128204345704, 0.029506975173950196, 0.0295350399017334, 0.029715967178344727, 0.03086089515686035, 0.02952412796020508, 
0.029589536666870118, 0.029454912185668945, 0.03030281639099121, 0.029698047637939453, 0.029625568389892578, 0.02971113586425781, 0.029498559951782227, 0.029614912033081055, 0.02944819259643555, 0.029612031936645508, 0.029542400360107423, 0.02954649543762207, 0.02974060821533203, 0.029540735244750975, 0.02952592086791992, 0.029838752746582032, 0.02989104080200195, 0.02993180847167969, 0.030310047149658202, 0.02990835189819336, 0.02963657569885254, 0.030422016143798827, 0.029661056518554687, 0.029719968795776368, 0.0298687686920166, 0.029861471176147462, 0.029745567321777345, 0.029667104721069336, 0.029549087524414062, 0.029640384674072266, 0.02954854393005371, 0.029664863586425783, 0.02956729507446289, 0.029895967483520507, 0.029567455291748045, 0.030314176559448243, 0.030546592712402343, 0.029752895355224608, 0.02970992088317871, 0.0296210880279541, 0.029560831069946288, 0.030033824920654296, 0.030205631256103517, 0.030114208221435547, 0.030105600357055663, 0.030482431411743165, 0.030074880599975585, 0.02987731170654297, 0.029651103973388673, 0.032624736785888675, 0.0305916805267334, 0.030038015365600586, 0.030182464599609375, 0.030004159927368164, 0.03057459259033203, 0.029716480255126954, 0.029691743850708007, 0.029675359725952147, 0.0313449592590332, 0.030728191375732423, 0.030242368698120116, 0.03015235137939453, 0.02998147201538086, 0.02993561553955078, 0.029792255401611328, 0.030646976470947267, 0.029970432281494142, 0.02970857620239258, 0.0297139835357666, 0.029604000091552736, 0.02965020751953125, 0.029631200790405272, 0.02965488052368164, 0.02976736068725586, 0.029652799606323242, 0.02963862419128418, 0.029864639282226563, 0.029626079559326172, 0.02989619255065918, 0.029909088134765626, 0.030066911697387694, 0.030611936569213866, 0.029913087844848633, 0.029724672317504884, 0.029755392074584962, 0.03011577606201172, 0.029953567504882813, 0.02979414367675781, 0.0297838077545166, 0.02967353630065918, 0.029723264694213866, 0.030176607131958008, 0.02997465515136719, 0.02974390411376953, 0.029870080947875976, 0.029800447463989257, 0.030168256759643554, 0.02973369598388672, 0.02994175910949707, 0.0295731201171875, 0.029652992248535157, 0.029442047119140623, 0.029607263565063477, 0.029548288345336914, 0.029524896621704103, 0.029472768783569334, 0.02976486396789551, 0.02983193588256836, 0.030007295608520508, 0.029913087844848633, 0.029783071517944334, 0.029572063446044922, 0.02994175910949707, 0.02955183982849121, 0.029630783081054688, 0.030065120697021483, 0.02970163154602051, 0.02950553512573242, 0.02976793670654297, 0.03031884765625, 0.02974412727355957, 0.029512704849243163, 0.029674528121948242, 0.02949129676818848, 0.029505695343017578, 0.029387487411499023, 0.029491199493408202, 0.029378559112548826, 0.03003705596923828, 0.030148992538452147, 0.029557279586791992, 0.029626399993896484, 0.02947587203979492, 0.029543392181396483, 0.029456384658813478, 0.029454336166381836, 0.029381792068481447, 0.029569887161254884, 0.029528032302856444, 0.029640064239501954, 0.029854528427124022, 0.029577056884765626, 0.030136320114135744, 0.02974412727355957, 0.02933452796936035, 0.029471744537353517, 0.029608959197998046, 0.029658687591552733, 0.029868000030517577, 0.029790624618530274, 0.0297096004486084, 0.029866720199584963, 0.030195520401000975, 0.029804800033569338, 0.029728736877441406, 0.030042144775390626, 0.02970751953125, 0.03029376029968262, 0.030086143493652344, 0.030052352905273437, 0.029789695739746092, 0.029659648895263672, 0.02974710464477539, 0.03012144088745117, 
0.03027827262878418, 0.030397695541381838, 0.03017804718017578, 0.03017942428588867, 0.030322208404541015, 0.03024729537963867, 0.030054399490356445, 0.030083072662353515, 0.03093708801269531, 0.03017318344116211, 0.029871231079101564, 0.030669408798217772, 0.0298351993560791, 0.03218262481689453, 0.03114188766479492, 0.030824415206909178, 0.030007328033447266, 0.03018547248840332, 0.030136320114135744, 0.029998495101928712, 0.030079328536987304, 0.030346847534179686, 0.029788448333740235, 0.02977555274963379, 0.030005247116088866, 0.02983123207092285, 0.029883007049560546, 0.030998367309570313, 0.03014313507080078, 0.030203231811523436, 0.029942495346069336, 0.029928607940673826, 0.029893375396728514, 0.029754623413085938, 0.03002582359313965, 0.030601184844970705, 0.030883712768554686, 0.029615039825439452, 0.030105600357055663, 0.029638656616210936, 0.02962019157409668, 0.029837343215942384, 0.029702144622802733, 0.029638656616210936, 0.029928831100463866, 0.029802879333496093, 0.030429439544677736, 0.030205440521240235, 0.030527999877929687, 0.03025529670715332, 0.029921152114868163, 0.030022783279418944, 0.03009609603881836, 0.03012179183959961, 0.030015552520751953, 0.029894880294799805, 0.029915136337280275, 0.02971820831298828, 0.029767200469970702, 0.029714752197265625, 0.02995846366882324, 0.02971459197998047, 0.030006464004516602, 0.02966531181335449, 0.02975382423400879, 0.029702463150024415, 0.029797632217407225, 0.029702239990234375, 0.03132646369934082, 0.030064287185668944, 0.030062528610229493, 0.03051807975769043, 0.03002572822570801, 0.029767679214477538, 0.02979430389404297, 0.030672224044799804, 0.03004483222961426, 0.02965692710876465, 0.02960915184020996, 0.029578208923339844, 0.02957926368713379, 0.029656736373901368, 0.030007648468017577, 0.03036947250366211, 0.030242399215698244, 0.03025971221923828, 0.03037971115112305, 0.03043712043762207, 0.030198047637939453, 0.029876735687255858, 0.030816160202026367, 0.02976367950439453, 0.029427711486816405, 0.029706239700317383, 0.033320384979248045, 0.030476863861083985, 0.03020595169067383, 0.029714239120483397, 0.02966547203063965, 0.030289920806884765, 0.029988479614257813, 0.031218048095703124, 0.030121984481811522, 0.030072832107543947, 0.030055456161499024, 0.029884960174560545, 0.02978656005859375, 0.03332710266113281, 0.030219776153564453, 0.030765567779541016, 0.030662656784057617, 0.03077939224243164, 0.03059712028503418, 0.030364864349365233, 0.030797632217407226, 0.030634815216064454, 0.030813791275024413, 0.030444128036499023, 0.030383583068847655, 0.030455808639526367, 0.030366239547729493, 0.030686304092407225, 0.030331104278564454, 0.030397119522094725, 0.030678367614746092, 0.03040323257446289, 0.031154176712036134, 0.033298431396484376, 0.03065782356262207, 0.03060960006713867, 0.030292512893676758, 0.030534847259521485, 0.030358335494995118, 0.030423040390014647, 0.03178848075866699, 0.030431808471679686, 0.030246912002563478, 0.0302587833404541, 0.030376352310180665, 0.030332319259643553, 0.030115936279296877, 0.030413312911987303, 0.030238719940185548, 0.03011577606201172, 0.030281791687011717, 0.0301977596282959, 0.03013212776184082, 0.030666624069213867, 0.030285823822021486, 0.030363872528076173, 0.03012112045288086, 0.030241632461547853, 0.029913087844848633, 0.03099852752685547, 0.03079523277282715, 0.030538272857666016, 0.031471616744995115, 0.0303470401763916, 0.030373119354248048, 0.030317119598388672, 0.030187936782836915, 0.030087039947509765, 0.030062719345092772, 0.029947519302368164, 
0.029995391845703125, 0.029970048904418945, 0.030009727478027343, 0.02989798355102539, 0.029928192138671875, 0.02999091148376465, 0.02989606475830078, 0.030360191345214844, 0.030246912002563478, 0.030296064376831053, 0.030651935577392576, 0.03030790328979492, 0.030407583236694336, 0.030363071441650392, 0.030389856338500977, 0.030350303649902342, 0.030427135467529298, 0.03033497619628906, 0.03022233581542969, 0.030148223876953126, 0.0337817268371582, 0.03048899269104004, 0.030631519317626952, 0.030654111862182618, 0.03064854431152344, 0.03037424087524414, 0.03047372817993164, 0.030525344848632813, 0.030477088928222658, 0.031715328216552735, 0.030494112014770508, 0.03030076789855957, 0.030453760147094725, 0.030341119766235353, 0.03041689682006836, 0.030323871612548826, 0.03051811218261719, 0.030487775802612305, 0.030484703063964842, 0.03042745590209961, 0.030372095108032227, 0.030537696838378908, 0.030569599151611327, 0.030627904891967775, 0.030484735488891603, 0.030188127517700194, 0.03037593650817871, 0.030119935989379884, 0.030110879898071288, 0.030333215713500977, 0.03035798454284668, 0.03039344024658203]",tokens/s,33.284624757292775,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 
2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4974.399488,7346.192384,0.0,6943.670272,6539.1744,s,1,11.6430400390625,11.6430400390625,0.0,11.6430400390625,11.6430400390625,11.6430400390625,11.6430400390625,[11.6430400390625],,kWh,0.0001315777272250822,1.4506553731726868e-05,4.096031054598437e-05,0.00018704459150279345,,MB,4998.361088,7641.890816,0.0,7224.68864,6917.39904,s,10,2.0815584869384764,0.20815584869384765,0.000453871607753706,0.20819725036621095,0.20859234619140626,0.20877018737792968,0.20891246032714844,"[0.20725570678710936, 0.20804966735839844, 0.20759536743164062, 0.20847407531738282, 0.20827040100097657, 0.20816304016113282, 0.2085528259277344, 0.20894802856445313, 0.20801791381835938, 0.20823146057128905]",tokens/s,1229.8477395968862,kWh,6.11945123915013e-06,6.748611463750618e-07,4.059250932582706e-06,1.0853563318107898e-05,tokens/kWh,23586723.778806727,MB,5003.59168,7643.987968,0.0,7226.785792,6917.4016,s,10,21.9069091796875,2.19069091796875,0.011229853992387764,2.188249633789063,2.2043486328125,2.2094287109375004,2.2134927734375003,"[2.199599365234375, 2.18250732421875, 2.184108154296875, 2.190656494140625, 2.1799404296875, 2.2032197265625, 2.2145087890625, 2.18830078125, 2.17586962890625, 2.188198486328125]",tokens/s,28.758050477707183,kWh,6.39527449554342e-05,7.05387187779224e-06,4.247406407181902e-05,0.00011348068090504545,tokens/kWh,555160.5744480422,,s,630,21.90452645874025,0.03476908961704799,0.0004906550635814626,0.034681968688964844,0.03518411064147949,0.035453441619873044,0.0366552529525757,"[0.03605731201171875, 0.03514614486694336, 0.03532620620727539, 0.035020320892333985, 0.03482806396484375, 0.03461328125, 0.03512313461303711, 0.035039424896240234, 0.03488822555541992, 0.03494857788085937, 0.03509446334838867, 0.03498636627197266, 0.03486947250366211, 0.03474431991577148, 0.035250175476074216, 0.034953216552734374, 0.034870849609375, 0.034533119201660155, 0.03476723098754883, 0.034670143127441405, 0.0347143669128418, 0.034988033294677735, 0.034723838806152346, 0.03475251388549805, 0.03512115097045899, 0.03506995010375977, 0.03489936065673828, 0.03524655914306641, 0.03473977661132813, 0.03499679946899414, 0.03505955123901367, 0.03475471878051758, 0.03481353759765625, 0.03477926254272461, 0.03464771270751953, 0.03507606506347656, 0.034941600799560546, 0.03487539291381836, 0.034881504058837894, 0.034925662994384765, 0.034953216552734374, 0.034638782501220704, 0.03467264175415039, 0.03491849517822266, 0.03464796829223633, 0.03465334320068359, 0.03486956787109375, 0.03456902313232422, 0.0346638069152832, 0.03458707046508789, 0.034503902435302734, 0.034400959014892575, 0.03486310577392578, 0.03522057723999023, 0.03516713714599609, 0.034936702728271485, 0.036371742248535156, 0.03508310317993164, 0.03495017623901367, 0.035183582305908206, 
0.034802688598632815, 0.034579456329345705, 0.0344002571105957, 0.03526591873168945, 0.034756446838378904, 0.034487071990966796, 0.03435481643676758, 0.03434739303588867, 0.034351104736328124, 0.034465824127197266, 0.03425481414794922, 0.03438387298583984, 0.034272415161132816, 0.03441340637207031, 0.034446945190429686, 0.03443548965454102, 0.034533374786376955, 0.03497475051879883, 0.03445654296875, 0.03442483139038086, 0.03459686279296875, 0.03438528060913086, 0.034361278533935544, 0.03424121475219727, 0.03424870300292969, 0.03424428939819336, 0.03450207901000977, 0.034438014984130856, 0.03434454345703125, 0.034314655303955076, 0.03473612976074219, 0.03449958419799805, 0.03440332794189453, 0.03457228851318359, 0.03428518295288086, 0.03444060897827148, 0.034159584045410155, 0.03429158401489258, 0.03429526519775391, 0.03434972763061524, 0.03440435028076172, 0.034353153228759765, 0.034508190155029296, 0.03450057601928711, 0.034579071044921875, 0.03504908752441406, 0.03499385452270508, 0.03461600112915039, 0.03497369766235352, 0.03471558380126953, 0.034398273468017576, 0.035288928985595706, 0.03471308898925781, 0.0346610221862793, 0.03475894546508789, 0.03472339248657227, 0.03474038314819336, 0.03467411041259766, 0.03481363296508789, 0.03487424087524414, 0.0350682258605957, 0.035092159271240236, 0.035232864379882815, 0.03617849731445313, 0.03706447982788086, 0.03496352005004883, 0.0356577262878418, 0.03526611328125, 0.03573190307617188, 0.03524607849121094, 0.0349224967956543, 0.03474822235107422, 0.03471299362182617, 0.034538272857666016, 0.03472995376586914, 0.03469929504394531, 0.034754302978515624, 0.03475024032592773, 0.03468540954589844, 0.03455945587158203, 0.03477686309814453, 0.034617183685302734, 0.034560798645019535, 0.034375198364257814, 0.03450518417358398, 0.03445568084716797, 0.03473612976074219, 0.03453849411010742, 0.03451087951660156, 0.03432723236083984, 0.03437968063354492, 0.03435558319091797, 0.03437948989868164, 0.034418304443359374, 0.03455414581298828, 0.03462387084960938, 0.03571916961669922, 0.03565347290039062, 0.03534864044189453, 0.03473993682861328, 0.034498241424560545, 0.03449654388427734, 0.03443564987182617, 0.03484672164916992, 0.03442892837524414, 0.03424991989135742, 0.0342147216796875, 0.034285568237304685, 0.03433219146728515, 0.03429219055175781, 0.034293407440185546, 0.034242366790771486, 0.03432502365112305, 0.03438383865356445, 0.034756641387939456, 0.03468825531005859, 0.03489049530029297, 0.034676353454589845, 0.0345109748840332, 0.03455516815185547, 0.034710494995117185, 0.03456739044189453, 0.034626335144042966, 0.034531326293945314, 0.03453071975708008, 0.03447663879394531, 0.03484627151489258, 0.03490041732788086, 0.03470883178710937, 0.03577897644042969, 0.035053569793701174, 0.03458793640136719, 0.03449740982055664, 0.034445152282714844, 0.034705150604248045, 0.03478076934814453, 0.034433055877685546, 0.034529918670654296, 0.034664447784423826, 0.03515801620483398, 0.03547676849365235, 0.034922592163085936, 0.0349315185546875, 0.035038848876953126, 0.03464764785766602, 0.034605663299560545, 0.03638995361328125, 0.03472889709472656, 0.034748416900634765, 0.03454073715209961, 0.0348919677734375, 0.0344951057434082, 0.03486515045166016, 0.034856319427490234, 0.03529110336303711, 0.03501123046875, 0.034912158966064456, 0.03497510528564453, 0.034721694946289065, 0.03488441467285156, 0.03444076919555664, 0.03461503982543945, 0.034406944274902346, 0.03443097686767578, 0.03443920135498047, 0.03441990280151367, 0.03447084808349609, 
0.03501465606689453, 0.034586208343505856, 0.03492441558837891, 0.03478787231445313, 0.034531326293945314, 0.03454771041870117, 0.03458793640136719, 0.034450145721435545, 0.034533374786376955, 0.03445280075073242, 0.03431414413452148, 0.034261600494384765, 0.034398143768310546, 0.03424812698364258, 0.0346561279296875, 0.03448108673095703, 0.03449446487426758, 0.034168033599853515, 0.03443571090698242, 0.034239967346191405, 0.034367359161376954, 0.0342597770690918, 0.034403968811035156, 0.039963008880615235, 0.034525184631347655, 0.035682239532470704, 0.03506499099731445, 0.03468339157104492, 0.03452134323120117, 0.03476249694824219, 0.034836734771728516, 0.034687103271484374, 0.03458047866821289, 0.0355860481262207, 0.03444460678100586, 0.03449721527099609, 0.034574337005615234, 0.03466239929199219, 0.03457843017578125, 0.03549209594726563, 0.034487777709960935, 0.034545505523681644, 0.03460655975341797, 0.03439510345458984, 0.034334400177001956, 0.03438214492797852, 0.03421286392211914, 0.03421014404296875, 0.03436150360107422, 0.03437750244140625, 0.03426287841796875, 0.03582860946655273, 0.03449814224243164, 0.034431392669677735, 0.03530547332763672, 0.034543617248535156, 0.03452444839477539, 0.034243297576904294, 0.03441049575805664, 0.03431219100952149, 0.034301952362060545, 0.03452928161621094, 0.034351104736328124, 0.03440790557861328, 0.034496799468994144, 0.03444883346557617, 0.03455055999755859, 0.034603038787841794, 0.03439580917358399, 0.034289249420166014, 0.03444771194458008, 0.034345375061035154, 0.03502412796020508, 0.03442134475708008, 0.03432243347167969, 0.0344447021484375, 0.03446656036376953, 0.034648063659667966, 0.03481087875366211, 0.034640830993652345, 0.03446102523803711, 0.03461503982543945, 0.03447087860107422, 0.03446710586547851, 0.034533729553222654, 0.03467305755615235, 0.03457020950317383, 0.03500851058959961, 0.03676361465454102, 0.03507379150390625, 0.03475977706909179, 0.03473171234130859, 0.03464396667480469, 0.03451084899902344, 0.03465241622924805, 0.034938625335693356, 0.034816001892089846, 0.03480575942993164, 0.0349977912902832, 0.03506147384643555, 0.03487615966796875, 0.03496527862548828, 0.0348895378112793, 0.03525273513793945, 0.03497289657592773, 0.03486995315551758, 0.03498303985595703, 0.03494182586669922, 0.03491804885864258, 0.03499248123168945, 0.035135486602783206, 0.03512934494018555, 0.03501670455932617, 0.03509622573852539, 0.03543280029296875, 0.035198974609375, 0.03525836944580078, 0.03544473648071289, 0.03513958358764648, 0.034816001892089846, 0.034934078216552734, 0.03475667190551758, 0.034804096221923826, 0.034953472137451175, 0.03495116806030273, 0.034811550140380856, 0.03497132873535156, 0.03473884963989258, 0.034803009033203124, 0.03485327911376953, 0.03485520172119141, 0.03475251388549805, 0.03504537582397461, 0.03469481658935547, 0.03472623825073242, 0.034713470458984375, 0.034643360137939457, 0.03503177642822266, 0.03489107131958008, 0.0351607666015625, 0.0353546257019043, 0.03515801620483398, 0.035297279357910154, 0.03488153457641602, 0.03485440063476562, 0.034722305297851565, 0.035087711334228514, 0.034914974212646485, 0.03501670455932617, 0.0347770881652832, 0.03472588729858399, 0.03574208068847656, 0.03530569458007812, 0.03515094375610352, 0.03498294448852539, 0.03851161575317383, 0.03489449691772461, 0.03488742446899414, 0.03475199890136719, 0.035006656646728515, 0.034822719573974606, 0.03499622344970703, 0.03477932739257812, 0.0349029426574707, 0.037933025360107425, 0.03545388793945312, 0.035163486480712894, 
0.03481052780151367, 0.03561881637573242, 0.035645439147949216, 0.034754558563232424, 0.03508224105834961, 0.03509471893310547, 0.03500969696044922, 0.03486991882324219, 0.03484672164916992, 0.034711456298828124, 0.03490825653076172, 0.034872447967529294, 0.0350047378540039, 0.034838623046875, 0.034961887359619144, 0.03498112106323242, 0.03534467315673828, 0.034885215759277347, 0.03505433654785156, 0.03518886566162109, 0.0352542724609375, 0.03508019256591797, 0.03514348983764649, 0.03470892715454101, 0.03492736053466797, 0.035699935913085935, 0.03601475143432617, 0.03501500701904297, 0.03491721725463867, 0.0349617919921875, 0.035165985107421874, 0.0349334716796875, 0.03545289611816406, 0.0349752311706543, 0.03506358337402344, 0.03519062423706055, 0.0352407341003418, 0.034929248809814455, 0.0349315185546875, 0.03478102493286133, 0.03468326568603516, 0.03494873428344727, 0.035011520385742186, 0.035064830780029296, 0.03496448135375976, 0.034772544860839846, 0.03468067169189453, 0.03605833435058594, 0.0354815673828125, 0.03536518478393555, 0.03490252685546875, 0.034869247436523435, 0.034729057312011716, 0.03457664108276367, 0.03459084701538086, 0.034447200775146486, 0.034288318634033206, 0.03442483139038086, 0.034463455200195316, 0.034496063232421874, 0.034458335876464845, 0.03441609573364258, 0.0345318717956543, 0.03453129577636719, 0.034497760772705076, 0.03434988784790039, 0.03420310211181641, 0.034396095275878905, 0.03477766418457031, 0.03481744003295899, 0.034679424285888674, 0.03483363342285156, 0.035054367065429685, 0.034836383819580076, 0.034723934173583985, 0.03788390350341797, 0.03502262496948242, 0.034848255157470705, 0.03475939178466797, 0.034751968383789064, 0.034462238311767576, 0.03460300827026367, 0.03495935821533203, 0.035216926574707035, 0.03478764724731445, 0.03482371139526367, 0.034796257019042966, 0.034779041290283204, 0.034799583435058595, 0.034555713653564454, 0.034649761199951175, 0.03464044952392578, 0.03459411239624023, 0.03458220672607422, 0.03455487823486328, 0.034508800506591795, 0.03578060913085938, 0.034551807403564457, 0.03461737442016601, 0.03453235244750977, 0.03451500701904297, 0.034744350433349606, 0.03434310531616211, 0.03433132934570313, 0.03442435073852539, 0.03434543991088867, 0.03427123260498047, 0.034356575012207034, 0.03452380752563477, 0.034344959259033206, 0.035000736236572266, 0.03475491333007812, 0.03462083053588867, 0.034664703369140626, 0.034602367401123046, 0.03455279922485351, 0.0348590087890625, 0.03444736099243164, 0.034434368133544925, 0.034508575439453126, 0.034544063568115235, 0.03422198486328125, 0.03428409576416016, 0.03456630325317383, 0.03448947143554688, 0.03491913604736328, 0.03472588729858399, 0.03461939239501953, 0.03448419189453125, 0.03464195251464844, 0.03442179107666016, 0.03449654388427734, 0.03474937438964844, 0.034751838684082034, 0.03454742431640625, 0.035093441009521484, 0.03463577651977539, 0.03436937713623047, 0.034465953826904296, 0.034326526641845705, 0.034493953704833984, 0.034242721557617185, 0.03440995025634765, 0.03432128143310547, 0.03441459274291992, 0.03443097686767578, 0.03458009719848633, 0.034474369049072265, 0.034571712493896484, 0.03433260726928711, 0.03443289566040039, 0.03444947052001953, 0.03438633728027344, 0.03457974243164062, 0.034356224060058595, 0.03431615829467773, 0.03441436767578125, 0.03426339340209961, 0.03439206314086914, 0.0344268798828125, 0.034369537353515625, 0.03468697738647461, 0.03511075210571289, 0.03467689514160156, 0.03464518356323242, 0.03464684677124023, 0.034514400482177736, 
0.03436912155151367, 0.0344378547668457, 0.034422111511230466, 0.034546558380126956, 0.03450198364257812, 0.034615966796875, 0.035519710540771486, 0.034775264739990236, 0.03448684692382813, 0.03472320175170898, 0.034425472259521486, 0.034541568756103515, 0.034632991790771485, 0.03437161636352539, 0.03469513702392578, 0.03421014404296875, 0.03440473556518555, 0.035811328887939455, 0.03480739212036133, 0.03461062240600586, 0.03491526412963867, 0.0369183349609375, 0.034604000091552733, 0.03457843017578125, 0.0343818244934082, 0.034576385498046876, 0.034543617248535156, 0.03430403137207031, 0.0345327033996582, 0.03437363052368164, 0.034603649139404294, 0.0344035530090332, 0.03437811279296875, 0.03466486358642578, 0.03442844772338867, 0.03464230346679688, 0.03467887878417969, 0.03512319946289062, 0.03460300827026367, 0.03446076965332031, 0.034756641387939456, 0.03461324691772461, 0.03445235061645508, 0.035104351043701174, 0.03462390518188477, 0.03474227142333984, 0.035149822235107424, 0.03465811157226562, 0.03480595016479492, 0.03451289749145508, 0.034699264526367186, 0.034631679534912106, 0.03467647933959961, 0.0353702392578125, 0.034776065826416014, 0.03472588729858399, 0.03470070266723633, 0.03449711990356445, 0.03439187240600586, 0.034369728088378904, 0.034383071899414065, 0.034298656463623046, 0.03497369766235352, 0.03506585693359375, 0.03538639831542969, 0.035049888610839845, 0.03528553771972656, 0.03466243362426758, 0.034852863311767575]",tokens/s,28.76117870827655,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3167.440896,4431.151104,0.0,4028.628992,3944.723968,s,1,10.4510712890625,10.4510712890625,0.0,10.4510712890625,10.4510712890625,10.4510712890625,10.4510712890625,[10.4510712890625],,kWh,9.542317954166417e-05,1.0518519734284388e-05,3.059169114000282e-05,0.00013653339041595137,,MB,3221.356544,4770.889728,0.0,4353.687552,4305.05728,s,10,1.1044366760253905,0.11044366760253906,0.0002848389871099568,0.11044403076171874,0.11082803268432617,0.11088694343566895,0.11093407203674317,"[0.11081494140625, 0.11040764617919922, 0.1105552978515625, 0.10989884948730469, 0.11029542541503906, 0.11020684814453124, 0.11029376220703126, 0.11048041534423828, 0.11053763580322265, 0.11094585418701172]",tokens/s,2317.923748433311,kWh,3.307156384222962e-06,3.647112465421859e-07,2.195594765213451e-06,5.8674623959785986e-06,tokens/kWh,43630445.79125987,MB,3221.356544,4770.889728,0.0,4353.687552,4305.05984,s,10,22.064403320312493,2.20644033203125,0.02121619752585357,2.201143310546875,2.2305120117187496,2.23901015625,2.245808671875,"[2.17440966796875, 2.191793701171875, 2.18486376953125, 2.195716552734375, 2.19678369140625, 2.2055029296875, 2.21407568359375, 2.22512548828125, 2.24750830078125, 
2.22862353515625]",tokens/s,28.55277756004495,kWh,6.443335558952629e-05,7.106840502124574e-06,3.5765296714585635e-05,0.00010730549280623651,tokens/kWh,587108.8082486164,,s,630,22.061627162933306,0.035018455814179915,0.0007061827530035264,0.03489764785766602,0.03583762092590332,0.03606394004821777,0.037804643249511743,"[0.034930912017822266, 0.03487385559082031, 0.034369537353515625, 0.03424374389648437, 0.03439049530029297, 0.03438937759399414, 0.03450556945800781, 0.03436076736450195, 0.034350975036621094, 0.0344277458190918, 0.03423027038574219, 0.03426508712768555, 0.03424019241333008, 0.03431046295166015, 0.034127872467041014, 0.03433881759643555, 0.034592769622802735, 0.034414657592773436, 0.034242462158203125, 0.0344598388671875, 0.03416035079956055, 0.034283294677734374, 0.03441052627563477, 0.039233566284179684, 0.034801952362060545, 0.03440662384033203, 0.03442051315307617, 0.03619574356079101, 0.03471414566040039, 0.03429321670532227, 0.03460156631469727, 0.03425894546508789, 0.03419334411621094, 0.03423379135131836, 0.034156478881835935, 0.034189537048339845, 0.03401161575317383, 0.03408406448364258, 0.03418806457519531, 0.03419136047363281, 0.03421184158325195, 0.03419750213623047, 0.03413212966918945, 0.03407593536376953, 0.03412844848632812, 0.0343469123840332, 0.034145824432373045, 0.034136001586914065, 0.034460575103759765, 0.034002880096435546, 0.03408259201049805, 0.03435059356689453, 0.03478704071044922, 0.03865657424926758, 0.03477875137329101, 0.03438361740112305, 0.034415454864501954, 0.034574207305908204, 0.03422035217285156, 0.03425491333007812, 0.03429667282104492, 0.0342367057800293, 0.03420419311523438, 0.035590145111083986, 0.034772991180419925, 0.03468492889404297, 0.034598911285400394, 0.03451206588745117, 0.034319168090820314, 0.0341907844543457, 0.03426902389526367, 0.03425158309936523, 0.03419884872436523, 0.03433123016357422, 0.034359294891357424, 0.03413520050048828, 0.03426300811767578, 0.034296703338623044, 0.03416640090942383, 0.03450099182128906, 0.03444940948486328, 0.03449001693725586, 0.038003040313720704, 0.03472793579101562, 0.034334495544433595, 0.03486313629150391, 0.034406593322753906, 0.03426003265380859, 0.03444623947143555, 0.03434870529174805, 0.034486686706542966, 0.03436067199707031, 0.03439680099487305, 0.03439820861816406, 0.03511865615844727, 0.03501715087890625, 0.03504531097412109, 0.035157344818115235, 0.034888416290283206, 0.03482624053955078, 0.0348686408996582, 0.0345337905883789, 0.03427756881713867, 0.03441392135620117, 0.034362014770507814, 0.034199710845947265, 0.03403913497924805, 0.03506134414672851, 0.035959392547607424, 0.03574406433105469, 0.03539286422729492, 0.03495920181274414, 0.0361313591003418, 0.034621376037597656, 0.03507440185546875, 0.0351539192199707, 0.035143680572509765, 0.03511904144287109, 0.035114208221435544, 0.03525923156738281, 0.03527065658569336, 0.035348480224609374, 0.03517424011230469, 0.03525759887695312, 0.034951774597167966, 0.03464156723022461, 0.03520153427124023, 0.03472022247314453, 0.03489737701416016, 0.034605792999267575, 0.03473183822631836, 0.03454329681396484, 0.035555553436279294, 0.03652579116821289, 0.035888511657714846, 0.035148799896240236, 0.03516758346557617, 0.03520169448852539, 0.034996063232421874, 0.03534864044189453, 0.03500646209716797, 0.034602592468261716, 0.03450851058959961, 0.03444806289672851, 0.03445753479003906, 0.03437993621826172, 0.034490272521972655, 0.034459648132324217, 0.034325729370117186, 0.03424361419677734, 0.034491233825683594, 0.034395038604736326, 
0.03439734268188477, 0.03438844680786133, 0.034417022705078126, 0.034277374267578126, 0.034221534729003904, 0.03460764694213867, 0.03411763381958008, 0.03419945526123047, 0.034740318298339845, 0.034361343383789066, 0.03430809783935547, 0.034311233520507814, 0.03449766540527344, 0.03425471878051758, 0.03497148895263672, 0.03433465576171875, 0.03417449569702148, 0.034119422912597654, 0.03420969772338867, 0.03434729766845703, 0.03478537750244141, 0.03441916656494141, 0.03588313674926758, 0.03458780670166016, 0.034210655212402345, 0.03432969665527344, 0.03469321441650391, 0.03445638275146484, 0.034395263671875, 0.034361312866210934, 0.03479580688476563, 0.0345852165222168, 0.034907424926757816, 0.035050209045410154, 0.03508428955078125, 0.0350904312133789, 0.03534131240844727, 0.03566726303100586, 0.03503484725952148, 0.03513647842407226, 0.03527475357055664, 0.034988033294677735, 0.035533023834228517, 0.03512847900390625, 0.035033729553222655, 0.0353546257019043, 0.035141632080078124, 0.03514566421508789, 0.03535651016235351, 0.034732257843017575, 0.03520307159423828, 0.035149822235107424, 0.03503513717651367, 0.03503513717651367, 0.034648063659667966, 0.03462144088745117, 0.034475200653076174, 0.0344543342590332, 0.03450172805786133, 0.034560352325439456, 0.03436374282836914, 0.03452918243408203, 0.03460537719726563, 0.034678592681884765, 0.03457231903076172, 0.03462566375732422, 0.03453142547607422, 0.03457763290405273, 0.0349969596862793, 0.03492659378051758, 0.03510172653198242, 0.03472662353515625, 0.03456230545043945, 0.03439311981201172, 0.03437871932983398, 0.03445555114746094, 0.0342806396484375, 0.034406208038330076, 0.03439923095703125, 0.03455126571655273, 0.03472848129272461, 0.03467673492431641, 0.034840576171875, 0.034815807342529294, 0.03479158401489258, 0.03475049591064453, 0.03483561706542969, 0.034711456298828124, 0.03484972763061524, 0.035095680236816404, 0.035137889862060546, 0.035195423126220704, 0.035076095581054685, 0.03486873626708984, 0.034781631469726564, 0.034826305389404295, 0.034620639801025394, 0.035007137298583985, 0.035038719177246096, 0.03589379119873047, 0.035388992309570315, 0.03505027389526367, 0.03480316925048828, 0.03495993423461914, 0.03501001739501953, 0.035047969818115234, 0.03480780792236328, 0.03483612823486328, 0.03474028778076172, 0.03469062423706055, 0.03486793518066406, 0.034770942687988284, 0.03468697738647461, 0.034936832427978515, 0.03493379211425781, 0.03466953659057617, 0.034516990661621096, 0.03445731353759766, 0.03464774322509766, 0.034586654663085935, 0.03461552047729492, 0.034574687957763674, 0.034598911285400394, 0.03474444961547852, 0.03475033569335938, 0.03489791870117188, 0.03472566223144531, 0.034827617645263674, 0.034665184020996095, 0.034506591796875, 0.034570560455322266, 0.0345263671875, 0.034406593322753906, 0.03444598388671875, 0.03438387298583984, 0.034514942169189454, 0.03478966522216797, 0.0347685432434082, 0.03464380645751953, 0.034613632202148435, 0.034813953399658204, 0.03504867172241211, 0.035170177459716796, 0.03483430480957031, 0.0347902717590332, 0.03499734497070312, 0.0347097282409668, 0.03464262390136719, 0.03473408126831055, 0.03476598358154297, 0.03711676788330078, 0.03487948989868164, 0.03597875213623047, 0.0356704330444336, 0.037318912506103516, 0.03496451187133789, 0.03497625732421875, 0.0348570556640625, 0.03459708786010742, 0.03460015869140625, 0.03469619369506836, 0.034622528076171874, 0.03476076889038086, 0.03558617782592773, 0.035128833770751954, 0.03504115295410156, 0.03498035049438476, 
0.034854911804199216, 0.03485696029663086, 0.03475251388549805, 0.035194881439208986, 0.03498767852783203, 0.03513353729248047, 0.03505721664428711, 0.034912033081054686, 0.035035839080810545, 0.035481822967529296, 0.035323902130126955, 0.03514572906494141, 0.03505379104614258, 0.034796737670898435, 0.03478511810302734, 0.03457510375976563, 0.03501055908203125, 0.034856128692626956, 0.034904895782470705, 0.03474816131591797, 0.03460137557983398, 0.034807647705078125, 0.03592396926879883, 0.035018047332763674, 0.034861759185791014, 0.03498534393310547, 0.034828929901123046, 0.03462348937988281, 0.03486105728149414, 0.0348221435546875, 0.034631679534912106, 0.03467843246459961, 0.03489007949829102, 0.03522150421142578, 0.03531103897094726, 0.03498614501953125, 0.034917919158935544, 0.03491929626464844, 0.038319583892822265, 0.034860862731933596, 0.03538198471069336, 0.03479132843017578, 0.034781280517578124, 0.035028640747070315, 0.03523209762573242, 0.035120254516601564, 0.034958206176757815, 0.03478227233886719, 0.034817985534667965, 0.03504844665527344, 0.034955265045166016, 0.03504246520996094, 0.03492918395996094, 0.03459257507324219, 0.034974369049072265, 0.03474723052978516, 0.03453763198852539, 0.03449878311157226, 0.03475724792480469, 0.03703647994995117, 0.03562099075317383, 0.03499008178710938, 0.035004417419433595, 0.03487539291381836, 0.03468198394775391, 0.03465692901611328, 0.034963680267333985, 0.03516783905029297, 0.03485532760620117, 0.034783233642578126, 0.034748416900634765, 0.034893505096435545, 0.03522089767456055, 0.035261344909667966, 0.03526873779296875, 0.03516198348999024, 0.035012702941894534, 0.03497942352294922, 0.03487776184082031, 0.03503104019165039, 0.03525996780395508, 0.03520556640625, 0.03518259048461914, 0.03514080047607422, 0.03493952178955078, 0.03489811325073242, 0.03489369583129883, 0.03525151824951172, 0.03876665496826172, 0.035402496337890624, 0.03537676620483399, 0.03530790328979492, 0.035460094451904296, 0.03510492706298828, 0.03542716979980469, 0.035320095062255856, 0.035057376861572266, 0.03493478393554687, 0.03465420913696289, 0.035176448822021485, 0.03554064178466797, 0.03541167831420899, 0.03525475311279297, 0.03530534362792969, 0.03510646438598633, 0.03519120025634766, 0.03517462539672851, 0.03506335830688476, 0.03521376037597656, 0.03510444641113281, 0.034853214263916014, 0.03503916931152344, 0.03501379013061524, 0.0353059196472168, 0.03501100921630859, 0.03485081481933594, 0.034260990142822266, 0.034710784912109376, 0.03419350433349609, 0.034957374572753906, 0.03451257705688476, 0.03489238357543945, 0.03473657608032227, 0.034297439575195314, 0.03434131240844727, 0.03419289779663086, 0.03409766387939453, 0.034385055541992185, 0.03427363204956055, 0.03425331115722656, 0.034209121704101564, 0.03426899337768555, 0.034062240600585936, 0.034134654998779296, 0.03487472152709961, 0.03484156799316406, 0.034522720336914066, 0.034519454956054685, 0.03452099227905273, 0.03513695907592773, 0.03844681549072266, 0.03542486572265625, 0.03494947052001953, 0.03450265502929688, 0.03457404708862305, 0.03455414581298828, 0.03446739196777344, 0.034447647094726565, 0.035035392761230466, 0.035149505615234375, 0.035130592346191404, 0.034948097229003904, 0.03530047988891601, 0.03544153594970703, 0.03544063949584961, 0.03548681640625, 0.035640224456787106, 0.03574169540405273, 0.03559219360351563, 0.03551776123046875, 0.03566201782226563, 0.035769855499267575, 0.03602329635620117, 0.03621478271484375, 0.03571507263183594, 0.035777729034423826, 0.036109119415283206, 
0.03597340774536133, 0.03602608108520508, 0.03628646469116211, 0.03618611145019531, 0.03605913543701172, 0.03604889678955078, 0.03609328079223633, 0.036168350219726565, 0.036016128540039063, 0.03605846405029297, 0.0365656623840332, 0.035800193786621096, 0.03572358322143555, 0.03589734268188476, 0.035664447784423826, 0.03551232147216797, 0.03648102569580078, 0.035522560119628906, 0.03604275131225586, 0.03527475357055664, 0.035454975128173825, 0.035555328369140625, 0.03521331024169922, 0.03632707214355469, 0.03536931228637695, 0.03544678497314453, 0.035389438629150394, 0.03549798583984375, 0.03560857772827149, 0.03531158447265625, 0.035330078125, 0.0355860481262207, 0.03581657409667969, 0.035692447662353514, 0.03596144104003906, 0.03552499389648438, 0.03525344085693359, 0.035624897003173825, 0.035748737335205075, 0.03572124862670899, 0.03580105590820312, 0.03584329605102539, 0.03592428970336914, 0.03610201644897461, 0.036020641326904294, 0.03618998336791992, 0.03610051345825195, 0.0359147834777832, 0.035990272521972656, 0.03597321701049805, 0.03587699127197266, 0.03599359893798828, 0.03585228729248047, 0.03589120101928711, 0.03586975860595703, 0.03597334289550781, 0.03568921661376953, 0.03541167831420899, 0.03565132904052734, 0.0355230712890625, 0.03575398254394531, 0.03540991973876953, 0.03551228713989258, 0.035366943359375, 0.03548364639282227, 0.03551641464233399, 0.03582566452026367, 0.03546047973632813, 0.03532662582397461, 0.03542422485351562, 0.03538479995727539, 0.035506526947021486, 0.03550348663330078, 0.0357589111328125, 0.035604480743408204, 0.035520511627197264, 0.03583699035644531, 0.035498207092285156, 0.03557449722290039, 0.0358171501159668, 0.03576863861083984, 0.03624755096435547, 0.03548160171508789, 0.03548108673095703, 0.036453887939453124, 0.03563600158691406, 0.035370559692382814, 0.03594112014770508, 0.0360134391784668, 0.035947040557861326, 0.03609190368652344, 0.03636825561523437, 0.035724544525146486, 0.035459007263183594, 0.03534329605102539, 0.03542425537109375, 0.035448833465576174, 0.03573049545288086, 0.03535763168334961, 0.036317024230957035, 0.03567827224731445, 0.03545609664916992, 0.03606787109375, 0.03543497467041016, 0.03526838302612305, 0.0349760627746582, 0.03486300659179688, 0.03476070404052734, 0.0346907844543457, 0.035084415435791015, 0.03537526321411133, 0.03512934494018555, 0.03511705780029297, 0.03539961624145508, 0.035102783203125, 0.034829727172851564, 0.034780799865722654, 0.03489238357543945, 0.03487343978881836, 0.035295040130615234, 0.03520764923095703, 0.03480121612548828, 0.034666942596435546, 0.03440841674804687, 0.034530494689941404, 0.03442319869995117, 0.03452105712890625, 0.03457660675048828, 0.03458483123779297, 0.034754112243652345, 0.03476144027709961, 0.035069473266601564, 0.03455609512329102, 0.03465830230712891, 0.03451903915405274, 0.03507382583618164, 0.0351923828125, 0.03521807861328125, 0.03576627349853516, 0.03555084609985352, 0.03553958511352539, 0.03582336044311524, 0.04110704040527344, 0.03608358383178711]",tokens/s,28.556370540904123,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) 
Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3158.24128,4431.151104,0.0,4028.628992,3944.723968,s,1,10.468568359375,10.468568359375,0.0,10.468568359375,10.468568359375,10.468568359375,10.468568359375,[10.468568359375],,kWh,9.744984121242244e-05,1.0741945159657262e-05,3.0471135488013834e-05,0.00013866292186009353,,MB,2980.560896,4770.889728,0.0,4353.687552,4305.05728,s,10,1.1536016311645507,0.1153601631164551,0.00015453403989926204,0.1153535041809082,0.11555985946655274,0.11557176933288574,0.11558129722595215,"[0.11535424041748046, 0.11558367919921875, 0.11516553497314454, 0.11529641723632812, 0.11506012725830078, 0.11532889556884765, 0.11546454620361328, 0.11543820953369141, 0.11535276794433594, 0.11555721282958985]",tokens/s,2219.136945407837,kWh,3.4513731909318464e-06,3.806244922671959e-07,2.295939745247176e-06,6.127937428446218e-06,tokens/kWh,41775883.48269258,MB,2984.689664,4770.889728,0.0,4353.687552,4305.05984,s,10,25.45242407226562,2.5452424072265627,0.010921404254275176,2.54685986328125,2.5569438720703124,2.5578602416992187,2.5585933374023435,"[2.5409130859375, 2.551753173828125, 2.55426025390625, 2.556740234375, 2.527989990234375, 2.52950341796875, 2.535803955078125, 2.541966552734375, 2.558776611328125, 2.554716796875]",tokens/s,24.75206283736577,kWh,7.40796593303255e-05,8.170885498793493e-06,3.88408431641512e-05,0.00012109138799327019,tokens/kWh,520268.2126618394,,s,630,25.449817668914807,0.04039653598240444,0.0006701905993121197,0.04035440063476563,0.04085800514221191,0.04105314445495605,0.04386141094207764,"[0.04106208038330078, 0.040908832550048825, 0.043471134185791016, 0.04086374282836914, 0.040578624725341794, 0.04029280090332031, 0.04080025482177734, 0.040118431091308596, 0.0404826545715332, 0.039979007720947264, 0.039946239471435545, 0.04038655853271484, 0.04025548934936524, 0.040268959045410155, 0.0402762565612793, 0.03991545486450195, 0.04027619171142578, 0.04063216018676758, 0.04064896011352539, 0.04031875228881836, 0.040258079528808596, 0.0403394546508789, 0.04021452713012695, 0.04071219253540039, 0.040515583038330076, 0.04030451202392578, 0.040673408508300785, 0.04037222290039062, 0.04026118469238281, 0.04007980728149414, 0.04029439926147461, 0.040101886749267575, 0.0398045768737793, 0.0396475830078125, 0.04419372940063476, 0.03988828659057617, 0.03996307373046875, 0.03963926315307617, 0.03953855895996094, 0.03944643020629883, 0.039504096984863284, 0.039512065887451174, 0.03975968170166016, 0.03998527908325195, 0.04018316650390625, 0.039768768310546876, 0.039847934722900394, 0.03944243240356445, 0.03978649520874023, 0.03997491073608399, 0.04287648010253906, 0.04073625564575195, 0.04035878372192383, 0.039951744079589846, 0.039817920684814455, 0.04006694412231445, 0.039882881164550785, 0.04040281677246094, 0.04015526580810547, 0.040114177703857425, 0.04027971267700195, 0.04032931137084961, 0.04018832015991211, 0.04092768096923828, 0.04038246536254883, 0.040478080749511716, 0.04066368103027344, 0.040439231872558594, 0.04025811386108399, 0.04053807830810547, 0.04048278427124023, 0.04053225708007813, 0.040823745727539065, 0.0408328971862793, 0.04035404968261719, 0.04038051223754883, 0.040443710327148434, 0.04036214447021484, 0.04022060775756836, 0.0401640625, 0.04034764862060547, 0.04045619201660156, 0.040425472259521485, 0.04078182220458984, 0.0409989128112793, 0.04087744140625, 0.04097526550292969, 
0.04121712112426758, 0.04081523132324219, 0.04061347198486328, 0.040692127227783204, 0.04073267364501953, 0.04023196792602539, 0.04039779281616211, 0.04048691177368164, 0.04090060806274414, 0.04074700927734375, 0.04075110244750976, 0.04036812973022461, 0.040235008239746094, 0.04019564819335938, 0.03996307373046875, 0.03986943817138672, 0.039785472869873044, 0.04006911849975586, 0.03975372695922851, 0.04039241409301758, 0.040429855346679686, 0.04058281707763672, 0.040589664459228514, 0.040292350769042966, 0.040510494232177736, 0.0406343994140625, 0.04063452911376953, 0.04064131164550781, 0.040568801879882814, 0.04057014465332031, 0.04081449508666992, 0.04056729507446289, 0.040358238220214844, 0.04017356872558594, 0.040007038116455076, 0.04041766357421875, 0.041142528533935546, 0.04036812973022461, 0.04079990386962891, 0.041070751190185543, 0.040422176361083986, 0.04000732803344727, 0.04115251159667969, 0.040140064239501956, 0.04033200073242187, 0.04057907104492187, 0.04040233612060547, 0.040342113494873044, 0.03989641571044922, 0.04076134490966797, 0.04068214416503906, 0.04084940719604492, 0.040812545776367185, 0.04029644775390625, 0.04025059127807617, 0.04030953598022461, 0.040271137237548826, 0.04030332946777344, 0.040321025848388675, 0.040022239685058594, 0.041644065856933594, 0.04010147094726563, 0.04007571029663086, 0.04048457717895508, 0.040089599609375, 0.04014899063110351, 0.03995238494873047, 0.04001587295532227, 0.04029782485961914, 0.04039952087402344, 0.04047052764892578, 0.04061983871459961, 0.04059360122680664, 0.040564735412597655, 0.040357887268066404, 0.040432926177978515, 0.04097014236450195, 0.040873886108398434, 0.0403988151550293, 0.04039680099487305, 0.041669567108154296, 0.04055859375, 0.04087376022338867, 0.04085782241821289, 0.04071014404296875, 0.04068096160888672, 0.04098303985595703, 0.04071971130371094, 0.04076723098754883, 0.04044073486328125, 0.040400894165039065, 0.040591358184814456, 0.04250995254516601, 0.04156646347045898, 0.040683712005615234, 0.04033529663085938, 0.040051807403564454, 0.04024208068847656, 0.04005887985229492, 0.04004044723510742, 0.04025139236450195, 0.04090265655517578, 0.04108835220336914, 0.040599552154541016, 0.04084326553344726, 0.040721057891845706, 0.040976383209228515, 0.04096614456176758, 0.04043571090698242, 0.040716289520263675, 0.04036403274536133, 0.040065025329589846, 0.040275966644287106, 0.040032257080078126, 0.04013238525390625, 0.04017484664916992, 0.04033020782470703, 0.040390655517578124, 0.04057702255249023, 0.0404213752746582, 0.04053343963623047, 0.04028812789916992, 0.040624576568603514, 0.04045235061645508, 0.04014899063110351, 0.040140159606933595, 0.04024111938476563, 0.04023305511474609, 0.04020809555053711, 0.043995967864990236, 0.04038864135742187, 0.04009910583496094, 0.04070678329467774, 0.04050080108642578, 0.04054880142211914, 0.040648704528808595, 0.04040499114990234, 0.0403614387512207, 0.040403488159179685, 0.04037222290039062, 0.04051148986816406, 0.041009151458740234, 0.04085964965820312, 0.0404213752746582, 0.04058515167236328, 0.040527294158935544, 0.0407740478515625, 0.041019615173339845, 0.04087795257568359, 0.040540065765380856, 0.04071142578125, 0.04058415985107422, 0.040570465087890625, 0.04556841659545898, 0.04151500701904297, 0.04004044723510742, 0.039965953826904294, 0.04003097534179687, 0.040234657287597654, 0.04032956695556641, 0.04009081649780273, 0.03992671966552734, 0.039888065338134764, 0.03987036895751953, 0.0396431999206543, 0.03996819305419922, 0.03992633438110352, 
0.03991321563720703, 0.04006447982788086, 0.0401431999206543, 0.039973312377929685, 0.04025753784179688, 0.04041676712036133, 0.04034928131103516, 0.040856479644775394, 0.04061145782470703, 0.04000806427001953, 0.03974863815307617, 0.03992031860351562, 0.039980926513671876, 0.039969184875488284, 0.040376319885253906, 0.03964313507080078, 0.0395489273071289, 0.03941785430908203, 0.03942956924438477, 0.03946758270263672, 0.039782398223876955, 0.03954278564453125, 0.03983747100830078, 0.03949619293212891, 0.03960992050170899, 0.03940572738647461, 0.039610366821289066, 0.03994825744628906, 0.04005007934570313, 0.040067710876464845, 0.039948287963867186, 0.04015718460083008, 0.043896480560302736, 0.04035184097290039, 0.04060124969482422, 0.04004048156738281, 0.04010649490356445, 0.04016134262084961, 0.04012851333618164, 0.04076464080810547, 0.04032531356811524, 0.03995881652832031, 0.039768383026123046, 0.03964723205566406, 0.040032257080078126, 0.040232769012451174, 0.039944385528564455, 0.039815166473388675, 0.03979612731933594, 0.03965935897827148, 0.03974220657348633, 0.04027395248413086, 0.040468448638916014, 0.040398975372314454, 0.04059689712524414, 0.040425952911376954, 0.040409088134765625, 0.04088150405883789, 0.040562496185302735, 0.04056671905517578, 0.04069468688964844, 0.04099737548828125, 0.04057088088989258, 0.04035353469848633, 0.04014652633666992, 0.040210689544677734, 0.040021568298339846, 0.04033622360229492, 0.04056876754760742, 0.040400894165039065, 0.04020780944824219, 0.04031935882568359, 0.03999513626098633, 0.04001007843017578, 0.03991712188720703, 0.0397762565612793, 0.039989856719970705, 0.04016489410400391, 0.040016353607177736, 0.04016316986083984, 0.039919776916503905, 0.03978854370117187, 0.03954687881469727, 0.040103935241699216, 0.0409804801940918, 0.0405948486328125, 0.040835678100585936, 0.04024115371704102, 0.04027532958984375, 0.03997350311279297, 0.03966566467285156, 0.03970457458496094, 0.03982534408569336, 0.03998099136352539, 0.03970060729980469, 0.040114177703857425, 0.04009292984008789, 0.04010470581054688, 0.04050118255615234, 0.040538177490234376, 0.0400423698425293, 0.04037235260009766, 0.03999683380126953, 0.039637439727783205, 0.03967606353759766, 0.04089401626586914, 0.04068601608276367, 0.040202144622802735, 0.04012812805175781, 0.04030636978149414, 0.039869216918945315, 0.03988275146484375, 0.03970041656494141, 0.04020844650268555, 0.04034764862060547, 0.040570209503173825, 0.04014147186279297, 0.039898143768310544, 0.04112892913818359, 0.039823360443115234, 0.03964044952392578, 0.03977280044555664, 0.039927806854248044, 0.03974457550048828, 0.04050207901000977, 0.0400711669921875, 0.03991142272949219, 0.040115646362304684, 0.04114076614379883, 0.041139999389648435, 0.03975193786621094, 0.03987251281738281, 0.041717758178710936, 0.04673535919189453, 0.043407360076904294, 0.03999129486083984, 0.03990095901489258, 0.03995260620117187, 0.03997081756591797, 0.039695743560791016, 0.040263614654541015, 0.03995625686645508, 0.03975030517578125, 0.03977036666870117, 0.039753536224365234, 0.03975596618652344, 0.03984502410888672, 0.03953062438964844, 0.03993673706054687, 0.04001171112060547, 0.039908512115478516, 0.039750560760498044, 0.0404398078918457, 0.040285888671875, 0.039868736267089845, 0.039820510864257815, 0.040220542907714846, 0.03976688003540039, 0.039870529174804686, 0.0397022705078125, 0.03972121429443359, 0.04016332626342774, 0.040474624633789064, 0.04281744003295898, 0.040460384368896485, 0.040217823028564456, 0.040114974975585936, 
0.04003772735595703, 0.04014556884765625, 0.039894367218017576, 0.04008617782592774, 0.03992089462280273, 0.03980556869506836, 0.040244640350341795, 0.03996950531005859, 0.03989254379272461, 0.040044670104980466, 0.040634654998779295, 0.0401387825012207, 0.03999948883056641, 0.0397127685546875, 0.04015024185180664, 0.03967001724243164, 0.0399486083984375, 0.0396956787109375, 0.03967478561401367, 0.03984371185302735, 0.04105215835571289, 0.04064051055908203, 0.040678462982177734, 0.04064710235595703, 0.04053657531738281, 0.040371646881103514, 0.040378944396972656, 0.040062721252441404, 0.04012419128417969, 0.040126529693603516, 0.03988111877441406, 0.0399441909790039, 0.04464345550537109, 0.04030963134765625, 0.04014694213867188, 0.04003635025024414, 0.04016515350341797, 0.03968841552734375, 0.04018175888061523, 0.03984086227416992, 0.039826335906982424, 0.039746910095214846, 0.03999164962768555, 0.039839134216308594, 0.040502174377441406, 0.040277183532714846, 0.040385345458984374, 0.04005379104614258, 0.040372383117675784, 0.04049798583984375, 0.04029849624633789, 0.04055244827270508, 0.04056883239746094, 0.04025680160522461, 0.04032284927368164, 0.04016428756713867, 0.04048271942138672, 0.043775550842285155, 0.04048284912109375, 0.04012019348144531, 0.04045270538330078, 0.04011420822143555, 0.04016880035400391, 0.039870590209960935, 0.039645854949951174, 0.03960176086425781, 0.03965161514282227, 0.03951411056518555, 0.040072799682617184, 0.0400654411315918, 0.04049071884155273, 0.04006467056274414, 0.04028889465332031, 0.04024943923950195, 0.04056399917602539, 0.04035033416748047, 0.04033059310913086, 0.040056896209716794, 0.04022332763671875, 0.0405032958984375, 0.04078291320800781, 0.04036111831665039, 0.04029212951660156, 0.04111721420288086, 0.04066352081298828, 0.0405032958984375, 0.04030831909179688, 0.040308769226074216, 0.040333694458007815, 0.04041424179077149, 0.04039574432373047, 0.04027376174926758, 0.04066934585571289, 0.04043971252441406, 0.040287521362304686, 0.04043036651611328, 0.04046236801147461, 0.04072652816772461, 0.040460289001464846, 0.04064255905151367, 0.0404881591796875, 0.040921886444091796, 0.044075263977050784, 0.041053951263427736, 0.04089452743530273, 0.04089644622802734, 0.040597503662109374, 0.04088627243041992, 0.04064156723022461, 0.0410711669921875, 0.0405951042175293, 0.04048358535766602, 0.04049417495727539, 0.04085238265991211, 0.040377761840820314, 0.040450656890869144, 0.040322975158691404, 0.04042351913452148, 0.04053811264038086, 0.04036812973022461, 0.0401162223815918, 0.04014694213867188, 0.040149055480957034, 0.04038444900512695, 0.04051968002319336, 0.04061539077758789, 0.04038915252685547, 0.04039465713500977, 0.04054153442382812, 0.04031702423095703, 0.04077840042114258, 0.04068320083618164, 0.04079568099975586, 0.04096659088134766, 0.040551841735839846, 0.04045481491088867, 0.04038412857055664, 0.040595169067382815, 0.04035475158691406, 0.040313983917236326, 0.04032144165039062, 0.040968448638916015, 0.04082710266113281, 0.041603073120117184, 0.04032627105712891, 0.04015359878540039, 0.04078182220458984, 0.04055039978027344, 0.04057843017578125, 0.04048550415039062, 0.04070809555053711, 0.040474624633789064, 0.04049286270141601, 0.04056857681274414, 0.040384960174560544, 0.040687614440917966, 0.040572223663330076, 0.040524478912353515, 0.04050495910644531, 0.04068511962890625, 0.04072911834716797, 0.040542209625244144, 0.04100124740600586, 0.04037836837768555, 0.040629440307617185, 0.04038943862915039, 0.04066255950927734, 
0.04036412811279297, 0.04053619384765625, 0.04033561706542969, 0.0404727668762207, 0.04049692916870117, 0.040587295532226564, 0.04074291229248047, 0.041025054931640624, 0.040581600189208984, 0.040687614440917966, 0.040588863372802736, 0.040685760498046876, 0.04041475296020508, 0.04058595275878906, 0.04038006210327148, 0.04048931121826172, 0.04022886276245117, 0.04014448165893555, 0.04025708770751953, 0.04043862533569336, 0.040560222625732424, 0.040470943450927735, 0.04037555313110352, 0.04033203125, 0.04042342376708984, 0.04050057601928711, 0.04054390335083008, 0.0404431037902832, 0.04056156921386719, 0.04044275283813477, 0.040576385498046874, 0.04065548706054688, 0.04054009628295899, 0.04043721771240234, 0.041783905029296874, 0.04043571090698242, 0.04045228958129883, 0.04035728073120117, 0.04053238296508789, 0.040460384368896485, 0.040626014709472656, 0.040556190490722656]",tokens/s,24.754597781244684,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires 
the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = 
decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1849.36448,2745.040896,0.0,2365.587456,2314.318336,s,1,8.9284794921875,8.9284794921875,0.0,8.9284794921875,8.9284794921875,8.9284794921875,8.9284794921875,[8.9284794921875],,kWh,5.5911116945814375e-05,6.160209576682485e-06,1.722529155800956e-05,7.929661808050642e-05,,MB,1901.912064,3099.459584,0.0,2682.257408,2607.60832,s,10,0.5095027809143067,0.05095027809143067,0.0002655836749891441,0.05082716751098633,0.051276120376586916,0.051431324577331544,0.05155548793792725,"[0.05158652877807617, 0.0511148796081543, 0.05124163055419922, 0.050816158294677734, 0.050872478485107425, 0.05082032012939453, 0.050745792388916015, 0.050738784790039064, 0.050834014892578126, 0.05073219299316406]",tokens/s,5024.506432341861,kWh,1.55596752468974e-06,1.7159585863836332e-07,1.0371137138509626e-06,2.764677097179066e-06,tokens/kWh,92596708.76617353,MB,1906.229248,3099.459584,0.0,2682.257408,2607.61088,s,10,17.19343530273438,1.7193435302734374,0.22054194164928356,1.5809989624023437,2.0618133544921875,2.0663018188476565,2.0698925903320315,"[2.06081591796875, 2.070790283203125, 2.03573388671875, 1.562585205078125, 1.5716207275390626, 1.566328369140625, 1.5710460205078125, 1.5799976806640625, 1.5925169677734374, 1.582000244140625]",tokens/s,36.641892030722175,kWh,4.591858439613902e-05,5.064410582656288e-06,2.1991932487149806e-05,7.297492746594513e-05,tokens/kWh,863310.2106117188,,s,630,17.191366809844972,0.02728788382515075,0.0035817107658509817,0.02500704002380371,0.032768848037719726,0.03301007080078125,0.03351466503143311,"[0.03260588836669922, 0.03253193664550781, 0.03265647888183594, 0.03264486312866211, 0.03263862228393555, 0.03267324829101562, 0.03269718551635742, 0.03250771331787109, 0.03239913558959961, 0.03298358535766602, 0.03254886245727539, 0.032479137420654294, 0.03261849594116211, 0.0326098861694336, 0.032732639312744144, 0.0326255989074707, 0.03259772872924805, 0.03271820831298828, 0.032682910919189456, 0.03288246536254883, 0.03271072006225586, 0.03261190414428711, 0.03260681533813477, 0.0325511360168457, 0.03248083114624024, 0.032549087524414065, 0.03287449645996094, 0.032755294799804685, 0.03254508972167969, 0.03263091278076172, 0.03272236633300781, 0.032710464477539065, 0.032621280670166015, 0.03270041656494141, 0.032866302490234374, 0.032664737701416015, 0.032643199920654294, 0.03271753692626953, 0.032797695159912106, 0.0328917121887207, 0.032989376068115236, 0.032612350463867186, 0.03279872131347656, 0.03265500640869141, 0.03266204833984375, 0.03255686569213867, 0.03279264068603516, 0.03267987060546875, 0.032615585327148436, 0.032610912322998044, 0.03269862365722656, 0.032644287109375, 0.032664382934570316, 0.03255081558227539, 0.032714847564697266, 0.033099777221679685, 0.033159168243408206, 0.0333507194519043, 
0.03320649719238281, 0.032932575225830076, 0.03264921569824219, 0.03267951965332031, 0.03279254531860352, 0.032989662170410154, 0.032909473419189456, 0.032457889556884764, 0.032608448028564455, 0.03293225479125977, 0.03412198257446289, 0.03322675323486328, 0.03264284896850586, 0.03252409744262695, 0.03251673507690429, 0.0326448974609375, 0.032612350463867186, 0.03308319854736328, 0.033644031524658204, 0.03267558288574219, 0.03276243209838867, 0.03278220748901367, 0.03278435134887695, 0.03287622451782227, 0.032734046936035155, 0.032679935455322266, 0.03274300765991211, 0.032741886138916015, 0.0327710075378418, 0.033022945404052734, 0.033535999298095705, 0.0332014389038086, 0.033097633361816405, 0.033139518737792965, 0.032994335174560546, 0.03289715194702148, 0.033131359100341796, 0.032671745300292966, 0.0327081298828125, 0.03270470428466797, 0.03413020706176758, 0.034401729583740236, 0.033116607666015624, 0.033043582916259764, 0.03279280090332031, 0.03281999969482422, 0.03272911834716797, 0.03316118240356445, 0.03279052734375, 0.03252633666992188, 0.03254233551025391, 0.032506240844726565, 0.032595966339111326, 0.03255078506469727, 0.03265139389038086, 0.032671615600585936, 0.03256742477416992, 0.03258566284179688, 0.03254399871826172, 0.032578369140625, 0.03269539260864258, 0.03284819030761719, 0.03275862503051758, 0.032656673431396485, 0.032622112274169925, 0.032573631286621094, 0.032766719818115235, 0.03278438568115234, 0.03269968032836914, 0.032688575744628905, 0.03257753753662109, 0.03261219024658203, 0.03346243286132813, 0.03281283187866211, 0.03317334365844726, 0.03314470291137695, 0.033262081146240234, 0.03308316802978516, 0.03320035171508789, 0.032976863861083984, 0.03264924621582031, 0.03263068771362305, 0.03264521789550781, 0.032702465057373044, 0.032652736663818356, 0.03319046401977539, 0.03264889526367187, 0.03269254302978516, 0.03273468780517578, 0.03267023849487305, 0.032860160827636715, 0.032873886108398434, 0.032768608093261715, 0.03272246551513672, 0.03263638305664063, 0.0327435188293457, 0.032762783050537106, 0.03362406539916992, 0.03482998275756836, 0.03316361618041992, 0.03317375946044922, 0.033031486511230466, 0.03277180862426758, 0.03272518539428711, 0.03249375915527344, 0.03258607864379883, 0.032796607971191404, 0.0325939826965332, 0.0324956169128418, 0.03242598342895508, 0.032487422943115234, 0.032415393829345704, 0.03244271850585938, 0.032481281280517575, 0.03249900817871094, 0.03241030502319336, 0.03238326263427734, 0.03242364883422852, 0.03249683380126953, 0.0324780158996582, 0.03253657531738281, 0.0325049934387207, 0.032645633697509766, 0.03264483261108399, 0.03235903930664062, 0.03245651245117188, 0.03252774429321289, 0.02765500831604004, 0.02487424087524414, 0.02498396873474121, 0.024819295883178712, 0.025044063568115234, 0.024728479385375975, 0.0247271671295166, 0.02473750305175781, 0.0248407039642334, 0.02465987205505371, 0.024899391174316405, 0.02476611137390137, 0.02487376022338867, 0.02481305694580078, 0.024700544357299806, 0.024660287857055666, 0.02460643196105957, 0.024714080810546876, 0.024832000732421877, 0.024770559310913084, 0.02515488052368164, 0.024736448287963866, 0.02468454360961914, 0.02469273567199707, 0.024685951232910158, 0.024772991180419923, 0.024709375381469726, 0.024649696350097658, 0.024668191909790037, 0.02481545639038086, 0.024793119430541993, 0.024724992752075195, 0.024772735595703126, 0.024879264831542968, 0.02495318412780762, 0.02485043144226074, 0.024807424545288087, 0.02496451187133789, 0.025219680786132813, 0.025026559829711914, 
0.024890464782714845, 0.024877983093261717, 0.024732864379882813, 0.02472208023071289, 0.02483830451965332, 0.024805376052856445, 0.02532745552062988, 0.024834144592285157, 0.02501420783996582, 0.02490492820739746, 0.024748064041137694, 0.02479372787475586, 0.02489369583129883, 0.024823808670043947, 0.024784160614013673, 0.024712032318115234, 0.02469055938720703, 0.024696832656860353, 0.024849695205688478, 0.024617696762084963, 0.024657472610473633, 0.024770624160766603, 0.024725696563720704, 0.02466975975036621, 0.024674623489379884, 0.024731967926025392, 0.024666112899780275, 0.024838144302368165, 0.024977407455444335, 0.024767871856689452, 0.02477939224243164, 0.02467840003967285, 0.02476032066345215, 0.02513283157348633, 0.0255053768157959, 0.02506831932067871, 0.025184095382690428, 0.025040544509887696, 0.02478121566772461, 0.024770496368408203, 0.024852479934692383, 0.02485968017578125, 0.024845279693603516, 0.024893152236938478, 0.02488675117492676, 0.024842111587524415, 0.02482067108154297, 0.024929344177246095, 0.02485139274597168, 0.02487001609802246, 0.024768831253051758, 0.024786624908447265, 0.02482211112976074, 0.024853023529052734, 0.024685951232910158, 0.024768640518188476, 0.024799455642700197, 0.024908000946044923, 0.02470710372924805, 0.024752416610717774, 0.024815359115600587, 0.024862655639648436, 0.024867008209228516, 0.024724992752075195, 0.024729055404663088, 0.024794015884399414, 0.02476416015625, 0.02472166442871094, 0.024766016006469726, 0.024783296585083006, 0.02474505615234375, 0.02469366455078125, 0.027510784149169923, 0.025131263732910157, 0.024964864730834962, 0.025001535415649413, 0.025012096405029296, 0.025160255432128905, 0.025036735534667967, 0.02484000015258789, 0.025004480361938478, 0.026435392379760742, 0.024838144302368165, 0.02490982437133789, 0.024810880661010743, 0.02536716842651367, 0.02472310447692871, 0.02490403175354004, 0.024796255111694337, 0.02491596794128418, 0.024913919448852538, 0.02476032066345215, 0.02473936080932617, 0.024784448623657227, 0.024757247924804687, 0.02496713638305664, 0.024806655883789063, 0.02466886329650879, 0.024698879241943358, 0.024788991928100586, 0.02483328056335449, 0.02472012710571289, 0.02469068717956543, 0.02476025581359863, 0.024860736846923828, 0.024788991928100586, 0.024716863632202147, 0.02479897689819336, 0.02485728073120117, 0.024694688796997072, 0.024755647659301758, 0.024770368576049806, 0.02500489616394043, 0.024737152099609375, 0.024857215881347657, 0.02507161521911621, 0.024966848373413085, 0.024899904251098632, 0.024893152236938478, 0.024908000946044923, 0.025317312240600586, 0.024977439880371093, 0.02490787124633789, 0.02481337547302246, 0.024817472457885743, 0.024803712844848634, 0.024751968383789062, 0.024787103652954102, 0.024858463287353517, 0.024852479934692383, 0.02474723243713379, 0.024931264877319337, 0.02485043144226074, 0.024997568130493163, 0.024903072357177734, 0.02483292770385742, 0.02493622398376465, 0.024858848571777344, 0.02490982437133789, 0.02553241539001465, 0.024870912551879884, 0.024821983337402344, 0.024794015884399414, 0.02489833641052246, 0.02492425537109375, 0.024780799865722656, 0.02481260871887207, 0.024850400924682617, 0.02490777587890625, 0.024859840393066407, 0.024833824157714842, 0.024800352096557617, 0.025110496520996093, 0.024927936553955077, 0.025153823852539062, 0.025102304458618163, 0.024905248641967772, 0.024750591278076172, 0.02492572784423828, 0.025033184051513672, 0.02469228744506836, 0.02463142395019531, 0.024660287857055666, 0.024748031616210937, 
0.02473574447631836, 0.024634912490844728, 0.02493507194519043, 0.024743743896484375, 0.024670015335083006, 0.024651968002319335, 0.02465177536010742, 0.024764415740966796, 0.024616960525512696, 0.02465692710876465, 0.024658943176269533, 0.02471241569519043, 0.024661823272705077, 0.024753087997436522, 0.02509823989868164, 0.02512873649597168, 0.02510870361328125, 0.025157375335693358, 0.025179840087890624, 0.02503327941894531, 0.024813568115234375, 0.024758272171020508, 0.024765792846679686, 0.024783519744873046, 0.024768287658691407, 0.02474233627319336, 0.024886207580566408, 0.025008703231811525, 0.02494451141357422, 0.02485203170776367, 0.02503167915344238, 0.025112415313720705, 0.024936447143554686, 0.02750668716430664, 0.025780223846435548, 0.02528665542602539, 0.024952831268310546, 0.025161727905273438, 0.025006080627441408, 0.025141248703002928, 0.024688640594482423, 0.024750175476074218, 0.02489334487915039, 0.025140575408935547, 0.02498419189453125, 0.024838176727294922, 0.024893440246582032, 0.024831872940063476, 0.024727039337158203, 0.02497395133972168, 0.024836095809936523, 0.024966623306274412, 0.024976160049438475, 0.024846208572387694, 0.0246712646484375, 0.024806175231933594, 0.025053375244140624, 0.02502979278564453, 0.025398111343383788, 0.025106399536132813, 0.02503273582458496, 0.025382144927978516, 0.02522598457336426, 0.025030080795288085, 0.024927936553955077, 0.024882047653198243, 0.024970239639282226, 0.024968191146850584, 0.025085664749145507, 0.025144927978515624, 0.026033952713012697, 0.027518976211547853, 0.026250112533569337, 0.025417760848999025, 0.02511039924621582, 0.024909952163696288, 0.024811519622802734, 0.025063455581665037, 0.024987520217895506, 0.02493449592590332, 0.025204736709594725, 0.02485875129699707, 0.024962368011474608, 0.02495136070251465, 0.024975584030151366, 0.02503376007080078, 0.02523417663574219, 0.02508185577392578, 0.024991743087768553, 0.02497331237792969, 0.024961023330688475, 0.024864767074584963, 0.02482316780090332, 0.024879104614257814, 0.024912031173706054, 0.02502000045776367, 0.024787200927734374, 0.024892032623291014, 0.024949920654296874, 0.024896352767944337, 0.024779903411865235, 0.024756128311157227, 0.024816511154174804, 0.02511471939086914, 0.025277503967285155, 0.025063999176025392, 0.024961376190185548, 0.02484841537475586, 0.025016319274902343, 0.025091264724731447, 0.024945472717285155, 0.024993343353271483, 0.02510220718383789, 0.025309024810791017, 0.024889663696289064, 0.025665695190429688, 0.02547110366821289, 0.03313919830322266, 0.02542505645751953, 0.025211679458618165, 0.025284671783447267, 0.02530508804321289, 0.025118719100952147, 0.024983327865600587, 0.024887744903564452, 0.024958751678466798, 0.02482745552062988, 0.02484783935546875, 0.02493539237976074, 0.025014368057250977, 0.02508348846435547, 0.024723167419433593, 0.02484694480895996, 0.025135103225708007, 0.025194208145141603, 0.025051424026489258, 0.02491164779663086, 0.025081344604492187, 0.024832735061645506, 0.02490982437133789, 0.02500809669494629, 0.025046848297119142, 0.024820032119750975, 0.024778656005859375, 0.024856191635131836, 0.024994112014770507, 0.031006784439086915, 0.025231552124023438, 0.02520841598510742, 0.026127967834472656, 0.024942272186279296, 0.02509417533874512, 0.025069631576538087, 0.0251167049407959, 0.025032960891723632, 0.024893600463867186, 0.024934816360473632, 0.025236608505249024, 0.02497830390930176, 0.02490563201904297, 0.025331199645996092, 0.024973215103149413, 0.024818368911743164, 0.02488934326171875, 
0.02488528060913086, 0.025173919677734375, 0.025025951385498048, 0.025033119201660157, 0.025016191482543946, 0.025007999420166016, 0.024936960220336913, 0.02490310478210449, 0.025018943786621093, 0.02507776069641113, 0.02485843276977539, 0.024901824951171873, 0.024962976455688478, 0.02534204864501953, 0.025253311157226562, 0.025148319244384765, 0.025240991592407228, 0.025032703399658202, 0.024971872329711913, 0.02501593589782715, 0.025100799560546876, 0.024995712280273436, 0.02492166328430176, 0.02510047912597656, 0.02499942398071289, 0.025122943878173827, 0.02522380828857422, 0.025236768722534178, 0.02513564872741699, 0.0249182071685791, 0.024832000732421877, 0.025226303100585937, 0.025201215744018554, 0.024885120391845702, 0.024863231658935548, 0.02490982437133789, 0.024786943435668944, 0.02470848083496094, 0.0247957763671875, 0.02490982437133789, 0.024930303573608398, 0.02492416000366211, 0.02485980796813965, 0.025113439559936522, 0.026128543853759765, 0.02492748832702637, 0.024858688354492186, 0.024992288589477538, 0.02489139175415039, 0.024773759841918944, 0.02495372772216797, 0.024951871871948243, 0.02484284782409668, 0.02493020820617676, 0.02485875129699707, 0.02558188819885254, 0.02513920021057129, 0.02496291160583496, 0.02522537612915039, 0.025278656005859376, 0.025220767974853515, 0.025605440139770508, 0.026491167068481446, 0.02570707130432129, 0.025649152755737304, 0.02498067283630371, 0.02529676818847656, 0.025541568756103517, 0.024938495635986328, 0.024805376052856445, 0.024780799865722656, 0.026757312774658204, 0.024968544006347657, 0.024820192337036133, 0.024740928649902343, 0.024976320266723633, 0.024860671997070313]",tokens/s,36.646300842072556,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4969.734144,7346.192384,0.0,6943.670272,6539.1744,s,1,11.756984375,11.756984375,0.0,11.756984375,11.756984375,11.756984375,11.756984375,[11.756984375],,kWh,0.00013279523331669526,1.4641012119514684e-05,4.293447879200052e-05,0.00019037072422821048,,MB,5058.154496,7641.890816,0.0,7224.68864,6917.39904,s,10,2.0122382202148437,0.20122382202148437,0.0006060728743618355,0.2010496368408203,0.20204302520751952,0.20231422958374023,0.2025311930847168,"[0.20076329040527344, 0.20133564758300782, 0.2011254119873047, 0.20132041931152345, 0.20258543395996093, 0.20038633728027344, 0.20092022705078125, 0.20198275756835937, 0.20084483337402342, 0.20097386169433593]",tokens/s,1272.2151752622374,kWh,5.900931269333342e-06,6.507662158810033e-07,3.933125368720259e-06,1.0484822853934602e-05,tokens/kWh,24416244.658242535,MB,5062.479872,7643.987968,0.0,7226.785792,6917.4016,s,10,19.582733764648438,1.958273376464844,0.02183788982488931,1.9564053955078125,1.9850491088867186,1.9903446838378906,1.9945811437988281,"[1.92741796875, 1.9416356201171876, 1.9384376220703126, 1.936445068359375, 1.9584119873046875, 1.9543988037109374, 1.983872314453125, 1.9956402587890625, 1.9820599365234375, 
1.9644141845703125]",tokens/s,32.17119772813855,kWh,5.622335264525129e-05,6.201267903624188e-06,3.7160018616878563e-05,9.958463916575402e-05,tokens/kWh,632627.687641057,,s,630,19.579840072631836,0.03107911122639974,0.0007084460346753492,0.031024640083312988,0.03181113929748535,0.03200096435546875,0.032820097312927246,"[0.031295999526977536, 0.03064473533630371, 0.030342527389526367, 0.03030672073364258, 0.030359615325927736, 0.030328128814697267, 0.030259904861450197, 0.030259199142456054, 0.030373344421386717, 0.030900768280029297, 0.030535680770874023, 0.03041279983520508, 0.030271488189697264, 0.030484479904174806, 0.030265344619750976, 0.030273536682128906, 0.030321983337402342, 0.030414527893066406, 0.030537919998168947, 0.030482271194458007, 0.030521440505981445, 0.030699392318725587, 0.03036467170715332, 0.03034316825866699, 0.03056169509887695, 0.030266111373901367, 0.030356607437133788, 0.030251039505004882, 0.030630592346191407, 0.03155763244628906, 0.030877344131469725, 0.03136739158630371, 0.032847232818603515, 0.03070342445373535, 0.03068560028076172, 0.03046028709411621, 0.03051644706726074, 0.030753728866577148, 0.03057868766784668, 0.03055411148071289, 0.030582048416137694, 0.030397151947021483, 0.030441471099853516, 0.030631168365478516, 0.030535839080810548, 0.03054355239868164, 0.03041702461242676, 0.03036649513244629, 0.030369792938232422, 0.03047603225708008, 0.030771360397338868, 0.03052060890197754, 0.030612607955932618, 0.030951040267944336, 0.030668224334716797, 0.030501312255859375, 0.03067103958129883, 0.030707712173461913, 0.030455808639526367, 0.030468095779418947, 0.03094223976135254, 0.03068003273010254, 0.030486528396606444, 0.03146169662475586, 0.030797088623046875, 0.030644800186157225, 0.030489919662475586, 0.030522016525268553, 0.030382144927978517, 0.03049456024169922, 0.030545408248901368, 0.030421056747436524, 0.030468671798706055, 0.030765056610107422, 0.03079987144470215, 0.030881696701049805, 0.030933088302612304, 0.030842880249023437, 0.03080396842956543, 0.030893280029296876, 0.030690080642700197, 0.03075065612792969, 0.03058633613586426, 0.030779903411865234, 0.030482528686523437, 0.030552000045776368, 0.030399648666381836, 0.030585760116577147, 0.030426368713378907, 0.030746656417846678, 0.0302271671295166, 0.030244415283203124, 0.030304704666137695, 0.03042508888244629, 0.030328832626342773, 0.03050044822692871, 0.03047430419921875, 0.03054739189147949, 0.030847871780395508, 0.031014368057250975, 0.03096633529663086, 0.03093280029296875, 0.030965951919555663, 0.031053823471069338, 0.031171680450439453, 0.031089216232299804, 0.031082752227783204, 0.03093948745727539, 0.030899648666381837, 0.031053632736206056, 0.031164255142211914, 0.031223840713500976, 0.030935680389404297, 0.030910463333129884, 0.032753662109375, 0.031660032272338864, 0.030879743576049806, 0.031014720916748048, 0.030940895080566407, 0.031173088073730468, 0.030830591201782227, 0.03118284797668457, 0.030920703887939452, 0.03076300811767578, 0.03105574417114258, 0.03075676727294922, 0.03169705581665039, 0.03103059196472168, 0.031031839370727538, 0.03103740882873535, 0.034828319549560546, 0.03141222381591797, 0.03098419189453125, 0.03079510307312012, 0.030941856384277343, 0.031057920455932617, 0.030717023849487303, 0.030716127395629882, 0.030636192321777344, 0.03088150405883789, 0.031818464279174806, 0.032149055480957034, 0.030869279861450195, 0.030737152099609377, 0.030615488052368165, 0.030541887283325197, 0.030517087936401368, 0.031059455871582032, 0.030696096420288085, 
0.030672224044799804, 0.03035103988647461, 0.030198751449584962, 0.030308351516723633, 0.030281280517578123, 0.03034671974182129, 0.030349727630615234, 0.030226816177368165, 0.03024505615234375, 0.030301664352416994, 0.030171520233154298, 0.030195871353149415, 0.030342880249023436, 0.03040412712097168, 0.030563072204589845, 0.03096575927734375, 0.030728191375732423, 0.030588735580444337, 0.030938880920410156, 0.030504735946655273, 0.030657184600830077, 0.03060736083984375, 0.03048758316040039, 0.03045680046081543, 0.03066783905029297, 0.03053664016723633, 0.030643295288085938, 0.030907295227050782, 0.030410751342773438, 0.030909856796264647, 0.030310720443725587, 0.030662944793701172, 0.03029631996154785, 0.030371583938598633, 0.030398624420166016, 0.030527328491210936, 0.03053919982910156, 0.030712383270263672, 0.03201180648803711, 0.030597312927246094, 0.03168316841125488, 0.03148988723754883, 0.030879104614257812, 0.031044384002685547, 0.03133369636535645, 0.03110403251647949, 0.030727840423583983, 0.03156582450866699, 0.03205295944213867, 0.03103267288208008, 0.03135548782348633, 0.031033695220947264, 0.031005792617797852, 0.030905248641967774, 0.031068159103393556, 0.03081363105773926, 0.03055059242248535, 0.03262416076660156, 0.03073004722595215, 0.03080463981628418, 0.030616928100585937, 0.030525344848632813, 0.030563072204589845, 0.03037321662902832, 0.030462080001831055, 0.030480127334594726, 0.03035830307006836, 0.030537727355957032, 0.030473663330078126, 0.03032940864562988, 0.030271488189697264, 0.030302207946777345, 0.030492671966552733, 0.03055001640319824, 0.030326431274414062, 0.03037628746032715, 0.030402559280395508, 0.0307509765625, 0.030234367370605468, 0.03057663917541504, 0.0302893123626709, 0.03017788887023926, 0.030074144363403322, 0.030597856521606445, 0.030298112869262695, 0.030248064041137696, 0.03049555206298828, 0.030572608947753908, 0.030279680252075194, 0.03038003158569336, 0.030524448394775392, 0.030441696166992188, 0.03030035209655762, 0.030146432876586915, 0.030222976684570312, 0.03047327995300293, 0.03091961669921875, 0.030924863815307617, 0.03096985626220703, 0.031268863677978515, 0.031018688201904298, 0.031239551544189455, 0.031465696334838866, 0.031987712860107424, 0.031082080841064452, 0.03112940788269043, 0.03120128059387207, 0.03079347229003906, 0.03048854446411133, 0.030513792037963866, 0.030662399291992187, 0.031203840255737306, 0.030791807174682616, 0.03103321647644043, 0.030532863616943358, 0.0311157112121582, 0.031162687301635742, 0.03077440071105957, 0.030757759094238283, 0.030393407821655272, 0.03040662384033203, 0.03037830352783203, 0.03037843132019043, 0.0303569278717041, 0.030290719985961913, 0.030451711654663087, 0.03041257667541504, 0.030654399871826173, 0.030275232315063478, 0.03027577590942383, 0.030541471481323242, 0.03038697624206543, 0.030664703369140626, 0.030682655334472658, 0.03034307289123535, 0.03078201675415039, 0.030384031295776368, 0.030718048095703124, 0.030621376037597656, 0.030861120223999023, 0.03089254379272461, 0.03081625556945801, 0.03097599983215332, 0.030699520111083983, 0.031094783782958983, 0.03241296005249023, 0.03208793640136719, 0.031927072525024414, 0.03226835250854492, 0.031850496292114255, 0.03204915237426758, 0.031895328521728515, 0.03179542350769043, 0.03175766372680664, 0.0320148811340332, 0.0318853759765625, 0.03170515251159668, 0.03183206367492676, 0.03167430305480957, 0.03180550384521484, 0.03163750457763672, 0.03189913558959961, 0.03143935966491699, 0.03139583969116211, 0.03162112045288086, 
0.03117171287536621, 0.031715232849121096, 0.03126198387145996, 0.031084928512573242, 0.03148355293273926, 0.0312512321472168, 0.03232707214355469, 0.031154783248901367, 0.03159859275817871, 0.03122115135192871, 0.031103584289550783, 0.03119276809692383, 0.031077951431274415, 0.031306495666503904, 0.031212575912475587, 0.03141526412963867, 0.03138323211669922, 0.031694944381713865, 0.03151619148254395, 0.03139574432373047, 0.03149084854125977, 0.031531007766723636, 0.031491327285766604, 0.03137366485595703, 0.0318939208984375, 0.031595903396606444, 0.03143539237976074, 0.031108928680419923, 0.03141836738586426, 0.03103968048095703, 0.03125609588623047, 0.030885408401489258, 0.031071168899536133, 0.03127228736877442, 0.03137398338317871, 0.03078348731994629, 0.030681087493896485, 0.03062131118774414, 0.030775680541992187, 0.03052505683898926, 0.031015520095825196, 0.030981664657592774, 0.03098838424682617, 0.03070787239074707, 0.030925920486450195, 0.030902624130249023, 0.030654560089111327, 0.030572704315185547, 0.030333248138427735, 0.030262720108032225, 0.030371679306030273, 0.030186208724975586, 0.03016012763977051, 0.030335168838500976, 0.030336639404296876, 0.030423999786376953, 0.03055961608886719, 0.03103603172302246, 0.030695423126220703, 0.03068262481689453, 0.030697248458862306, 0.03039664077758789, 0.030498592376708985, 0.03040643119812012, 0.031819135665893554, 0.030986879348754885, 0.03119308853149414, 0.030932992935180665, 0.030803295135498048, 0.030937759399414063, 0.030840831756591795, 0.030652416229248046, 0.030547296524047852, 0.03048431968688965, 0.030446304321289062, 0.030630111694335938, 0.03095075225830078, 0.03105846405029297, 0.03088969612121582, 0.030954784393310546, 0.030640384674072266, 0.03050729560852051, 0.030562784194946287, 0.030793727874755858, 0.030529407501220702, 0.030572832107543944, 0.03087753677368164, 0.030709760665893555, 0.030947328567504883, 0.030709728240966797, 0.030754848480224608, 0.03222719955444336, 0.03201241683959961, 0.0313375358581543, 0.03125139236450195, 0.03164800071716309, 0.03192806434631348, 0.03165711975097656, 0.031597408294677734, 0.03177881622314453, 0.03161203193664551, 0.03171017646789551, 0.03189545631408691, 0.031680160522460935, 0.031785152435302735, 0.031797407150268554, 0.031698944091796875, 0.03171536064147949, 0.031897567749023435, 0.03184556770324707, 0.03162553596496582, 0.031732000350952146, 0.031860960006713866, 0.03175984001159668, 0.03614569473266602, 0.03178863906860351, 0.03144262313842774, 0.031353567123413084, 0.03148595237731933, 0.031135744094848632, 0.03135078430175781, 0.03130316734313965, 0.03192268753051758, 0.039360511779785154, 0.031899648666381834, 0.031464864730834964, 0.03111382484436035, 0.03204009628295899, 0.031572608947753905, 0.031530559539794924, 0.03154396820068359, 0.03157926368713379, 0.0313885440826416, 0.03155052757263183, 0.03166624069213867, 0.03183500862121582, 0.03190169525146484, 0.032045055389404296, 0.031800575256347656, 0.03228863906860351, 0.03195583915710449, 0.03198134422302246, 0.03186710357666016, 0.032398880004882814, 0.03172108840942383, 0.03169571113586426, 0.03190732765197754, 0.03178137588500977, 0.031268192291259767, 0.03166060829162597, 0.03253193664550781, 0.0312669448852539, 0.03136700820922852, 0.031328672409057616, 0.034261249542236326, 0.031649791717529296, 0.031054880142211912, 0.03132028770446777, 0.03126963233947754, 0.031139839172363282, 0.031047679901123046, 0.03126873588562012, 0.031131071090698244, 0.030974048614501953, 0.03111996841430664, 
0.031031295776367186, 0.031057920455932617, 0.031100927352905275, 0.03116646385192871, 0.03115945625305176, 0.03109676742553711, 0.03140441513061523, 0.031226303100585936, 0.031247583389282227, 0.0316300163269043, 0.03150457572937012, 0.031416032791137694, 0.03287187194824219, 0.03442697525024414, 0.03202646255493164, 0.03204393768310547, 0.031815200805664065, 0.03177302360534668, 0.03159401512145996, 0.03181625556945801, 0.031511999130249026, 0.031433311462402344, 0.032220191955566406, 0.031534048080444334, 0.03150543975830078, 0.031819328308105466, 0.031054176330566407, 0.03113577651977539, 0.03144313621520996, 0.031245344161987303, 0.03104630470275879, 0.031238048553466798, 0.03107254409790039, 0.03130339241027832, 0.03142070388793945, 0.03160268783569336, 0.03127619171142578, 0.031155040740966797, 0.031168256759643555, 0.03117487907409668, 0.031153215408325195, 0.03130672073364258, 0.03128934478759766, 0.031455039978027344, 0.031496383666992187, 0.03161692810058594, 0.03158569526672363, 0.03189369583129883, 0.031758848190307616, 0.031696319580078125, 0.031855167388916014, 0.03179520034790039, 0.03178512001037598, 0.03173155212402344, 0.03266489410400391, 0.031735744476318356, 0.03166080093383789, 0.03181068801879883, 0.031978336334228516, 0.03152694320678711, 0.03140403175354004, 0.03148185539245606, 0.03156604766845703, 0.031399711608886716, 0.031270912170410156, 0.03145107269287109, 0.031590463638305664, 0.03143270492553711, 0.03142022323608398, 0.031303199768066406, 0.031294111251831055, 0.0316824951171875, 0.03156505584716797, 0.03142073631286621, 0.03136063957214356, 0.031142528533935548, 0.03149372863769531, 0.031324832916259766, 0.031107072830200196, 0.03231439971923828, 0.031140735626220703, 0.03104572868347168, 0.031121408462524414, 0.031088640213012695, 0.031071359634399415, 0.03122265625, 0.03153715133666992, 0.031514623641967776, 0.03217203140258789, 0.03169926452636719, 0.0316183032989502, 0.03146329689025879, 0.031857215881347656, 0.031936511993408204, 0.03134259223937988, 0.03120947265625, 0.03154876708984375, 0.031666847229003904, 0.03129360008239746, 0.031321952819824216, 0.031238048553466798, 0.031204639434814455, 0.031396127700805666, 0.03121331214904785, 0.031265567779541016, 0.03144195175170898, 0.03125551986694336, 0.031156223297119142, 0.03132563209533691, 0.03138611221313477, 0.03112454414367676, 0.03124083137512207, 0.03105734443664551, 0.031099359512329103, 0.0311177921295166, 0.031303680419921875, 0.03123200035095215, 0.031054079055786134, 0.03113484764099121, 0.0314968318939209, 0.03126198387145996, 0.031091424942016603, 0.03138070487976074, 0.031140640258789064, 0.031479711532592776, 0.030826847076416017, 0.03103104019165039, 0.03100057601928711, 0.030901248931884766, 0.03061619186401367, 0.030416351318359375, 0.03034204864501953, 0.030631935119628906, 0.0307640323638916, 0.03263590240478516, 0.030451711654663087, 0.030846975326538087, 0.030980064392089845, 0.030516895294189453, 0.030780927658081055, 0.0306014404296875, 0.030575071334838867, 0.030860607147216796, 0.03128790473937988, 0.03108246421813965, 0.031115583419799805, 0.03101286315917969, 0.030971519470214842, 0.031474048614501954, 0.031006816864013673, 0.0311724796295166]",tokens/s,32.1759522888339,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 351673 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 354653 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3162.947584,4431.151104,0.0,4028.628992,3944.723968,s,1,10.3563037109375,10.3563037109375,0.0,10.3563037109375,10.3563037109375,10.3563037109375,10.3563037109375,[10.3563037109375],,kWh,9.635384677909921e-05,1.0621268716066966e-05,2.9896412806013073e-05,0.00013687152830117925,,MB,2949.7344,4770.889728,0.0,4353.687552,4305.05728,s,10,1.151768440246582,0.11517684402465819,0.001460502679056841,0.11562615966796876,0.11584229583740234,0.11585278091430665,0.11586116897583008,"[0.11551551818847657, 0.11560066986083985, 0.1158399658203125, 0.11566355133056641, 0.1156400604248047, 0.11081043243408203, 0.11586326599121094, 0.11561225891113282, 0.11546774291992187, 0.11575497436523438]",tokens/s,2222.6689936493917,kWh,3.4484402547555047e-06,3.7996639105771827e-07,2.2964397456467565e-06,6.12484639145998e-06,tokens/kWh,41796966.59118618,MB,2951.626752,4770.889728,0.0,4353.687552,4305.05984,s,10,26.14082861328125,2.614082861328125,0.011914775413420791,2.610779541015625,2.6306078857421875,2.634568347167969,2.637736716308594,"[2.611528564453125, 2.6104423828125, 2.59789453125, 2.61111669921875, 2.63852880859375, 2.629727783203125, 2.6220380859375, 2.608635498046875, 2.600720947265625, 2.6101953125]",tokens/s,24.10023068970043,kWh,7.610696786274925e-05,8.394867219130538e-06,3.956487152115584e-05,0.00012406670660303562,tokens/kWh,507791.34648568596,,s,630,26.13824554824831,0.04148927864801316,0.0006184958777199364,0.041419855117797846,0.04189420585632324,0.04236502590179443,0.04382623149871827,"[0.04195667266845703, 0.04202963256835938, 0.041267200469970705, 0.04137078475952148, 0.0412660140991211, 0.042557086944580075, 0.04126927947998047, 0.04149484634399414, 0.041197856903076174, 0.04129148864746094, 0.04126617431640625, 0.041118881225585935, 0.041162464141845705, 0.04125699234008789, 0.04129391860961914, 0.04084086227416992, 0.041008670806884764, 0.04100566482543945, 0.041231998443603514, 0.04145180892944336, 0.0422935676574707, 0.044232704162597655, 0.04164931106567383, 
0.04157891082763672, 0.041103233337402345, 0.041818687438964844, 0.04180748748779297, 0.04140428924560547, 0.04101603317260742, 0.04107651138305664, 0.0413941764831543, 0.04100710296630859, 0.04099407958984375, 0.04083375930786133, 0.04077151870727539, 0.041072704315185546, 0.040858848571777344, 0.04132534408569336, 0.041379711151123044, 0.04089251327514649, 0.040884254455566406, 0.040855457305908206, 0.041457664489746096, 0.04174800109863281, 0.041439712524414064, 0.046988800048828126, 0.04187811279296875, 0.041504032135009764, 0.04126793670654297, 0.04129740905761719, 0.041056766510009765, 0.041002399444580076, 0.041101921081542966, 0.041560062408447264, 0.04127651214599609, 0.041304832458496095, 0.04122639846801758, 0.04103372955322265, 0.04098428726196289, 0.041533729553222654, 0.04110559844970703, 0.041401248931884765, 0.04152028656005859, 0.04128121566772461, 0.041234752655029294, 0.04130799865722656, 0.041205921173095704, 0.04147814559936523, 0.04184896087646484, 0.041495487213134764, 0.04142176055908203, 0.0414865608215332, 0.04095772933959961, 0.04122608184814453, 0.04103798294067383, 0.041267425537109374, 0.041757568359375, 0.04132547378540039, 0.04164108657836914, 0.04168918228149414, 0.04189263916015625, 0.041783294677734374, 0.04181388854980469, 0.04174860763549805, 0.04185414505004883, 0.04139705657958984, 0.041094432830810546, 0.04138873672485351, 0.04115459060668945, 0.042151710510253904, 0.04319168090820313, 0.041517345428466794, 0.04183436965942383, 0.0419947509765625, 0.04145980834960938, 0.04141884613037109, 0.0413040657043457, 0.04101324844360352, 0.04084121704101563, 0.04096409606933594, 0.04149248123168945, 0.041014270782470705, 0.04082995223999023, 0.04078182220458984, 0.04091494369506836, 0.04139606475830078, 0.04163190460205078, 0.04177651214599609, 0.041849472045898437, 0.04139212799072266, 0.04104121780395508, 0.04083520126342773, 0.04112831878662109, 0.0414455680847168, 0.0430772476196289, 0.04150416183471679, 0.041157600402832034, 0.041226112365722656, 0.04152127838134766, 0.0418240966796875, 0.04128492736816406, 0.04090351867675781, 0.04076755142211914, 0.04133881759643555, 0.04142489624023438, 0.04114022445678711, 0.04160086441040039, 0.04104540634155274, 0.040891136169433594, 0.041119487762451175, 0.04092684936523437, 0.04109888076782227, 0.040941566467285154, 0.041073375701904294, 0.0413350715637207, 0.04105625534057617, 0.04153139114379883, 0.04155091094970703, 0.04127840042114258, 0.041267070770263675, 0.041181121826171875, 0.041316001892089844, 0.04127340698242187, 0.04132912063598633, 0.04118735885620117, 0.04121392059326172, 0.041728000640869144, 0.04143907165527344, 0.04128729629516602, 0.04127961730957031, 0.04143779373168945, 0.04115647888183594, 0.04159481430053711, 0.0413675537109375, 0.041064640045166016, 0.04096736145019531, 0.0409381103515625, 0.04096614456176758, 0.04293222427368164, 0.04140982437133789, 0.04238016128540039, 0.04139596939086914, 0.04135523223876953, 0.04134105682373047, 0.041232383728027344, 0.041529342651367186, 0.04123648071289063, 0.04115251159667969, 0.04118527984619141, 0.04100710296630859, 0.041240577697753904, 0.04130524826049805, 0.0412369613647461, 0.04092892837524414, 0.040813247680664064, 0.04087811279296875, 0.040736766815185545, 0.04090265655517578, 0.04134092712402344, 0.04091648101806641, 0.04093094253540039, 0.04081139373779297, 0.04093132781982422, 0.040681472778320314, 0.04148652648925781, 0.041312065124511715, 0.04105011367797851, 0.04110732650756836, 0.0414474868774414, 0.04245888137817383, 
0.04284988784790039, 0.041732769012451175, 0.04717500686645508, 0.04209040069580078, 0.041661151885986326, 0.04148358535766602, 0.04119209671020508, 0.04107475280761719, 0.041342815399169924, 0.041418910980224606, 0.04107193756103516, 0.041022144317626956, 0.04152051162719726, 0.041468544006347655, 0.041390079498291016, 0.041826305389404295, 0.041436607360839844, 0.04120025634765625, 0.041271232604980466, 0.04088835144042969, 0.04126857757568359, 0.04126537704467773, 0.04102595138549805, 0.04132863998413086, 0.041644031524658204, 0.04127743911743164, 0.041265151977539063, 0.04096819305419922, 0.04093286514282227, 0.04085785675048828, 0.04082412719726562, 0.04081760025024414, 0.040818367004394535, 0.04082454299926758, 0.04116540908813476, 0.04097433471679687, 0.041035423278808596, 0.040829280853271484, 0.04089206314086914, 0.04097196960449219, 0.04098729705810547, 0.04107468795776367, 0.04119337463378906, 0.04117308807373047, 0.04121331024169922, 0.04113398361206055, 0.04108771133422852, 0.0409535026550293, 0.04130031967163086, 0.041207809448242184, 0.04159078216552734, 0.041766910552978515, 0.04173183822631836, 0.04177123260498047, 0.04184681701660156, 0.041715614318847655, 0.04176700973510742, 0.04167411041259766, 0.041874046325683596, 0.04177305603027344, 0.04168473434448242, 0.0417446403503418, 0.04325580978393555, 0.04167270278930664, 0.04229526519775391, 0.04178083038330078, 0.042840511322021484, 0.04359065628051758, 0.04166329574584961, 0.04171583938598633, 0.04182636642456055, 0.04230144119262695, 0.04182015991210938, 0.04172390365600586, 0.04234652709960938, 0.041770977020263673, 0.04189532852172852, 0.04167283248901367, 0.04177897644042969, 0.04202550506591797, 0.04169334411621094, 0.0417628173828125, 0.042237918853759764, 0.04176899337768555, 0.041578495025634765, 0.042266849517822266, 0.04587046432495117, 0.04177961730957031, 0.04220723342895508, 0.04155187225341797, 0.04184822463989258, 0.04149308776855469, 0.041582561492919924, 0.04239567947387695, 0.04180377578735352, 0.04188774490356445, 0.04262092971801758, 0.0416255989074707, 0.0417259521484375, 0.04148223876953125, 0.04351379013061524, 0.04166662216186524, 0.041404415130615234, 0.04155958557128906, 0.041412639617919925, 0.041320030212402346, 0.04124553680419922, 0.04131164932250977, 0.0417163200378418, 0.041875137329101565, 0.04139235305786133, 0.04144342422485352, 0.04141875076293945, 0.04134297561645508, 0.04148771286010742, 0.04122000122070312, 0.04111027145385742, 0.04134092712402344, 0.04151289749145508, 0.041412033081054685, 0.04159961700439453, 0.04177305603027344, 0.04169113540649414, 0.04173971176147461, 0.04158067321777344, 0.0421558723449707, 0.04183260726928711, 0.041621505737304686, 0.0415939826965332, 0.042621822357177735, 0.04240588760375977, 0.041788894653320315, 0.04158428955078125, 0.04176780700683594, 0.0413941764831543, 0.04171891021728515, 0.041702110290527346, 0.041968894958496095, 0.041745311737060545, 0.04151433563232422, 0.04140099334716797, 0.04169318389892578, 0.04173619079589844, 0.04228643035888672, 0.04189977645874023, 0.04183516693115234, 0.04171596908569336, 0.04177612686157227, 0.04172902297973633, 0.04173004913330078, 0.0414837760925293, 0.04160099029541016, 0.04161180877685547, 0.043265727996826174, 0.04388691329956055, 0.04172528076171875, 0.04184521484375, 0.04179167938232422, 0.041662464141845705, 0.04178700637817383, 0.041516990661621095, 0.041549697875976566, 0.04146201705932617, 0.041394111633300784, 0.041511295318603515, 0.04171980667114258, 0.04179763031005859, 
0.041678848266601565, 0.04160102462768555, 0.041640960693359375, 0.041517375946044925, 0.04160550308227539, 0.041443359375, 0.04154390335083008, 0.041633857727050784, 0.04168703842163086, 0.04150207901000977, 0.041509502410888674, 0.041668159484863285, 0.04177145767211914, 0.04185059356689453, 0.04178963088989258, 0.04154377746582031, 0.041374752044677734, 0.04143155288696289, 0.041288158416748044, 0.041297824859619144, 0.041240352630615235, 0.042524833679199216, 0.04174393463134766, 0.04138848114013672, 0.04145971298217774, 0.04146176147460937, 0.04140188980102539, 0.04137622451782227, 0.04150886535644531, 0.04131804656982422, 0.04339315032958985, 0.04182777786254883, 0.041947166442871095, 0.04165708923339844, 0.04184665679931641, 0.041766944885253905, 0.04146966552734375, 0.04189408111572265, 0.04165840148925781, 0.04147011184692383, 0.0413873291015625, 0.04153414535522461, 0.04152489471435547, 0.041643585205078125, 0.041597728729248044, 0.0417259521484375, 0.0418581771850586, 0.04154025650024414, 0.04154185485839844, 0.041322494506835936, 0.0413675537109375, 0.04158399963378906, 0.04163993453979492, 0.04145420837402344, 0.041404415130615234, 0.04136470413208008, 0.041597728729248044, 0.04147609710693359, 0.04146921539306641, 0.04155839920043945, 0.041656383514404295, 0.04152348709106445, 0.04142655944824219, 0.04146342468261719, 0.041501697540283204, 0.04168220901489258, 0.04194761657714844, 0.04165206527709961, 0.04155615997314453, 0.041688190460205075, 0.041753440856933596, 0.041680736541748045, 0.041799102783203125, 0.0414502067565918, 0.04143539047241211, 0.04136067199707031, 0.041562591552734375, 0.041713600158691404, 0.04153760147094727, 0.04139199829101563, 0.041767040252685544, 0.04155206298828125, 0.04139206314086914, 0.04157632064819336, 0.04217446517944336, 0.04154508972167969, 0.04158118438720703, 0.041441280364990236, 0.04150067138671875, 0.04141670227050781, 0.04158259201049805, 0.041695232391357424, 0.041551456451416016, 0.04165264129638672, 0.041545726776123046, 0.04134089660644531, 0.04157817459106445, 0.04142524719238281, 0.04148223876953125, 0.04152896118164062, 0.041551456451416016, 0.04153731155395508, 0.04151193618774414, 0.04143308639526367, 0.04189574432373047, 0.041631935119628906, 0.04155327987670898, 0.041495166778564456, 0.04135116958618164, 0.041215999603271485, 0.04132361602783203, 0.04130902481079102, 0.04121401596069336, 0.04137472152709961, 0.04149116897583008, 0.041509151458740234, 0.041646080017089845, 0.04132863998413086, 0.041439231872558595, 0.04152524948120117, 0.041420799255371094, 0.04118527984619141, 0.04132863998413086, 0.04125900650024414, 0.0413675537109375, 0.041457313537597656, 0.042084705352783205, 0.04253696060180664, 0.041565567016601565, 0.04181670379638672, 0.04205363082885742, 0.04164198303222656, 0.041232383728027344, 0.04110707092285156, 0.04090099334716797, 0.04080230331420898, 0.04088627243041992, 0.040864990234375, 0.0407949104309082, 0.04093952178955078, 0.04098867034912109, 0.04085321426391601, 0.040828224182128905, 0.04123235321044922, 0.04130099105834961, 0.04086191940307617, 0.04070345687866211, 0.04162358474731445, 0.041076736450195314, 0.043443359375, 0.04140323257446289, 0.041076736450195314, 0.04140828704833984, 0.041326305389404294, 0.04130867385864258, 0.041479969024658205, 0.04140662384033203, 0.04117715072631836, 0.04106854248046875, 0.04105625534057617, 0.041060352325439455, 0.04103577423095703, 0.04108492660522461, 0.041025535583496094, 0.040976383209228515, 0.04091670227050781, 0.0415332145690918, 
0.04091107177734375, 0.04096387100219727, 0.041337345123291014, 0.04109900665283203, 0.04262732696533203, 0.041398273468017575, 0.04101103973388672, 0.04135913467407227, 0.041183361053466795, 0.041453823089599606, 0.04111123275756836, 0.041100704193115234, 0.04078192138671875, 0.040775615692138674, 0.040771617889404296, 0.04081734466552735, 0.0413059196472168, 0.04179180908203125, 0.04178716659545898, 0.04197196960449219, 0.04163987350463867, 0.04173625564575195, 0.04138598251342773, 0.04157030487060547, 0.04190947341918945, 0.041543872833251956, 0.04121782302856445, 0.04128031921386719, 0.04367766571044922, 0.04148227310180664, 0.04111974334716797, 0.04088623809814453, 0.04075113677978515, 0.04100492858886719, 0.041381759643554686, 0.041197822570800784, 0.04097558212280274, 0.0407949104309082, 0.040521728515625, 0.040538047790527346, 0.04066019058227539, 0.04064716720581055, 0.04050495910644531, 0.04201232147216797, 0.0413842887878418, 0.04179558563232422, 0.041797439575195314, 0.04211321640014649, 0.04227859115600586, 0.043055423736572264, 0.04301824188232422, 0.04143692779541015, 0.041707775115966794, 0.04136140823364258, 0.04100259017944336, 0.04166902542114258, 0.04116656112670898, 0.04101763153076172, 0.04076512145996094, 0.04118355178833008, 0.041248767852783204, 0.04183555221557617, 0.042054271697998045, 0.041816062927246093, 0.04214204788208008, 0.041681919097900394, 0.041247455596923825, 0.04117942428588867, 0.04102143859863281, 0.041201663970947267, 0.04103577423095703, 0.04122592163085938, 0.04132675170898437, 0.040900768280029295, 0.041088863372802736, 0.04082089614868164, 0.04102089691162109, 0.04123654556274414, 0.04095024108886719, 0.04426259231567383, 0.04100998306274414, 0.040771583557128906, 0.04094889450073242, 0.04098748779296875, 0.041100353240966794, 0.04105926513671875, 0.04122320175170899, 0.04106953430175781, 0.041166847229003906, 0.04106752014160156, 0.04101011276245117, 0.0409431037902832, 0.04098515319824219, 0.04100505447387695, 0.040771488189697266, 0.04126115036010742, 0.04394803237915039, 0.041422847747802735, 0.04249948883056641, 0.040849567413330075, 0.040632030487060544, 0.04167478561401367, 0.041267200469970705, 0.04157884979248047, 0.04094153594970703, 0.040683841705322264]",tokens/s,24.102612351586114,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 
104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, 
in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8221.282304,11251.089408,0.0,10848.567296,10616.027648,s,1,14.7554013671875,14.7554013671875,0.0,14.7554013671875,14.7554013671875,14.7554013671875,14.7554013671875,[14.7554013671875],,kWh,0.0002209302226541543,2.436287976282825e-05,6.9959778189993e-05,0.00031525288060697555,,MB,4064.768,11683.10272,0.0,11265.900544,11070.470656,s,10,3.879580230712891,0.38795802307128907,0.0011890479509772249,0.3883685150146484,0.3889663269042969,0.38932593688964845,0.3896136248779297,"[0.38569003295898435, 0.3864775695800781, 0.38696759033203126, 0.3882878112792969, 0.3875245361328125, 0.38844921875, 0.3887919921875, 0.389685546875, 0.38888641357421877, 0.38881951904296874]",tokens/s,659.8652039036677,kWh,1.137961524022779e-05,1.254076140663074e-06,7.593948382845729e-06,2.0227639763736594e-05,tokens/kWh,12655950.125182072,MB,4069.093376,11685.199872,0.0,11267.997696,11070.473216,s,10,29.413074218749998,2.941307421875,0.003709578984283107,2.941876342773438,2.9450629150390624,2.945871179199219,2.9465177905273436,"[2.933745849609375, 2.940679443359375, 2.93713037109375, 2.93880419921875, 2.944797119140625, 2.942601806640625, 2.942174560546875, 2.941578125, 2.94488330078125, 
2.946679443359375]",tokens/s,21.41904635042851,kWh,8.608231636060668e-05,9.495982635104732e-06,5.7154992305154565e-05,0.000152733291300866,tokens/kWh,412483.7451181332,,s,630,29.4103555870056,0.04668310410635813,0.0005255910670798737,0.0466583194732666,0.04706921768188477,0.04724175472259521,0.048495202293395996,"[0.04829600143432617, 0.046514175415039063, 0.046229343414306644, 0.04603305435180664, 0.04623462295532227, 0.04603740692138672, 0.045771358489990234, 0.04579328155517578, 0.046129150390625, 0.04637843322753906, 0.04619244766235352, 0.046760704040527346, 0.04653862380981445, 0.046192798614501956, 0.046122974395751956, 0.04616191864013672, 0.04629449462890625, 0.046120830535888675, 0.04599465560913086, 0.0466431999206543, 0.04670803070068359, 0.04671939086914063, 0.04651651382446289, 0.04636467361450195, 0.04610047912597656, 0.04639744186401367, 0.046473217010498044, 0.046069759368896485, 0.046378273010253906, 0.04660092926025391, 0.046476894378662106, 0.0466981430053711, 0.046932735443115235, 0.04674150466918945, 0.04641334533691406, 0.04662255859375, 0.04647942352294922, 0.046137920379638674, 0.04638105773925781, 0.04660019302368164, 0.046530208587646484, 0.04653910446166992, 0.04682547378540039, 0.046884864807128904, 0.04677632141113281, 0.046820449829101565, 0.04669647979736328, 0.04642873764038086, 0.046897377014160156, 0.04684502410888672, 0.04650086212158203, 0.046721023559570314, 0.04682137680053711, 0.046698497772216796, 0.04672512054443359, 0.046925121307373044, 0.04910559844970703, 0.04643840026855469, 0.046671520233154296, 0.04653705596923828, 0.046800224304199216, 0.047094337463378905, 0.0469095344543457, 0.04945030212402344, 0.046708927154541016, 0.04609891128540039, 0.04603715133666992, 0.04636876678466797, 0.04570486450195312, 0.05181475067138672, 0.04499987030029297, 0.04573430252075195, 0.04624832153320312, 0.04659817504882813, 0.046029983520507814, 0.046076286315917966, 0.04653641510009766, 0.046285377502441404, 0.04629318237304687, 0.046773887634277346, 0.04653683090209961, 0.046168319702148436, 0.04622454452514648, 0.046793567657470704, 0.04655923080444336, 0.046276256561279296, 0.046592353820800785, 0.046745246887207034, 0.04664297485351562, 0.0469318733215332, 0.04649027252197266, 0.04614963150024414, 0.04642406463623047, 0.04652646255493164, 0.04603696060180664, 0.04611689758300781, 0.046884864807128904, 0.04630697631835937, 0.046428352355957034, 0.046778144836425783, 0.046965118408203124, 0.046542686462402345, 0.046536865234375, 0.046704254150390624, 0.04649203109741211, 0.046714366912841795, 0.04700739288330078, 0.046728031158447265, 0.04670217514038086, 0.04725372695922852, 0.046953983306884765, 0.04758393478393555, 0.04649574279785156, 0.046561279296875, 0.0466165771484375, 0.04693401718139648, 0.04668390274047852, 0.04667391967773438, 0.0469153938293457, 0.046815681457519534, 0.04658995056152344, 0.04704460906982422, 0.04695040130615234, 0.04686643218994141, 0.046833568572998044, 0.04683990478515625, 0.04942233657836914, 0.04676198577880859, 0.04587519836425781, 0.04580556869506836, 0.04603871917724609, 0.04590169525146484, 0.04571590423583984, 0.04631955337524414, 0.046359840393066405, 0.046113601684570314, 0.04618972778320313, 0.04638803100585937, 0.046317569732666014, 0.04597488021850586, 0.04650380706787109, 0.04635084915161133, 0.04623750305175781, 0.04629497528076172, 0.04658771133422852, 0.046461471557617186, 0.04665676879882812, 0.04743008041381836, 0.04673129653930664, 0.04617855834960938, 0.04678255844116211, 0.04662239837646484, 
0.046219711303710935, 0.04641177749633789, 0.04650608062744141, 0.04638505554199219, 0.04642816162109375, 0.04669007873535156, 0.04639059066772461, 0.046488544464111325, 0.04674460983276367, 0.04697100830078125, 0.04639004898071289, 0.04655513763427734, 0.046575489044189455, 0.04659622573852539, 0.04673126220703125, 0.04710400009155274, 0.047233024597167966, 0.04712243270874023, 0.046809215545654294, 0.046646686553955076, 0.046524513244628904, 0.04679894256591797, 0.04665727996826172, 0.04689891052246094, 0.04712019348144531, 0.04648819351196289, 0.04659260940551758, 0.04669827270507813, 0.04675318527221679, 0.046672481536865235, 0.046841312408447265, 0.04689974212646485, 0.046729217529296874, 0.04729596710205078, 0.046878719329833986, 0.0469398078918457, 0.04704886245727539, 0.048091583251953125, 0.04640473556518555, 0.046363521575927734, 0.04606771087646484, 0.04585007858276367, 0.04629353713989258, 0.04628585433959961, 0.04609737777709961, 0.04614947128295899, 0.04623756790161133, 0.04603548812866211, 0.046257568359375, 0.046852447509765624, 0.04653875350952148, 0.04605132675170898, 0.04638719940185547, 0.04664934539794922, 0.046114303588867187, 0.04650390243530273, 0.04653110504150391, 0.04660838317871094, 0.04658380889892578, 0.046894367218017576, 0.04673353576660156, 0.04650774383544922, 0.04660713577270508, 0.04670444869995117, 0.04628704071044922, 0.04620431900024414, 0.04658646392822265, 0.04629417419433594, 0.04644905471801758, 0.04682387161254883, 0.046772224426269535, 0.0467432975769043, 0.046784767150878905, 0.04690124893188476, 0.046514175415039063, 0.04650131225585938, 0.046699073791503905, 0.04652032089233398, 0.046827518463134765, 0.04708508682250977, 0.046928352355957034, 0.04696473693847656, 0.04702207946777344, 0.04698432159423828, 0.04692671966552735, 0.04689920043945312, 0.04646912002563477, 0.046680065155029295, 0.04673126220703125, 0.046746910095214846, 0.04665212631225586, 0.047065086364746093, 0.046827518463134765, 0.047006942749023437, 0.047121185302734375, 0.0471176643371582, 0.04703913497924805, 0.04687036895751953, 0.0468966064453125, 0.04718422317504883, 0.04805686569213867, 0.046181598663330076, 0.046041057586669924, 0.0462564811706543, 0.04610496139526367, 0.04617020797729492, 0.04639539337158203, 0.047232894897460936, 0.04614771270751953, 0.046331455230712894, 0.046322113037109376, 0.04604108810424805, 0.04619417572021484, 0.04626483154296875, 0.04629462432861328, 0.0468504638671875, 0.046657470703125, 0.046569534301757816, 0.0465797119140625, 0.04675958251953125, 0.04655129623413086, 0.04619068908691406, 0.05128963088989258, 0.04571798324584961, 0.046427520751953125, 0.04671142578125, 0.046682113647460936, 0.04661840057373047, 0.046696670532226564, 0.04678451156616211, 0.04664710235595703, 0.046631103515625, 0.04646092987060547, 0.04645280075073242, 0.04637664031982422, 0.04681548690795898, 0.04649574279785156, 0.04681727981567383, 0.046943519592285154, 0.046758079528808595, 0.046836158752441404, 0.046929054260253907, 0.04675884628295898, 0.04671855926513672, 0.04821238327026367, 0.046698497772216796, 0.046413822174072264, 0.046650623321533205, 0.04716550445556641, 0.046610977172851564, 0.046934177398681644, 0.046968223571777344, 0.04665200042724609, 0.04673072052001953, 0.046995998382568356, 0.046626304626464846, 0.04695644760131836, 0.04686211013793945, 0.046771007537841795, 0.047065086364746093, 0.04719804763793945, 0.047115550994873044, 0.047137569427490235, 0.04835184097290039, 0.04641286468505859, 0.046349246978759764, 0.046080001831054686, 
0.045856769561767576, 0.046358528137207033, 0.04624492645263672, 0.046140159606933594, 0.04638329696655273, 0.04625408172607422, 0.04628591918945312, 0.046443233489990236, 0.04643859100341797, 0.046369953155517576, 0.04664713668823242, 0.046430206298828124, 0.0464967041015625, 0.04662025451660156, 0.04647164916992187, 0.04671692657470703, 0.046651103973388675, 0.04656771087646484, 0.046607967376708984, 0.04638351821899414, 0.04640768051147461, 0.04667801666259765, 0.0468131217956543, 0.046470623016357425, 0.046700958251953126, 0.04656515121459961, 0.04626668930053711, 0.04656547164916992, 0.046811134338378906, 0.046460769653320313, 0.04639350509643555, 0.04671599960327148, 0.04675267028808594, 0.04673331069946289, 0.046999233245849606, 0.04676217651367188, 0.048410751342773437, 0.04656947326660156, 0.04686438369750977, 0.04684185409545898, 0.046704639434814454, 0.04702544021606445, 0.04688275146484375, 0.04683769607543945, 0.046965599060058594, 0.04697907257080078, 0.046843902587890625, 0.046751552581787106, 0.04665116882324219, 0.04695024108886719, 0.046774463653564455, 0.046258560180664064, 0.04680704116821289, 0.04688000106811523, 0.04687744140625, 0.04700364685058594, 0.04717148971557617, 0.04712403106689453, 0.04849692916870117, 0.04849097442626953, 0.04662435150146484, 0.046093921661376956, 0.04587177658081055, 0.04598515319824219, 0.04611663818359375, 0.04631232070922851, 0.046096511840820316, 0.04637286376953125, 0.04629094314575195, 0.04627865600585938, 0.0463419189453125, 0.047530208587646484, 0.046159870147705076, 0.04601036834716797, 0.04645862579345703, 0.046573825836181644, 0.046647071838378906, 0.0467355842590332, 0.04678451156616211, 0.04673843383789063, 0.04711244964599609, 0.04705104064941406, 0.04659791946411133, 0.046557888031005856, 0.04648550415039063, 0.04600627136230469, 0.04639641571044922, 0.046601215362548826, 0.04674950408935547, 0.04667724609375, 0.04648236846923828, 0.04693196868896484, 0.04674150466918945, 0.04664476776123047, 0.04692793655395508, 0.046110462188720704, 0.04660086441040039, 0.04693715286254883, 0.046787521362304685, 0.04681011199951172, 0.04710092926025391, 0.047058944702148435, 0.04697087860107422, 0.04683161544799805, 0.04703615951538086, 0.04656083297729492, 0.046871231079101565, 0.04668758392333985, 0.04655904006958008, 0.0468939208984375, 0.04678425598144531, 0.046661888122558594, 0.04700774383544922, 0.046929920196533206, 0.04675993728637695, 0.04675337600708008, 0.047497631072998044, 0.0468109130859375, 0.0470786247253418, 0.04710707092285156, 0.047168544769287106, 0.04705174255371094, 0.04799676895141602, 0.04636483383178711, 0.04616556930541992, 0.04591865539550781, 0.04605094528198242, 0.0463691520690918, 0.04638006210327148, 0.04674012756347656, 0.04618703842163086, 0.04644540786743164, 0.04647417449951172, 0.046411361694335934, 0.04647756958007813, 0.04626243209838867, 0.046279903411865234, 0.046647777557373045, 0.04662713623046875, 0.04635027313232422, 0.046537952423095705, 0.0465437126159668, 0.04663075256347656, 0.0465838737487793, 0.04677846527099609, 0.04639888000488281, 0.046441055297851565, 0.046659168243408204, 0.04667843246459961, 0.04629232025146485, 0.04635919952392578, 0.046561279296875, 0.046488670349121096, 0.04634307098388672, 0.04669615936279297, 0.04686249542236328, 0.04675801467895508, 0.04698112106323242, 0.04680214309692383, 0.046717121124267576, 0.04682198333740235, 0.04677571105957031, 0.046885471343994144, 0.04725350570678711, 0.04699504089355469, 0.046858657836914064, 0.04695449447631836, 
0.04684799957275391, 0.04657766342163086, 0.04682547378540039, 0.04690118408203125, 0.046732990264892575, 0.04668460845947266, 0.04693119812011719, 0.04671900939941406, 0.046639328002929685, 0.04693446350097656, 0.04675174331665039, 0.04699545669555664, 0.04698505783081055, 0.04686249542236328, 0.047075294494628904, 0.04726732635498047, 0.047298080444335935, 0.04749414443969727, 0.0482529296875, 0.04642201614379883, 0.04611638259887695, 0.04609276962280273, 0.04594483184814453, 0.04662886428833008, 0.046179454803466795, 0.0458474235534668, 0.04632166290283203, 0.046929920196533206, 0.04665139389038086, 0.04672512054443359, 0.046635009765625, 0.046429759979248045, 0.04653919982910156, 0.04658713531494141, 0.0464719352722168, 0.046243839263916016, 0.04653827285766601, 0.046872638702392576, 0.04671321487426758, 0.04706467056274414, 0.0467685775756836, 0.04639334487915039, 0.046357601165771485, 0.04656793594360351, 0.046504352569580076, 0.046480960845947265, 0.046825408935546875, 0.04690995025634766, 0.04672249603271485, 0.04676051330566406, 0.04689481735229492, 0.04661423873901367, 0.046688480377197264, 0.04681763076782226, 0.046478431701660154, 0.046494529724121096, 0.04681299209594727, 0.047057022094726564, 0.04720655822753906, 0.04724889755249023, 0.04686899185180664, 0.04693302536010742, 0.046817310333251955, 0.04666054534912109, 0.04656947326660156, 0.04689673614501953, 0.046594017028808596, 0.04667596817016602, 0.046987617492675784, 0.04716553497314453, 0.04718307113647461, 0.04712323379516602, 0.04698051071166992, 0.0469692497253418, 0.04698336029052735, 0.046931072235107424, 0.04698406219482422, 0.04711199951171875, 0.04695391845703125, 0.047128894805908206, 0.04729651260375976, 0.049423583984375, 0.046863136291503904, 0.0461143684387207, 0.04599443054199219, 0.04634979248046875, 0.046217025756835936, 0.0458221435546875, 0.04631196975708008, 0.046534656524658206, 0.04654883193969726, 0.046543006896972654, 0.046508033752441405, 0.046446273803710934, 0.04648505783081055, 0.04655795288085938, 0.046233505249023435, 0.046336063385009764, 0.04646710586547852, 0.046548065185546876, 0.046535457611083984, 0.046983295440673825, 0.04687577438354492, 0.04660831832885742, 0.0465847053527832, 0.04685120010375977, 0.04669945526123047, 0.046704063415527346, 0.04653878402709961, 0.046714622497558596, 0.04644124984741211, 0.046745376586914064, 0.04666799926757813, 0.04632073593139648, 0.04641788864135742, 0.04682841491699219, 0.04682150268554688, 0.04680409622192383, 0.0468078727722168, 0.04660224151611328, 0.04668822479248047, 0.04706854248046875, 0.04689168167114258, 0.04682342529296875, 0.046884159088134765, 0.046905792236328125, 0.04687283325195313, 0.04692172622680664, 0.04688016128540039, 0.04698992156982422, 0.046855327606201175, 0.04713558578491211, 0.04704051208496094, 0.04698316955566406, 0.04688876724243164, 0.04687686538696289, 0.04705043029785156, 0.04703263854980469, 0.047527935028076174, 0.04738396835327149, 0.04708822250366211, 0.047167198181152344, 0.04733161544799805, 0.04726784133911133]",tokens/s,21.421026282264773,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1843.441664,2768.109568,0.0,2365.587456,2314.318336,s,1,9.0314384765625,9.0314384765625,0.0,9.0314384765625,9.0314384765625,9.0314384765625,9.0314384765625,[9.0314384765625],,kWh,5.576951102924189e-05,6.143028031684534e-06,1.747556953599072e-05,7.938810859691714e-05,,MB,1802.203136,3099.459584,0.0,2682.257408,2607.60832,s,10,0.5094228477478027,0.050942284774780265,0.000164140763643313,0.0509212646484375,0.05103930244445801,0.05121025829315186,0.051347022972106936,"[0.0513812141418457, 0.050778465270996095, 0.050880321502685545, 0.05085750579833984, 0.051001312255859375, 0.05083763122558594, 0.05079107284545899, 0.05096323013305664, 0.05096988677978516, 0.05096220779418945]",tokens/s,5025.294823971786,kWh,1.5732816776659665e-06,1.7349392900307585e-07,1.0388986805806188e-06,2.785674287249661e-06,tokens/kWh,91898755.41865762,MB,1802.203136,3099.459584,0.0,2682.257408,2607.61088,s,10,16.15611767578125,1.615611767578125,0.011111971023395102,1.6170904541015627,1.62726630859375,1.6286459716796875,1.6297497021484375,"[1.610062255859375, 1.5920372314453124, 1.6093472900390624, 1.6120970458984376, 1.6062078857421875, 1.626959716796875, 1.630025634765625, 1.6220838623046876, 1.6231119384765624, 
1.624184814453125]",tokens/s,38.99451666809771,kWh,4.756923224399482e-05,5.246551573431485e-06,2.2803148170818837e-05,7.561893198824516e-05,tokens/kWh,833124.6996425876,,s,630,16.15389964866638,0.025641110553438703,0.0006638667839047792,0.025539455413818357,0.025951209449768066,0.02636874895095825,0.029257596092224142,"[0.025554975509643554, 0.025382911682128906, 0.025636320114135743, 0.025473567962646486, 0.025359712600708007, 0.02520319938659668, 0.025341503143310545, 0.025606399536132814, 0.02529110336303711, 0.025198591232299804, 0.02512895965576172, 0.025296096801757813, 0.025166624069213866, 0.025239551544189453, 0.025364479064941405, 0.025204736709594725, 0.025143295288085937, 0.029410688400268555, 0.025628576278686522, 0.02553932762145996, 0.025427936553955078, 0.0254334716796875, 0.025604991912841796, 0.026100608825683595, 0.028091264724731446, 0.026148832321166993, 0.02590105628967285, 0.028327968597412108, 0.025540607452392578, 0.025577472686767577, 0.025573375701904297, 0.025409536361694338, 0.025350143432617187, 0.02570240020751953, 0.025421472549438478, 0.025373023986816408, 0.02524569511413574, 0.025369983673095703, 0.025249664306640624, 0.02525823974609375, 0.02522163200378418, 0.02529689598083496, 0.02529280090332031, 0.025470176696777345, 0.02529795265197754, 0.025230623245239257, 0.02535036849975586, 0.025449951171875, 0.025445152282714843, 0.025536512374877928, 0.02529427146911621, 0.02525446319580078, 0.025181791305541993, 0.02530124855041504, 0.025196224212646483, 0.025329280853271484, 0.025613151550292968, 0.025433887481689454, 0.025241792678833006, 0.025327648162841797, 0.025255935668945313, 0.02534604835510254, 0.02520591926574707, 0.02554252815246582, 0.025452768325805664, 0.025509792327880858, 0.025296319961547853, 0.02553286361694336, 0.02535321617126465, 0.025218048095703126, 0.02533990478515625, 0.025297920227050782, 0.025084928512573244, 0.02513907241821289, 0.025146656036376953, 0.025074527740478515, 0.025020095825195314, 0.02520848083496094, 0.025094432830810545, 0.025137535095214845, 0.025140735626220705, 0.025035263061523438, 0.02509552001953125, 0.02516649627685547, 0.025163616180419922, 0.02523971176147461, 0.025286048889160157, 0.025368671417236328, 0.025086336135864258, 0.02508559989929199, 0.02549603271484375, 0.02531123161315918, 0.025104639053344726, 0.02537446403503418, 0.02532147216796875, 0.025188352584838865, 0.024968767166137697, 0.025176511764526368, 0.025525663375854494, 0.02523535919189453, 0.025600799560546873, 0.025216928482055666, 0.025364383697509766, 0.025143104553222655, 0.02506985664367676, 0.025309183120727538, 0.0251494083404541, 0.02504889678955078, 0.02530121612548828, 0.025405439376831054, 0.025435903549194335, 0.025058847427368164, 0.025078496932983398, 0.025433504104614257, 0.025506399154663087, 0.02553753662109375, 0.025539583206176757, 0.02547030448913574, 0.025391775131225584, 0.025177824020385743, 0.025076192855834963, 0.025124671936035157, 0.025192447662353516, 0.025829376220703124, 0.025208160400390624, 0.02532419204711914, 0.025854560852050783, 0.02523360061645508, 0.025069568634033205, 0.025116672515869142, 0.026666431427001952, 0.025267776489257814, 0.025244064331054687, 0.02504313659667969, 0.025049375534057616, 0.025100223541259764, 0.025241600036621094, 0.02519171142578125, 0.025297983169555664, 0.02526608085632324, 0.02555897521972656, 0.025460575103759767, 0.025335136413574218, 0.02522195243835449, 0.02523766326904297, 0.025073503494262694, 0.02511052894592285, 0.025075712203979493, 0.025350143432617187, 
0.025056671142578125, 0.025098848342895507, 0.025135103225708007, 0.025274431228637696, 0.025137088775634767, 0.025251840591430662, 0.02532147216796875, 0.025650848388671876, 0.02584815979003906, 0.025609792709350584, 0.025670368194580077, 0.02665238380432129, 0.025738975524902345, 0.02562220764160156, 0.025666303634643554, 0.025579359054565428, 0.027954303741455078, 0.027660287857055665, 0.02556198310852051, 0.025446495056152343, 0.027240224838256836, 0.026191999435424804, 0.025650943756103516, 0.025628320693969725, 0.02544905662536621, 0.025464832305908205, 0.02548121643066406, 0.025450496673583983, 0.025657567977905273, 0.025644832611083985, 0.02612656021118164, 0.025417184829711913, 0.025221439361572267, 0.025624544143676757, 0.025507583618164062, 0.025454879760742188, 0.02524492835998535, 0.025281280517578126, 0.025126976013183595, 0.02527225685119629, 0.025606271743774413, 0.025235456466674806, 0.02520265579223633, 0.02522319984436035, 0.02513920021057129, 0.025276191711425783, 0.025286880493164063, 0.02521673583984375, 0.0257989444732666, 0.025289823532104492, 0.025186943054199218, 0.025387039184570314, 0.025138559341430663, 0.02556198310852051, 0.02591859245300293, 0.0295118408203125, 0.025297632217407228, 0.0251693115234375, 0.02547567939758301, 0.0251146240234375, 0.025238975524902344, 0.025882368087768556, 0.025215520858764648, 0.025155872344970704, 0.025726688385009765, 0.025530656814575194, 0.025490560531616212, 0.02525484848022461, 0.025345375061035156, 0.02554038429260254, 0.02542880058288574, 0.02520479965209961, 0.02514259147644043, 0.025037120819091797, 0.025106496810913086, 0.025176319122314453, 0.025133056640625, 0.025126911163330077, 0.025479167938232423, 0.025415679931640626, 0.025374496459960937, 0.025413856506347657, 0.02545254325866699, 0.025273408889770508, 0.025233760833740234, 0.025283039093017576, 0.02526630401611328, 0.02535206413269043, 0.02614630317687988, 0.02537651252746582, 0.025392192840576172, 0.025437311172485353, 0.02566828727722168, 0.0256484489440918, 0.026626752853393554, 0.03148956871032715, 0.02593235206604004, 0.02576688003540039, 0.025658239364624025, 0.025462848663330078, 0.0256975040435791, 0.02550454330444336, 0.02574127960205078, 0.025986623764038087, 0.02555254364013672, 0.025346879959106446, 0.025298751831054688, 0.02553251266479492, 0.02535433578491211, 0.025590848922729493, 0.025391359329223633, 0.025268543243408204, 0.02525404739379883, 0.025237695693969726, 0.025204736709594725, 0.025146560668945314, 0.02517888069152832, 0.025231424331665038, 0.02520230484008789, 0.025302623748779295, 0.026015743255615235, 0.02535513687133789, 0.025153472900390626, 0.025204511642456056, 0.02522502326965332, 0.026120576858520508, 0.02523664093017578, 0.025242240905761718, 0.025229536056518554, 0.025308256149291993, 0.025152416229248048, 0.025262176513671877, 0.025644927978515624, 0.025688095092773436, 0.02976972770690918, 0.02569011116027832, 0.025642431259155274, 0.02567625617980957, 0.025563167572021483, 0.02576799964904785, 0.02566886329650879, 0.02572159957885742, 0.025554943084716796, 0.025632255554199217, 0.02562873649597168, 0.025441856384277345, 0.025186239242553712, 0.025207744598388673, 0.025218463897705077, 0.02544246482849121, 0.02551785659790039, 0.025461408615112306, 0.025234432220458985, 0.025355104446411134, 0.026912927627563477, 0.025201663970947266, 0.02516080093383789, 0.02514678382873535, 0.02522572708129883, 0.025219072341918947, 0.025112319946289062, 0.025223424911499023, 0.025178112030029298, 0.02524278450012207, 
0.025825567245483398, 0.025258047103881836, 0.026087551116943358, 0.025666271209716797, 0.02553772735595703, 0.02569215965270996, 0.025486112594604492, 0.026117792129516603, 0.030194080352783204, 0.025702207565307618, 0.025510080337524416, 0.025346336364746095, 0.025541727066040038, 0.02561199951171875, 0.025365440368652344, 0.02573030471801758, 0.025551584243774413, 0.025753599166870117, 0.025825279235839844, 0.02575564765930176, 0.025568416595458984, 0.025479391098022462, 0.025502336502075194, 0.02557257652282715, 0.025481248855590822, 0.025485439300537108, 0.025427743911743163, 0.02542854309082031, 0.025388832092285155, 0.025304800033569337, 0.025373472213745116, 0.0254935359954834, 0.02560406494140625, 0.025397247314453125, 0.025390464782714842, 0.025436447143554686, 0.025829727172851563, 0.026572799682617186, 0.02592972755432129, 0.026097471237182618, 0.026316415786743163, 0.02637606430053711, 0.025706367492675783, 0.025680479049682618, 0.025872095108032227, 0.025727487564086913, 0.025831424713134765, 0.02602614402770996, 0.02888278388977051, 0.02614236831665039, 0.025808576583862305, 0.026438304901123047, 0.02599078369140625, 0.025923967361450195, 0.025665023803710937, 0.025604639053344726, 0.02575152015686035, 0.025599744796752928, 0.025764095306396485, 0.0256265926361084, 0.025691808700561522, 0.02558195114135742, 0.025475072860717773, 0.025472160339355468, 0.02554147148132324, 0.025894912719726562, 0.02549068832397461, 0.02548931121826172, 0.025567487716674806, 0.02567206382751465, 0.025491680145263672, 0.02553241539001465, 0.02533171272277832, 0.025570720672607423, 0.02618003273010254, 0.025532447814941406, 0.02550592041015625, 0.02555084800720215, 0.025619712829589844, 0.025668352127075196, 0.025530080795288086, 0.025522016525268556, 0.025403263092041016, 0.027214431762695314, 0.025632736206054687, 0.028835840225219726, 0.030126079559326172, 0.025823232650756835, 0.02569830322265625, 0.02566531181335449, 0.025718687057495117, 0.02618400001525879, 0.02557542419433594, 0.025554943084716796, 0.025479135513305665, 0.025622047424316407, 0.02562713623046875, 0.02637648010253906, 0.02574300765991211, 0.025657024383544922, 0.025479551315307616, 0.025604095458984375, 0.025540128707885742, 0.025546527862548827, 0.025465087890625, 0.025550271987915037, 0.02559404754638672, 0.025633216857910156, 0.025718879699707032, 0.025606559753417968, 0.025548671722412108, 0.02554265594482422, 0.02549065589904785, 0.026178272247314453, 0.025806880950927733, 0.02590444755554199, 0.02584239959716797, 0.025915391921997072, 0.025866239547729493, 0.0256092472076416, 0.025572383880615234, 0.02551100730895996, 0.025725088119506835, 0.02565315246582031, 0.02544233512878418, 0.025725055694580078, 0.028067935943603517, 0.02755436706542969, 0.026952959060668944, 0.025617151260375975, 0.02555423927307129, 0.025725055694580078, 0.025662015914916993, 0.025795711517333984, 0.025860895156860353, 0.025661535263061523, 0.02578358459472656, 0.02604310417175293, 0.02570579147338867, 0.02569696044921875, 0.02545180892944336, 0.025725664138793944, 0.025663135528564453, 0.025712799072265625, 0.02565497589111328, 0.02582579231262207, 0.025777984619140625, 0.02558585548400879, 0.02548067283630371, 0.026247711181640626, 0.025571264266967774, 0.025485376358032226, 0.025632768630981444, 0.025707712173461916, 0.025674367904663088, 0.02549295997619629, 0.02567651176452637, 0.025677600860595704, 0.025706367492675783, 0.025441823959350587, 0.025453407287597655, 0.02553788757324219, 0.025599807739257813, 0.025489952087402342, 
0.027807136535644532, 0.027714208602905275, 0.025868255615234374, 0.025565439224243165, 0.025556320190429686, 0.02552662467956543, 0.025454912185668945, 0.025338207244873047, 0.02556096076965332, 0.02552835273742676, 0.02546233558654785, 0.02538844871520996, 0.025594655990600585, 0.025581567764282227, 0.02592767906188965, 0.025833471298217774, 0.025911296844482422, 0.02589286422729492, 0.025751264572143554, 0.025635103225708007, 0.025607456207275392, 0.02573695945739746, 0.025487712860107422, 0.02551251220703125, 0.026218015670776366, 0.025487903594970704, 0.025577280044555666, 0.025834911346435546, 0.02557939147949219, 0.02561916732788086, 0.0260067195892334, 0.02689311981201172, 0.025577280044555666, 0.025790687561035155, 0.025650144577026367, 0.02567065620422363, 0.025860095977783205, 0.025634815216064453, 0.025683679580688477, 0.026175647735595702, 0.025950143814086914, 0.025837568283081053, 0.025849184036254882, 0.025713504791259764, 0.025605791091918944, 0.02555120086669922, 0.025488447189331055, 0.025457599639892577, 0.02548310470581055, 0.02557145690917969, 0.02541324806213379, 0.02547881507873535, 0.025514720916748047, 0.0257322883605957, 0.025604223251342772, 0.02565177536010742, 0.025900896072387696, 0.025935392379760742, 0.02568272018432617, 0.025636863708496094, 0.025932960510253907, 0.026614112854003905, 0.02576972770690918, 0.025711360931396483, 0.02570172882080078, 0.025733760833740234, 0.025538047790527343, 0.02556368064880371, 0.025605247497558593, 0.025830432891845702, 0.02676675224304199, 0.02588857650756836, 0.025794687271118163, 0.025739168167114256, 0.025487360000610353, 0.02565795135498047, 0.025734624862670898, 0.025707040786743164, 0.026023935317993165, 0.02634137535095215, 0.025894912719726562, 0.025813024520874025, 0.02561395263671875, 0.02555939292907715, 0.025559072494506837, 0.025577600479125977, 0.02541916847229004, 0.02556153678894043, 0.02588057518005371, 0.025821184158325194, 0.025907199859619142, 0.02565385627746582, 0.025479167938232423, 0.025638912200927736, 0.025517311096191406, 0.025844064712524414, 0.025931392669677734, 0.025785120010375976, 0.026359807968139647, 0.025757055282592773, 0.02571676826477051, 0.02564566421508789, 0.025608192443847655, 0.025660512924194336, 0.025767839431762696, 0.025607168197631838, 0.025640031814575196, 0.025960800170898437, 0.02563539123535156, 0.02540278434753418, 0.025399007797241212, 0.02545961570739746, 0.025526304244995118, 0.025536447525024413, 0.025788415908813478, 0.02977382469177246, 0.02631270408630371, 0.025806528091430664, 0.025757951736450194, 0.025654367446899414, 0.025439199447631837, 0.025417472839355467, 0.025481472015380858, 0.025298944473266603, 0.025392831802368163, 0.025284576416015624, 0.02546928024291992, 0.02607923126220703, 0.025622528076171876, 0.02548940849304199, 0.02556662368774414, 0.02560438346862793, 0.025727296829223634, 0.025665056228637694, 0.02558950424194336, 0.025688800811767578, 0.02572697639465332, 0.026355136871337892, 0.025649728775024413, 0.025483264923095703, 0.025460224151611328, 0.025790912628173828, 0.02600559997558594, 0.028651487350463866, 0.025843711853027345, 0.02566147232055664, 0.025777408599853516, 0.025567743301391603, 0.02556540870666504, 0.025466495513916016, 0.025462207794189454, 0.025564191818237304, 0.025575199127197266]",tokens/s,38.999870848647426,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8221.0816,11251.089408,0.0,10848.567296,10616.027648,s,1,14.7542509765625,14.7542509765625,0.0,14.7542509765625,14.7542509765625,14.7542509765625,14.7542509765625,[14.7542509765625],,kWh,0.0002222481717166071,2.450827772856752e-05,7.083338999999689e-05,0.00031758983944517153,,MB,4012.146688,11672.61696,0.0,11255.414784,11070.470656,s,10,3.778672210693359,0.3778672210693359,0.0012949928879469172,0.3780647277832031,0.3790864135742188,0.37954586181640626,0.37991342041015624,"[0.3753695068359375, 0.3783009643554687, 0.37747482299804686, 0.37762841796875, 0.37602972412109376, 0.37871096801757814, 0.3789843139648438, 0.37833969116210936, 0.3778284912109375, 0.3800053100585937]",tokens/s,677.4866559622166,kWh,1.107505091280933e-05,1.22137531370735e-06,7.3906334845190795e-06,1.9687059711035758e-05,tokens/kWh,13003465.41116533,MB,4016.467968,11674.714112,0.0,11257.511936,11070.473216,s,10,28.987114501953123,2.8987114501953126,0.004205360931723675,2.900492431640625,2.9018157470703128,2.9039100708007815,2.9055855297851565,"[2.892024658203125, 2.893275390625, 2.894080322265625, 2.897096435546875, 2.90109033203125, 2.900222412109375, 2.900762451171875, 2.901207763671875, 2.901350341796875, 2.90600439453125]",tokens/s,21.7337948541774,kWh,9.157713112927312e-05,1.0101173200335595e-05,6.064139316327997e-05,0.00016231969749288868,tokens/kWh,388122.95102238015,,s,630,28.984397979736347,0.04600698092021639,0.00042125973936417855,0.0459714241027832,0.046411430740356446,0.046513708877563474,0.04809023303985596,"[0.047215423583984374, 0.04560281753540039, 0.04542668914794922, 0.04531398391723633, 0.04536134338378906, 0.04542585754394531, 0.04531475067138672, 0.04557731246948242, 0.0458260498046875, 0.04583676910400391, 0.04577734375, 0.046104095458984376, 0.0458265266418457, 0.04588544082641602, 0.04595318222045899, 0.045587711334228516, 0.04579292678833008, 0.0459304313659668, 0.04583475112915039, 0.045822303771972654, 0.04593801498413086, 0.0460173454284668, 0.0456267204284668, 0.04554179382324219, 0.04584668731689453, 0.04568054580688476, 0.04539632034301758, 0.04533174514770508, 0.04558467102050781, 0.045754657745361325, 0.04564169692993164, 0.04593052673339844, 0.04597964859008789, 0.04591756820678711, 0.04639807891845703, 0.04619027328491211, 0.045941055297851564, 0.045965312957763675, 0.04582332611083984, 0.04646956634521485, 0.04606508636474609, 0.04610710525512695, 0.04607577514648437, 0.046045631408691404, 0.045758174896240233, 0.046061344146728515, 0.04604761505126953, 0.04604227066040039, 0.045904865264892576, 0.04637491226196289, 0.045927776336669925, 0.04568700790405274, 0.04611123275756836, 0.04598777770996094, 0.04599321746826172, 0.04603100967407227, 0.04622735977172852, 0.046037696838378904, 0.04577667236328125, 0.046200736999511716, 0.04642556762695312, 0.04631228637695312, 0.04618239974975586, 
0.0484516487121582, 0.04614937591552734, 0.04523404693603516, 0.04517529678344727, 0.045363296508789064, 0.04542838287353516, 0.04551852798461914, 0.04544137573242187, 0.04568662261962891, 0.045728126525878904, 0.045671871185302734, 0.0460252799987793, 0.04572979354858398, 0.045530849456787106, 0.04534096145629883, 0.04572979354858398, 0.04576860809326172, 0.045633663177490236, 0.04564889526367188, 0.04585283279418945, 0.04582892990112305, 0.046002079010009765, 0.04587731170654297, 0.04623977661132812, 0.04579235076904297, 0.04565423965454102, 0.04583699035644531, 0.04569887924194336, 0.04678675079345703, 0.045537281036376956, 0.045663936614990235, 0.04594220733642578, 0.04578335952758789, 0.04594700622558594, 0.04593692779541016, 0.04597980880737305, 0.046266368865966793, 0.046058944702148434, 0.045969982147216794, 0.045778976440429685, 0.045631038665771485, 0.0464318733215332, 0.04655593490600586, 0.046118431091308594, 0.046006462097167966, 0.04615804672241211, 0.0459059829711914, 0.045658302307128903, 0.046024513244628903, 0.04610867309570312, 0.04590940856933594, 0.04613987350463867, 0.04611894226074219, 0.04586095809936523, 0.04569702529907226, 0.04597139358520508, 0.04614352035522461, 0.045953056335449216, 0.04583833694458008, 0.046450080871582033, 0.04619113540649414, 0.04622342300415039, 0.046241790771484374, 0.04868566513061524, 0.04623097610473633, 0.045478145599365236, 0.04537785720825195, 0.04529308700561523, 0.04516675186157226, 0.04538761520385742, 0.04546403121948242, 0.045728897094726564, 0.04581260681152344, 0.045520896911621096, 0.045467647552490234, 0.04556137466430664, 0.045488704681396486, 0.04550841522216797, 0.04561315155029297, 0.04618979263305664, 0.046233665466308596, 0.04590998458862305, 0.045835006713867185, 0.04603433609008789, 0.04599049758911133, 0.04589353561401367, 0.045809761047363284, 0.046325504302978514, 0.04585087966918945, 0.04566409683227539, 0.04592246246337891, 0.04586665725708008, 0.04566198348999023, 0.045857345581054684, 0.045878814697265624, 0.045916641235351566, 0.04592351913452149, 0.045963966369628906, 0.04578675079345703, 0.045720062255859374, 0.04593030548095703, 0.046006462097167966, 0.04597145462036133, 0.04594278335571289, 0.04631347274780274, 0.0462408332824707, 0.04606422424316406, 0.04610047912597656, 0.046276958465576175, 0.04607385635375977, 0.0458260498046875, 0.04606972885131836, 0.04607388687133789, 0.04583833694458008, 0.045764606475830076, 0.045962528228759764, 0.04609916687011719, 0.045742080688476565, 0.046077953338623044, 0.04614348983764648, 0.04614921569824219, 0.04605788803100586, 0.04665753555297852, 0.046282527923583984, 0.04595449447631836, 0.04617724609375, 0.04797849655151367, 0.04580524826049805, 0.04543257522583008, 0.04539411163330078, 0.04523196792602539, 0.045756671905517576, 0.045666656494140624, 0.04569491195678711, 0.0455840950012207, 0.046295326232910154, 0.045795326232910154, 0.0458342399597168, 0.04558761596679688, 0.04547055816650391, 0.04563529586791992, 0.04584710311889648, 0.045926113128662106, 0.04633929443359375, 0.0457674560546875, 0.045918144226074216, 0.045951038360595706, 0.04576019287109375, 0.045918529510498046, 0.04589363098144531, 0.04562944030761719, 0.04561100769042969, 0.04571750259399414, 0.045929630279541014, 0.04570169448852539, 0.04590991973876953, 0.0463507194519043, 0.046102176666259764, 0.045692768096923825, 0.04563804626464844, 0.04597155380249023, 0.045946304321289065, 0.04595769500732422, 0.04619059371948242, 0.04598374557495117, 0.0458568000793457, 0.046436321258544924, 
0.04623097610473633, 0.04605356979370117, 0.04611520004272461, 0.04589990234375, 0.04570460891723633, 0.04583817672729492, 0.04611670303344727, 0.04615248107910156, 0.046243839263916016, 0.04611801528930664, 0.04655193710327148, 0.04629913711547851, 0.045963264465332034, 0.0459172477722168, 0.0461578254699707, 0.04604819107055664, 0.04621721649169922, 0.046511455535888674, 0.04625209426879883, 0.046389854431152344, 0.04648550415039063, 0.046448638916015625, 0.048458110809326174, 0.04598940658569336, 0.0455316162109375, 0.045459327697753904, 0.045467422485351565, 0.04533411026000977, 0.04543945693969727, 0.04580380630493164, 0.04587238311767578, 0.045505088806152345, 0.045706817626953125, 0.046199424743652344, 0.045832191467285156, 0.04574617767333984, 0.04559209442138672, 0.0459453125, 0.045840385437011716, 0.045830047607421875, 0.04602479934692383, 0.04592639923095703, 0.04639664077758789, 0.04653955078125, 0.04625612640380859, 0.04598988723754883, 0.04589977645874024, 0.04629094314575195, 0.0456921272277832, 0.0455332145690918, 0.04597427368164062, 0.045954238891601565, 0.04576726531982422, 0.04599420928955078, 0.04601391983032226, 0.04596380615234375, 0.046444385528564454, 0.046043296813964844, 0.04618854522705078, 0.04609843063354492, 0.04595219039916992, 0.04609724807739258, 0.046142593383789066, 0.045885726928710936, 0.04643708801269531, 0.04648944091796875, 0.04610047912597656, 0.046063232421875, 0.04626803207397461, 0.04616268920898438, 0.04591820907592774, 0.04606556701660156, 0.04625212860107422, 0.045922080993652345, 0.045875423431396486, 0.046159870147705076, 0.04631321716308594, 0.0458647689819336, 0.0461275520324707, 0.046274559020996094, 0.046343326568603516, 0.046160736083984376, 0.04646092987060547, 0.046599552154541014, 0.04633663940429687, 0.048397567749023436, 0.046269153594970705, 0.04548102569580078, 0.04529840087890625, 0.04539619064331055, 0.045445121765136716, 0.04556185531616211, 0.045453311920166016, 0.045658016204833986, 0.04583612823486328, 0.045873409271240236, 0.045893310546875, 0.04582841491699219, 0.045881343841552735, 0.045832191467285156, 0.04604108810424805, 0.046227455139160156, 0.04594483184814453, 0.04551375961303711, 0.04583932876586914, 0.04636671829223633, 0.046415870666503906, 0.04637081527709961, 0.04615068817138672, 0.04591072082519531, 0.045559104919433595, 0.04585980987548828, 0.04579328155517578, 0.04552009582519531, 0.045478111267089845, 0.04590576171875, 0.04571414566040039, 0.04577836990356445, 0.04596188735961914, 0.04622236633300781, 0.046010913848876955, 0.046206558227539066, 0.04642892837524414, 0.0464793586730957, 0.04617359924316406, 0.046055614471435545, 0.0464285774230957, 0.04652044677734375, 0.04620889663696289, 0.046542591094970706, 0.04627072143554688, 0.04583967971801758, 0.04603500747680664, 0.04606425476074219, 0.0459095344543457, 0.04577737426757812, 0.04609843063354492, 0.046063617706298826, 0.04570521545410156, 0.04590726470947266, 0.04622406387329102, 0.04617420959472656, 0.045914112091064455, 0.04625542449951172, 0.04638899230957031, 0.046416831970214845, 0.046653438568115234, 0.04651555252075195, 0.04844543838500977, 0.045791233062744144, 0.045658111572265625, 0.045475841522216794, 0.04528537750244141, 0.04526278305053711, 0.045365310668945315, 0.04538060760498047, 0.04534783935546875, 0.046155231475830075, 0.046140064239501954, 0.04593446350097656, 0.0457523193359375, 0.045813758850097655, 0.04634624099731445, 0.04627417755126953, 0.04594265747070313, 0.04592876815795898, 0.04597484970092774, 0.04599017715454102, 
0.046234207153320314, 0.046048255920410154, 0.04599087905883789, 0.045574176788330076, 0.04579884719848633, 0.045906494140625, 0.0458666877746582, 0.045821983337402346, 0.045727550506591795, 0.04562483215332031, 0.04566883087158203, 0.0456135368347168, 0.04622127914428711, 0.04615379333496094, 0.045725566864013675, 0.04636275100708008, 0.04637231826782227, 0.04616352081298828, 0.04611119842529297, 0.04600016021728515, 0.04649622344970703, 0.0463007698059082, 0.04673487854003906, 0.04649619293212891, 0.04598419189453125, 0.045780990600585936, 0.04602265548706055, 0.04582195281982422, 0.04564787292480469, 0.04607350540161133, 0.04612665557861328, 0.045924671173095705, 0.046088417053222655, 0.04618675231933594, 0.0461475830078125, 0.045846527099609374, 0.04644454574584961, 0.046411392211914065, 0.046244224548339846, 0.046292991638183595, 0.046467071533203126, 0.046882080078125, 0.04678115081787109, 0.04842707061767578, 0.04619558334350586, 0.045453311920166016, 0.04537139129638672, 0.04523782348632813, 0.04574995040893555, 0.045540096282958985, 0.04545536041259766, 0.045699073791503904, 0.04595507049560547, 0.045914112091064455, 0.045864959716796876, 0.045778976440429685, 0.045766494750976563, 0.04593062210083008, 0.04616806411743164, 0.04598515319824219, 0.04593113708496094, 0.04581574249267578, 0.045787200927734376, 0.046018558502197264, 0.04597964859008789, 0.04585635375976563, 0.04624947357177735, 0.04604099273681641, 0.045466625213623046, 0.04582537460327148, 0.04568332672119141, 0.04560192108154297, 0.04574710464477539, 0.04593868637084961, 0.04632556915283203, 0.04614499282836914, 0.046035678863525394, 0.045948928833007815, 0.046094337463378904, 0.04634400177001953, 0.04631929779052734, 0.04642044830322266, 0.046225440979003905, 0.04612704086303711, 0.046098495483398436, 0.04624153518676758, 0.04586931228637695, 0.046464447021484376, 0.04623811340332031, 0.04608217620849609, 0.04601628875732422, 0.045922561645507814, 0.04624390411376953, 0.04595296096801758, 0.04611686325073242, 0.04630073547363281, 0.04626681518554687, 0.04622342300415039, 0.04627040100097656, 0.04633190536499023, 0.04593459320068359, 0.046144927978515625, 0.046422046661376955, 0.046346817016601566, 0.0463171501159668, 0.046690784454345706, 0.048135871887207034, 0.04584080123901367, 0.04542454528808594, 0.045411487579345704, 0.045507423400878905, 0.045692928314208986, 0.045428703308105466, 0.04572073745727539, 0.04573430252075195, 0.045596736907958985, 0.04570169448852539, 0.04596105575561524, 0.04577280044555664, 0.04561056137084961, 0.045735424041748046, 0.046344734191894534, 0.0460081901550293, 0.045736480712890625, 0.04583983993530273, 0.04625862503051758, 0.04622662353515625, 0.046232414245605466, 0.04601862335205078, 0.04582195281982422, 0.04549222564697265, 0.045805438995361325, 0.046035072326660154, 0.046071807861328126, 0.04562739181518555, 0.04565401458740234, 0.0459073600769043, 0.045986366271972653, 0.046141471862792965, 0.04615167999267578, 0.04596684646606446, 0.04577097702026367, 0.046206336975097656, 0.04609891128540039, 0.046152065277099606, 0.046363777160644534, 0.04642502212524414, 0.046260223388671876, 0.04622761535644531, 0.04670169448852539, 0.04642214584350586, 0.045814369201660154, 0.04576607894897461, 0.046378753662109376, 0.04607263946533203, 0.045997825622558594, 0.046235038757324216, 0.0459764175415039, 0.04608777618408203, 0.046117279052734376, 0.04658790588378906, 0.04633305740356446, 0.04640652847290039, 0.04625158309936524, 0.04638883209228516, 0.04628361511230469, 0.04640774536132813, 
0.04641334533691406, 0.046333694458007814, 0.047859104156494144, 0.046113601684570314, 0.045763713836669925, 0.04535587310791016, 0.04540156936645508, 0.045623199462890625, 0.04589648056030273, 0.04575129699707031, 0.045679615020751956, 0.04558233642578125, 0.04614963150024414, 0.045811649322509765, 0.045758464813232425, 0.045819969177246095, 0.04588544082641602, 0.04593868637084961, 0.04617340850830078, 0.04591836929321289, 0.045529151916503904, 0.04598390579223633, 0.04672079849243164, 0.04630387115478515, 0.046036991119384765, 0.04612710571289062, 0.0459304313659668, 0.04589779281616211, 0.045792545318603516, 0.04641843032836914, 0.046180065155029294, 0.04597196960449219, 0.045987617492675784, 0.045854942321777344, 0.04611481475830078, 0.04644659042358398, 0.046384639739990234, 0.04614604949951172, 0.04616396713256836, 0.04666777420043945, 0.04609759902954102, 0.04562201690673828, 0.0461409912109375, 0.04626073455810547, 0.04621516799926758, 0.046548545837402346, 0.04636511993408203, 0.04614348983764648, 0.04599808120727539, 0.046102527618408204, 0.04630323028564453, 0.04610585784912109, 0.046107391357421874, 0.04622931289672851, 0.0466126708984375, 0.04621686553955078, 0.04618659210205078, 0.046340351104736326, 0.04615750503540039, 0.046626529693603515, 0.0466253776550293, 0.046325759887695314, 0.046358528137207033, 0.046481407165527344, 0.04641177749633789]",tokens/s,21.73583182374351,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8220.889088,11251.089408,0.0,10848.567296,10616.027648,s,1,14.6040859375,14.6040859375,0.0,14.6040859375,14.6040859375,14.6040859375,14.6040859375,[14.6040859375],,kWh,0.00021816509227910502,2.4058117512854613e-05,6.991561148800285e-05,0.0003121388212799625,,MB,4007.624704,11672.61696,0.0,11255.414784,11070.470656,s,10,3.773899871826172,0.3773899871826172,0.0014336255658235245,0.3770820770263672,0.37933291015625,0.3794244812011719,0.3794977380371094,"[0.375162841796875, 0.3783441162109375, 0.3757893981933594, 0.3767429504394531, 0.37742120361328124, 0.37870465087890626, 0.3764412841796875, 0.37646481323242187, 0.37951605224609375, 0.37931256103515626]",tokens/s,678.3433813683108,kWh,1.1050601440279528e-05,1.2186753214425493e-06,7.323822731481117e-06,1.9593099493203193e-05,tokens/kWh,13065824.531172615,MB,4011.941888,11674.714112,0.0,11257.511936,11070.473216,s,10,28.953698242187503,2.8953698242187498,0.0037722865065878642,2.8956547851562497,2.8999647949218748,2.900514990234375,2.900955146484375,"[2.89017041015625, 2.890867431640625, 2.89034228515625, 2.89427783203125, 2.89509130859375, 2.896823486328125, 2.89621826171875, 2.89899951171875, 2.899842529296875, 
2.901065185546875]",tokens/s,21.758878424796436,kWh,9.091319783680876e-05,1.002794643280637e-05,6.0516703763121924e-05,0.00016145784803273705,tokens/kWh,390194.72120813956,,s,630,28.95099586868284,0.045953961696322,0.0004376500308542936,0.045919343948364263,0.046312018585205075,0.046438404273986815,0.04820088878631592,"[0.048327838897705075, 0.045926593780517576, 0.045456031799316406, 0.04514815902709961, 0.04519071960449219, 0.04532988739013672, 0.0454051513671875, 0.0456888313293457, 0.0457523193359375, 0.045532478332519534, 0.04558713531494141, 0.045723648071289064, 0.04599808120727539, 0.0457523193359375, 0.045445087432861325, 0.0454062385559082, 0.04582313537597656, 0.045988704681396486, 0.04590591812133789, 0.04528745651245117, 0.04615574264526367, 0.04640768051147461, 0.04593459320068359, 0.04580454254150391, 0.04571443176269531, 0.04545536041259766, 0.045557441711425783, 0.04591443252563476, 0.045698944091796874, 0.04542172622680664, 0.04568339157104492, 0.046284446716308596, 0.0460392951965332, 0.045599105834960935, 0.045778942108154294, 0.04590121459960937, 0.04585123062133789, 0.04584447860717773, 0.046193824768066404, 0.04593721771240234, 0.045621536254882813, 0.045854270935058596, 0.046543006896972654, 0.04619481658935547, 0.04603472137451172, 0.04613062286376953, 0.04606662368774414, 0.04582515335083008, 0.0458023681640625, 0.046061569213867185, 0.045862430572509764, 0.04563974380493164, 0.04613571166992188, 0.04606771087646484, 0.04607590484619141, 0.04593459320068359, 0.04632777786254883, 0.04611484909057617, 0.04593606567382812, 0.04609695816040039, 0.04602880096435547, 0.04578646469116211, 0.045886112213134767, 0.04796006393432617, 0.04551475143432617, 0.04518912124633789, 0.045174110412597654, 0.04542940902709961, 0.04566543960571289, 0.04548041534423828, 0.04521945571899414, 0.04546636962890625, 0.04576870346069336, 0.04546067047119141, 0.045300159454345706, 0.04530828857421875, 0.0457154541015625, 0.04578713607788086, 0.04588339233398438, 0.046192192077636716, 0.04611078262329102, 0.045691265106201175, 0.04573139190673828, 0.046301406860351564, 0.04598540878295899, 0.04583689498901367, 0.04580147171020508, 0.045625343322753906, 0.045416160583496096, 0.045518943786621094, 0.04554179382324219, 0.04587804794311524, 0.04582700729370117, 0.04583395385742187, 0.04607977676391602, 0.045898303985595704, 0.045723648071289064, 0.045907711029052736, 0.04603020858764648, 0.04635228729248047, 0.04606256103515625, 0.046185760498046874, 0.04592303848266602, 0.0457154541015625, 0.04580966567993164, 0.046395198822021484, 0.046145374298095704, 0.04605110549926758, 0.04600889587402344, 0.045823040008544924, 0.045566558837890625, 0.045888927459716795, 0.04607891082763672, 0.04611654281616211, 0.04578131103515625, 0.04603084945678711, 0.04607590484619141, 0.045888927459716795, 0.04593519973754883, 0.046565376281738284, 0.0462658576965332, 0.046108257293701174, 0.04624272155761719, 0.04617216110229492, 0.046024543762207035, 0.0461416015625, 0.0484249267578125, 0.045986560821533205, 0.0456703987121582, 0.045262847900390625, 0.045110305786132815, 0.04558681488037109, 0.045443679809570314, 0.04523622512817383, 0.04549216079711914, 0.045690433502197265, 0.04550403213500977, 0.04570211029052734, 0.04573388671875, 0.04542668914794922, 0.04616806411743164, 0.04581942367553711, 0.04574566268920898, 0.045687774658203124, 0.04559667205810547, 0.04575174331665039, 0.04599580764770508, 0.045957920074462894, 0.04550582504272461, 0.045636161804199216, 0.04576009750366211, 0.045606815338134765, 
0.04553385543823242, 0.045846527099609374, 0.045700126647949216, 0.04561779022216797, 0.04569027328491211, 0.04581043243408203, 0.04584576034545899, 0.04576147079467773, 0.04628815841674805, 0.045933086395263674, 0.04579484939575195, 0.04576681518554687, 0.04593305587768555, 0.046399486541748046, 0.045948928833007815, 0.04572751998901367, 0.046052894592285155, 0.046200927734375, 0.046002784729003904, 0.045991294860839846, 0.04583078384399414, 0.045755615234375, 0.0457224006652832, 0.04594476699829102, 0.04616198348999023, 0.04602470397949219, 0.046163326263427736, 0.046084735870361326, 0.04610662460327149, 0.045742080688476565, 0.04610047912597656, 0.04615119934082031, 0.04613987350463867, 0.04617203140258789, 0.04640681457519531, 0.046180767059326173, 0.04603318405151367, 0.04778153610229492, 0.04567097473144531, 0.04562444686889648, 0.04538662338256836, 0.045350910186767575, 0.045203102111816405, 0.045807968139648436, 0.04561100769042969, 0.045434879302978515, 0.04575596618652344, 0.0458853759765625, 0.045684257507324216, 0.0458106575012207, 0.045577503204345705, 0.04596521759033203, 0.04576102447509765, 0.045785408020019534, 0.045962654113769534, 0.045741790771484374, 0.04536409759521484, 0.04576870346069336, 0.04604313659667969, 0.04593782424926758, 0.04585542297363281, 0.045808895111083985, 0.04566019058227539, 0.0457573127746582, 0.045524158477783204, 0.045890369415283204, 0.045836448669433594, 0.04595644760131836, 0.04608041763305664, 0.04609001541137695, 0.045914241790771484, 0.04580166244506836, 0.045879295349121094, 0.04593664169311523, 0.04595507049560547, 0.04610662460327149, 0.046239585876464845, 0.046043296813964844, 0.04552207946777344, 0.046232414245605466, 0.04624697494506836, 0.0461992301940918, 0.046100990295410156, 0.04634009552001953, 0.046088191986083986, 0.04569689559936523, 0.04568278503417969, 0.04624323272705078, 0.046027393341064454, 0.04600627136230469, 0.04643590545654297, 0.04619494247436524, 0.04588768005371094, 0.04614348983764648, 0.04658572769165039, 0.04626752090454102, 0.046185375213623044, 0.04642416000366211, 0.04621721649169922, 0.04601241683959961, 0.0482342414855957, 0.046257568359375, 0.04549923324584961, 0.045246463775634765, 0.04536463928222656, 0.04562799835205078, 0.045625343322753906, 0.04556595230102539, 0.04564787292480469, 0.04568678283691406, 0.045911487579345704, 0.04575209426879883, 0.04571625518798828, 0.04554492950439453, 0.045739681243896484, 0.04553388977050781, 0.0457504653930664, 0.04587014389038086, 0.04596012878417969, 0.04565606307983398, 0.0458072624206543, 0.0462749137878418, 0.04592639923095703, 0.0458526725769043, 0.04567244720458984, 0.04563328170776367, 0.04561452865600586, 0.04559715270996094, 0.04600166320800781, 0.0460296630859375, 0.04588470458984375, 0.04612374496459961, 0.045916160583496096, 0.04589904022216797, 0.04591689682006836, 0.04612908935546875, 0.04595513534545898, 0.04563299179077149, 0.04604367828369141, 0.04631119918823242, 0.046185791015625, 0.04642604827880859, 0.04653728103637695, 0.04617660903930664, 0.045803585052490235, 0.046106334686279296, 0.046089950561523436, 0.04576835250854492, 0.04561395263671875, 0.04633603286743164, 0.0460648307800293, 0.04583481597900391, 0.04601676940917969, 0.04605132675170898, 0.045862911224365234, 0.045916160583496096, 0.04629708862304688, 0.04613324737548828, 0.04593030548095703, 0.04616329574584961, 0.046572383880615235, 0.046335262298583986, 0.04619926452636719, 0.04906681442260742, 0.04613324737548828, 0.045416065216064457, 0.04532249450683594, 0.04518633651733398, 
0.04584124755859375, 0.04540348815917969, 0.04534134292602539, 0.045663745880126956, 0.04568307113647461, 0.04573603057861328, 0.04569705581665039, 0.04552703857421875, 0.045856769561767576, 0.04569683074951172, 0.04571564865112305, 0.04589158248901367, 0.045758464813232425, 0.04574617767333984, 0.04557398223876953, 0.04641193771362305, 0.04587724685668945, 0.04589311981201172, 0.04598150253295898, 0.045668224334716796, 0.04571424102783203, 0.04589158248901367, 0.04580499267578125, 0.04570374298095703, 0.04629094314575195, 0.046088191986083986, 0.04586415863037109, 0.045519329071044924, 0.04593407821655274, 0.04632249450683594, 0.04600783920288086, 0.04577123260498047, 0.04646297454833984, 0.045963264465332034, 0.045696128845214845, 0.046114974975585935, 0.046205120086669924, 0.04611052703857422, 0.04621180725097656, 0.04628012847900391, 0.04602899169921875, 0.045811744689941404, 0.045881694793701175, 0.04629708862304688, 0.04604252624511719, 0.045816417694091796, 0.04601161575317383, 0.0461627197265625, 0.04603500747680664, 0.04657129669189453, 0.04637712097167969, 0.04616339111328125, 0.0461475830078125, 0.04655366516113281, 0.04614131164550781, 0.045879425048828124, 0.04613324737548828, 0.046450687408447267, 0.048895423889160156, 0.04614595031738281, 0.045596832275390624, 0.04561423873901367, 0.045425502777099606, 0.04543900680541992, 0.045620479583740235, 0.045640415191650394, 0.0458158073425293, 0.045502334594726565, 0.045682559967041014, 0.04608147048950195, 0.04578796768188476, 0.045434879302978515, 0.045725696563720705, 0.04573798370361328, 0.0458804817199707, 0.04581036758422852, 0.04566236877441406, 0.04622848129272461, 0.045892288208007816, 0.04623187255859375, 0.0460494384765625, 0.04583744049072266, 0.045546016693115234, 0.04581324768066406, 0.04626233673095703, 0.045750240325927734, 0.0458856315612793, 0.04594736099243164, 0.046063617706298826, 0.046087455749511716, 0.0461519660949707, 0.04591212844848633, 0.04572547149658203, 0.045773406982421876, 0.04590796661376953, 0.045981182098388675, 0.04575388717651367, 0.04583932876586914, 0.04618403244018555, 0.04613955307006836, 0.04606592178344727, 0.04604099273681641, 0.04619683074951172, 0.04604927825927734, 0.045846527099609374, 0.046321025848388674, 0.04609254455566406, 0.04583871841430664, 0.045727680206298825, 0.04605548858642578, 0.045889408111572265, 0.04579135894775391, 0.04612496185302734, 0.046178176879882814, 0.046096607208251955, 0.04621286392211914, 0.046405887603759764, 0.0462110710144043, 0.04580147171020508, 0.04615971374511719, 0.046384769439697264, 0.0490041618347168, 0.04615001678466797, 0.045663585662841795, 0.045588321685791015, 0.045367328643798825, 0.04523420715332031, 0.04532096099853516, 0.045625343322753906, 0.045655296325683596, 0.04567078399658203, 0.04553561782836914, 0.04618985748291016, 0.04603772735595703, 0.04570111846923828, 0.045722625732421876, 0.04576959991455078, 0.04554764938354492, 0.04571136093139649, 0.045751457214355466, 0.04605817413330078, 0.04595113754272461, 0.04626432037353516, 0.04628070449829102, 0.04622537612915039, 0.045921504974365236, 0.0459251823425293, 0.04572979354858398, 0.04566425704956055, 0.0457496337890625, 0.045916126251220706, 0.04586540985107422, 0.04576399993896484, 0.04588003158569336, 0.04599148941040039, 0.04593436813354492, 0.04580553436279297, 0.046213920593261716, 0.04622335815429687, 0.04611072158813476, 0.04623680114746094, 0.046058368682861325, 0.04625612640380859, 0.04624499130249023, 0.04640652847290039, 0.04647116851806641, 0.04644851303100586, 
0.04628611373901367, 0.04621091079711914, 0.04602982330322265, 0.04563558578491211, 0.046034366607666015, 0.046266239166259764, 0.046102272033691404, 0.04582880020141602, 0.04606592178344727, 0.046137344360351565, 0.04564937591552734, 0.04590646362304687, 0.04630732727050781, 0.046166015625, 0.04634550476074219, 0.04650467300415039, 0.046405441284179685, 0.048695102691650394, 0.04598134231567383, 0.0456242561340332, 0.045628673553466795, 0.045228801727294925, 0.04535862350463867, 0.04533436965942383, 0.04546419143676758, 0.04551475143432617, 0.04563558578491211, 0.04566947174072265, 0.0459785270690918, 0.045795326232910154, 0.04582377624511719, 0.04596758270263672, 0.045969406127929685, 0.045963264465332034, 0.046243839263916016, 0.04636262512207031, 0.04599990463256836, 0.046145153045654294, 0.04641852951049805, 0.04620848083496094, 0.04577302551269531, 0.04583046340942383, 0.04581171035766601, 0.045550655364990235, 0.04563577651977539, 0.04583257675170899, 0.04585305786132812, 0.0455797119140625, 0.045677024841308596, 0.04593878555297851, 0.04605740737915039, 0.0459991683959961, 0.045917182922363284, 0.046152801513671876, 0.04604611206054687, 0.046034942626953124, 0.04631939315795899, 0.046162143707275394, 0.04628889465332031, 0.04635631942749024, 0.046491519927978515, 0.04613942337036133, 0.04618675231933594, 0.046440448760986325, 0.046159870147705076, 0.04577606582641602, 0.04605011367797852, 0.04627366256713867, 0.045835262298583986, 0.04564275360107422, 0.04603369522094727, 0.046270561218261716, 0.04604451370239258, 0.04611753463745117, 0.046282176971435544, 0.04640825653076172, 0.04624998474121094, 0.046655006408691406, 0.046442977905273436, 0.04625408172607422, 0.048119232177734374, 0.0459224967956543, 0.04559500885009766, 0.04541987228393555, 0.04533520126342774, 0.04559212875366211, 0.045480384826660156, 0.04538092803955078, 0.045478591918945314, 0.04611654281616211, 0.04608028793334961, 0.045722942352294925, 0.04571209716796875, 0.045723648071289064, 0.04550380706787109, 0.04575017547607422, 0.04616476821899414, 0.04630835342407227, 0.04600038528442383, 0.04620163345336914, 0.04600726318359375, 0.045949951171875, 0.04597350311279297, 0.04594483184814453, 0.04580556869506836, 0.04566425704956055, 0.045848575592041016, 0.04567612838745117, 0.04563561630249023, 0.045903457641601565, 0.04626716613769531, 0.04634828948974609, 0.04608153533935547, 0.045959678649902344, 0.04615577697753906, 0.045790271759033205, 0.04581027221679688, 0.04601686477661133, 0.04624140930175781, 0.04648294448852539, 0.04644512176513672, 0.04652012634277344, 0.04625600051879883, 0.04605811309814453, 0.04605977630615234, 0.0465645751953125, 0.04609270477294922, 0.04606291198730469, 0.046097217559814455, 0.045938209533691404, 0.04586928176879883, 0.0460761604309082, 0.04628275299072265, 0.04628684616088867, 0.046170112609863284, 0.04635612869262695, 0.04632390213012695, 0.045893791198730466, 0.04612710571289062, 0.04637283325195313, 0.046399486541748046, 0.046579742431640626, 0.04678246307373047]",tokens/s,21.76090946430929,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) 
Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1172.3776,1109.262336,0.0,706.740224,681.6384,s,1,8.13638427734375,8.13638427734375,0.0,8.13638427734375,8.13638427734375,8.13638427734375,8.13638427734375,[8.13638427734375],,kWh,3.217124067499147e-05,3.541478614607225e-06,1.0050563596014639e-05,4.576328288561333e-05,,MB,1481.433088,1413.349376,0.0,996.1472,949.238272,s,10,0.34973065567016604,0.03497306556701659,0.000400760519890848,0.03498417663574219,0.035526938629150386,0.0355604606628418,0.035587278289794924,"[0.035085376739501954, 0.03483337783813477, 0.03515129470825195, 0.0355939826965332, 0.035519489288330076, 0.03450864028930664, 0.03451801681518555, 0.03488297653198242, 0.03438310241699219, 0.03525439834594726]",tokens/s,7319.918796064474,kWh,8.555630113025822e-07,9.435329168395995e-08,5.256635029677828e-07,1.4755798059543247e-06,tokens/kWh,173491124.61893114,MB,1502.16704,1421.737984,0.0,1004.535808,949.240832,s,10,17.58772900390625,1.7587729003906252,0.014309907393559867,1.7535071411132814,1.7797772338867188,1.7799074768066405,1.780011671142578,"[1.7414859619140626, 1.7516700439453126, 1.7760692138671874, 1.7800377197265624, 1.75534423828125, 1.74103076171875, 1.7500540771484374, 1.76288037109375, 1.7494083251953125, 1.779748291015625]",tokens/s,35.82042911055068,kWh,3.7213702561613084e-05,4.104216324588106e-06,1.5281126920232873e-05,5.659904580643406e-05,tokens/kWh,1113092.9700733274,,s,630,17.585252761840813,0.027913099621969556,0.0005253528450554025,0.027864671707153323,0.028449827957153322,0.028616238594055175,0.029959557762146004,"[0.027993919372558594, 0.027707136154174805, 0.02775904083251953, 0.027623424530029295, 0.027604991912841798, 0.027577760696411133, 0.027920064926147462, 0.02764687919616699, 0.02768403244018555, 0.0276693115234375, 0.02767395210266113, 0.027678752899169923, 0.027896448135375975, 0.027696767807006837, 0.027701631546020507, 0.02772921562194824, 0.028506175994873047, 0.028480127334594728, 0.02822889518737793, 0.028449504852294923, 0.027875328063964845, 0.028096511840820314, 0.027810815811157227, 0.02803993606567383, 0.02788991928100586, 0.028103967666625977, 0.027929311752319337, 0.027885087966918944, 0.028112991333007813, 0.028617088317871093, 0.027934207916259765, 0.02742937660217285, 0.0272992000579834, 0.027703935623168946, 0.027608831405639647, 0.02789606475830078, 0.027678720474243163, 0.02774220848083496, 0.027409599304199218, 0.027263551712036132, 0.02734467124938965, 0.02734886360168457, 0.027380319595336915, 0.02716057586669922, 0.02712918472290039, 0.02734707260131836, 0.027117631912231446, 0.027101663589477538, 0.027149408340454102, 0.027163135528564454, 0.027203744888305664, 0.02753561592102051, 0.02750070381164551, 0.027371360778808595, 0.027340799331665038, 0.027207616806030274, 0.02742851257324219, 0.027199392318725587, 0.027275423049926757, 0.027382368087768554, 0.027305728912353517, 0.027502016067504884, 0.027222591400146483, 0.027187488555908204, 0.027778112411499023, 0.027245439529418946, 0.027362464904785156, 0.027118783950805664, 0.027061983108520506, 0.02710483169555664, 0.02713209533691406, 0.026970367431640625, 0.027030912399291993, 0.02742255973815918, 0.02727510452270508, 0.027077568054199218, 0.02732784080505371, 0.02741321563720703, 0.027285215377807617, 0.027285728454589844, 0.027237951278686525, 0.027113248825073242, 0.02714067268371582, 0.027175008773803713, 
0.027348991394042968, 0.027441343307495116, 0.02722969627380371, 0.027105600357055663, 0.027670495986938475, 0.027207040786743165, 0.027329120635986328, 0.03029408073425293, 0.027883167266845702, 0.027612735748291015, 0.027648319244384767, 0.027903839111328124, 0.027742847442626953, 0.027800735473632814, 0.028267360687255858, 0.028178112030029297, 0.02841779136657715, 0.029064992904663085, 0.02893084716796875, 0.02957107162475586, 0.028497919082641602, 0.028291072845458985, 0.02853193664550781, 0.028427040100097656, 0.028388799667358397, 0.028452735900878906, 0.02847567939758301, 0.028452959060668945, 0.028039487838745117, 0.027955039978027344, 0.027974912643432617, 0.027956127166748047, 0.02791164779663086, 0.02792019271850586, 0.02780022430419922, 0.02781804847717285, 0.02775654411315918, 0.028073984146118162, 0.02779702377319336, 0.0279003849029541, 0.027735584259033202, 0.02885478401184082, 0.027784767150878905, 0.027712383270263673, 0.027758079528808592, 0.02783292770385742, 0.028278751373291014, 0.027962528228759765, 0.028310400009155273, 0.028041215896606447, 0.028092416763305664, 0.02842844772338867, 0.029219839096069337, 0.028148384094238282, 0.028145856857299804, 0.02817638397216797, 0.02860032081604004, 0.028479040145874022, 0.0285413761138916, 0.02845699119567871, 0.02857980728149414, 0.028374624252319337, 0.028530464172363282, 0.028508800506591797, 0.028073728561401366, 0.02825587272644043, 0.02789344024658203, 0.02832009506225586, 0.027992671966552734, 0.027873056411743164, 0.028100032806396485, 0.027904800415039063, 0.02778848075866699, 0.028070560455322267, 0.02780931282043457, 0.027748992919921875, 0.028231456756591798, 0.028411935806274415, 0.027844480514526367, 0.02786911964416504, 0.027909952163696287, 0.02801273536682129, 0.028032928466796874, 0.027804128646850584, 0.027805696487426756, 0.027984128952026368, 0.027850591659545898, 0.028288000106811522, 0.02806399917602539, 0.02819715118408203, 0.028059392929077148, 0.02808310317993164, 0.028020448684692383, 0.028214847564697266, 0.028341791152954102, 0.028447647094726563, 0.02834636878967285, 0.02854911994934082, 0.02870681571960449, 0.028794527053833008, 0.028501728057861327, 0.02857638359069824, 0.02835171127319336, 0.02848204803466797, 0.02822083282470703, 0.028351200103759765, 0.02818771171569824, 0.02800147247314453, 0.028296319961547852, 0.028054079055786132, 0.028247615814208985, 0.028207616806030275, 0.02818662452697754, 0.02792448043823242, 0.027868511199951172, 0.027924736022949218, 0.027873983383178712, 0.028382688522338866, 0.028449024200439453, 0.028132768630981447, 0.028064224243164064, 0.028052928924560547, 0.028862720489501954, 0.028239360809326174, 0.027902816772460936, 0.028045375823974608, 0.028165504455566405, 0.02802140808105469, 0.028203008651733398, 0.028203008651733398, 0.028241567611694336, 0.028161632537841798, 0.028535776138305664, 0.029062944412231444, 0.02853273582458496, 0.028409311294555664, 0.028254304885864258, 0.02832649612426758, 0.028362592697143554, 0.028347936630249024, 0.028467679977416994, 0.02855731201171875, 0.028292160034179687, 0.028095264434814454, 0.028217504501342774, 0.030248960494995116, 0.029665279388427734, 0.028116960525512696, 0.028481504440307618, 0.02790153694152832, 0.02808470344543457, 0.028867967605590822, 0.028070528030395506, 0.028030975341796875, 0.028078367233276367, 0.027966400146484376, 0.02804374313354492, 0.028311647415161133, 0.028366655349731446, 0.028241823196411133, 0.02798028755187988, 0.028056768417358397, 0.027976512908935547, 0.027867136001586915, 
0.027800832748413086, 0.02802560043334961, 0.027865087509155274, 0.027981151580810548, 0.02844700813293457, 0.02836992073059082, 0.028320480346679687, 0.028230688095092774, 0.028101600646972657, 0.028182559967041016, 0.028251775741577147, 0.028125343322753907, 0.028327968597412108, 0.028272224426269532, 0.02836128044128418, 0.029766687393188475, 0.029630624771118164, 0.02863577651977539, 0.02811087989807129, 0.028125343322753907, 0.028004352569580077, 0.028229631423950196, 0.027910144805908203, 0.028303136825561525, 0.028470943450927735, 0.027914751052856446, 0.02801446342468262, 0.027951295852661134, 0.028395519256591797, 0.02801161575317383, 0.028072799682617187, 0.028180639266967775, 0.028024288177490236, 0.02788761520385742, 0.02774003219604492, 0.02796121597290039, 0.027651872634887695, 0.027896223068237306, 0.02781439971923828, 0.028240991592407227, 0.02767660713195801, 0.027726816177368163, 0.0279466552734375, 0.02764771270751953, 0.02736137580871582, 0.02723484802246094, 0.02728473663330078, 0.027237119674682616, 0.02736128044128418, 0.027440736770629883, 0.027124128341674804, 0.027167903900146485, 0.027165536880493165, 0.027256288528442384, 0.027134687423706054, 0.027300895690917967, 0.02729792022705078, 0.02743510437011719, 0.027441728591918946, 0.027662111282348634, 0.027389663696289063, 0.027396608352661132, 0.02721177673339844, 0.027364479064941407, 0.027236736297607422, 0.027392511367797853, 0.027299327850341795, 0.0273305606842041, 0.027322368621826174, 0.027340799331665038, 0.027265024185180665, 0.02731772804260254, 0.027396543502807617, 0.02751456069946289, 0.02765158462524414, 0.027314592361450195, 0.02751535987854004, 0.027430944442749024, 0.02794268798828125, 0.027654367446899412, 0.0279300479888916, 0.027882144927978515, 0.027977088928222656, 0.027783231735229494, 0.031492191314697264, 0.028143999099731445, 0.027894880294799803, 0.028187551498413087, 0.027690208435058594, 0.027796255111694337, 0.027572223663330078, 0.027798688888549805, 0.027937631607055664, 0.027666431427001953, 0.027877376556396483, 0.027954463958740235, 0.027714271545410157, 0.027830047607421873, 0.027461856842041017, 0.02750985527038574, 0.027657119750976563, 0.027730016708374022, 0.027668384552001952, 0.027502592086791993, 0.027684064865112306, 0.02794576072692871, 0.027711488723754882, 0.027737247467041017, 0.02744563293457031, 0.02735487937927246, 0.027447296142578126, 0.02722006416320801, 0.027152992248535155, 0.027187360763549804, 0.027230079650878907, 0.02726911926269531, 0.027410432815551757, 0.027251743316650392, 0.02721686363220215, 0.027082496643066407, 0.02714035224914551, 0.027335807800292967, 0.027302783966064455, 0.02792038345336914, 0.027421695709228516, 0.027775999069213866, 0.027379711151123046, 0.027203264236450194, 0.02750454330444336, 0.027767423629760743, 0.027615232467651366, 0.027698976516723633, 0.027717632293701173, 0.02757040023803711, 0.027665855407714844, 0.02806227111816406, 0.027744064331054686, 0.027715232849121092, 0.02782467269897461, 0.027620960235595703, 0.027719968795776366, 0.027582592010498046, 0.027594751358032226, 0.027578367233276366, 0.027717632293701173, 0.027844608306884764, 0.027672767639160156, 0.027624288558959962, 0.02757526397705078, 0.02769715118408203, 0.02755583953857422, 0.02762678337097168, 0.028141504287719728, 0.028145471572875978, 0.027673791885375977, 0.027690784454345703, 0.02775040054321289, 0.02783171272277832, 0.02829955291748047, 0.027637632369995117, 0.02785228729248047, 0.02782489585876465, 0.02793199920654297, 0.027962303161621092, 
0.027908000946044922, 0.02803875160217285, 0.02784009552001953, 0.02790073585510254, 0.027674623489379883, 0.02751692771911621, 0.02769273567199707, 0.02775196838378906, 0.028338623046875, 0.027934431076049804, 0.027836448669433595, 0.027828832626342774, 0.027673824310302735, 0.0277860164642334, 0.02775593566894531, 0.027716192245483398, 0.027665407180786132, 0.027614208221435548, 0.02774963188171387, 0.02778009605407715, 0.027638879776000977, 0.027729568481445314, 0.02760601615905762, 0.027596799850463868, 0.02794905662536621, 0.028231136322021483, 0.027863584518432617, 0.02772687911987305, 0.027697856903076173, 0.02776473617553711, 0.030619104385375975, 0.027863744735717774, 0.02801033592224121, 0.02792588806152344, 0.027961984634399414, 0.02781123161315918, 0.02798396873474121, 0.027843008041381834, 0.028610624313354493, 0.02777497673034668, 0.027885568618774413, 0.027801631927490234, 0.02782169532775879, 0.028403615951538085, 0.02803321647644043, 0.02788083267211914, 0.027827072143554687, 0.027708831787109374, 0.027774816513061525, 0.027859872817993164, 0.027719520568847657, 0.027673919677734374, 0.02792518424987793, 0.02778678321838379, 0.027738048553466798, 0.027733695983886718, 0.027980640411376954, 0.027648000717163085, 0.02775359916687012, 0.02794175910949707, 0.02775654411315918, 0.02806755256652832, 0.02803536033630371, 0.029105344772338868, 0.027928703308105467, 0.027637887954711914, 0.027542335510253906, 0.02763132858276367, 0.028170272827148436, 0.030996320724487304, 0.028483743667602538, 0.02785411262512207, 0.028299999237060548, 0.027836416244506838, 0.027864255905151368, 0.027841087341308593, 0.027771135330200196, 0.027653535842895507, 0.027830879211425782, 0.028520448684692383, 0.027994144439697267, 0.027991231918334962, 0.027828031539916993, 0.02783535957336426, 0.027842559814453126, 0.02771958351135254, 0.027674720764160155, 0.0277258243560791, 0.027752447128295898, 0.027711488723754882, 0.027399871826171877, 0.02730006408691406, 0.027394048690795897, 0.027312128067016602, 0.027262432098388672, 0.027916479110717773, 0.027328863143920898, 0.027707391738891602, 0.02765779113769531, 0.028074432373046875, 0.027881471633911133, 0.027815359115600586, 0.027634239196777342, 0.02759440040588379, 0.02789321517944336, 0.027661184310913085, 0.02757427215576172, 0.027580064773559572, 0.027578271865844727, 0.027543359756469727, 0.0275599365234375, 0.02758428764343262, 0.027947872161865235, 0.02778112030029297, 0.027910432815551757, 0.02798348808288574, 0.02778041648864746, 0.027920576095581056, 0.027828832626342774, 0.027889663696289063, 0.027807743072509765, 0.02791219139099121, 0.027694847106933592, 0.027801151275634765, 0.027696992874145506, 0.027904863357543944, 0.027696416854858397, 0.027914976119995116, 0.027778112411499023, 0.027867935180664063, 0.027791040420532227, 0.02791881561279297, 0.027772607803344725, 0.027936159133911134, 0.028008575439453124, 0.02818076705932617, 0.027980031967163085, 0.027779327392578126, 0.027677791595458984, 0.027590944290161134, 0.02742927932739258, 0.027358943939208985, 0.02742323112487793, 0.02795315170288086, 0.02759065628051758, 0.027805696487426756, 0.027691007614135742, 0.02793267250061035, 0.027809791564941407, 0.02790755271911621, 0.02780828857421875, 0.028100608825683594, 0.027939935684204102, 0.028105791091918946, 0.027999103546142576, 0.028047359466552735, 0.027962591171264647, 0.028123455047607424, 0.02783020782470703, 0.027637535095214844, 0.02775948715209961, 0.027592416763305663, 0.02765020751953125, 0.027586559295654296, 
0.027639711380004883, 0.02755183982849121, 0.027596799850463868, 0.027467775344848632, 0.02749849510192871, 0.02938275146484375, 0.028225439071655273, 0.02796659278869629, 0.02788150405883789, 0.027779935836791992, 0.027883295059204102, 0.02790403175354004, 0.027860671997070312, 0.028099071502685546, 0.028438528060913085, 0.02833558464050293, 0.028543071746826174, 0.028420543670654295, 0.028337631225585937, 0.02861520004272461, 0.028641151428222655, 0.02865951919555664, 0.028494144439697267, 0.02863532829284668, 0.028256063461303712, 0.02834774398803711, 0.028354911804199218, 0.02861292839050293, 0.02832758331298828, 0.02895292854309082, 0.029161088943481444, 0.028442975997924804, 0.028233760833740233, 0.028107967376708985, 0.02837945556640625, 0.028272192001342775, 0.028138431549072265, 0.028117151260375978, 0.028229087829589845, 0.02878892707824707, 0.03170291137695312, 0.030038335800170898, 0.028227584838867188, 0.028014495849609376, 0.028026847839355468, 0.028000127792358397, 0.027883359909057617, 0.027856319427490235, 0.028637567520141603, 0.02861942481994629, 0.027985343933105467, 0.02801100730895996, 0.02791628837585449, 0.027807743072509765]",tokens/s,35.8254731127363,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 352890 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 353469 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1846.5792,2768.109568,0.0,2365.587456,2314.318336,s,1,9.036365234375,9.036365234375,0.0,9.036365234375,9.036365234375,9.036365234375,9.036365234375,[9.036365234375],,kWh,5.618690720416453e-05,6.190405181315809e-06,1.7523347351999785e-05,7.990065973748012e-05,,MB,1898.520576,3099.459584,0.0,2682.257408,2607.60832,s,10,0.48352464675903317,0.048352464675903324,0.0003477804899576911,0.04826857757568359,0.048533411788940425,0.048943969154357905,0.04927241504669189,"[0.04935452651977539, 0.04826486587524414, 0.04807254409790039, 0.048272289276123044, 0.04824908828735352, 0.04844217681884765, 0.0482872314453125, 0.048120929718017576, 0.048173152923583984, 0.048287841796875]",tokens/s,5294.456067874008,kWh,1.484856418464715e-06,1.6375179276808438e-07,9.805789006497542e-07,2.6291871118825534e-06,tokens/kWh,97368497.98289883,MB,1898.520576,3099.459584,0.0,2682.257408,2607.61088,s,10,13.706199218750001,1.370619921875,0.010775047816095241,1.3732237548828126,1.3850309814453126,1.385641162109375,1.386129306640625,"[1.359454345703125, 1.376085693359375, 1.3724990234375, 1.3862513427734375, 1.373948486328125, 1.3848953857421875, 1.3637572021484374, 1.352936279296875, 1.3774613037109376, 1.35891015625]",tokens/s,45.964602582031915,kWh,3.923490884486323e-05,4.327100776787809e-06,2.002191012354968e-05,6.358391974520073e-05,tokens/kWh,990816.5500406288,,s,630,13.703950056076053,0.021752301676311193,0.00038698172565860715,0.02173595237731934,0.02205950412750244,0.02218456687927246,0.02328615364074707,"[0.022113216400146483, 0.021847488403320313, 0.021741695404052734, 0.02152620887756348, 0.021475488662719727, 0.021395647048950195, 0.021684032440185547, 0.02177903938293457, 0.021575679779052736, 0.02152448081970215, 0.022189376831054687, 0.02159401512145996, 0.021440927505493163, 0.02174208068847656, 0.02150592041015625, 0.021481184005737303, 0.021358528137207032, 0.02136016082763672, 0.021324607849121095, 0.02169036865234375, 0.021366687774658204, 0.0213668155670166, 0.02159212875366211, 0.02167353630065918, 0.021545024871826173, 0.02181158447265625, 0.021507871627807616, 0.021455072402954103, 0.021639167785644533, 0.02146892738342285, 0.021688575744628905, 0.021890687942504882, 0.021784608840942382, 0.021705055236816408, 0.021489664077758788, 0.02146406364440918, 0.02140585517883301, 0.021485824584960938, 0.021361087799072264, 0.02125555229187012, 0.02128348731994629, 0.02136793518066406, 0.021346368789672852, 0.021375936508178713, 0.02146713638305664, 0.021370975494384766, 0.021511743545532227, 0.021590368270874023, 0.021419103622436524, 0.02140611267089844, 0.021342144012451172, 0.021410367965698243, 0.02153267288208008, 
0.021444063186645507, 0.02155120086669922, 0.02159222412109375, 0.022290367126464844, 0.02169625663757324, 0.022049375534057617, 0.021871871948242187, 0.021784959793090822, 0.021670143127441407, 0.02155708885192871, 0.021880895614624023, 0.021630975723266603, 0.0215882568359375, 0.02152988815307617, 0.021589824676513672, 0.021264415740966797, 0.02130156707763672, 0.021420320510864257, 0.02157788848876953, 0.02173936080932617, 0.0216463680267334, 0.02170159912109375, 0.02160358428955078, 0.021596799850463866, 0.02200998306274414, 0.02166169548034668, 0.02147737693786621, 0.0213338565826416, 0.021708959579467772, 0.021520191192626954, 0.021563583374023438, 0.021388864517211913, 0.02153228759765625, 0.021490495681762697, 0.021413888931274414, 0.02133225631713867, 0.021624544143676757, 0.02145715141296387, 0.02160111999511719, 0.021435264587402345, 0.021477407455444335, 0.02148524856567383, 0.02147974395751953, 0.02135980796813965, 0.021373695373535156, 0.021458911895751952, 0.021544160842895507, 0.022178688049316407, 0.02151571273803711, 0.02149782371520996, 0.022001632690429686, 0.023290496826171875, 0.021934080123901366, 0.021843967437744142, 0.02202828788757324, 0.021788576126098632, 0.02231100845336914, 0.02183123207092285, 0.023376319885253908, 0.025311040878295898, 0.02213497543334961, 0.022189823150634766, 0.025481056213378907, 0.021620351791381835, 0.02407846450805664, 0.021790815353393556, 0.021451328277587892, 0.021331968307495116, 0.02149580764770508, 0.021757951736450197, 0.021604352951049805, 0.021565216064453125, 0.021655647277832032, 0.022220895767211913, 0.021987360000610353, 0.021792032241821288, 0.02164192008972168, 0.021485055923461914, 0.021321407318115236, 0.021478208541870117, 0.021550912857055664, 0.02177039909362793, 0.02163100814819336, 0.02169046401977539, 0.02163907241821289, 0.021639167785644533, 0.021583871841430666, 0.02161836814880371, 0.02159823989868164, 0.021625120162963866, 0.021606399536132814, 0.022487039566040038, 0.021807104110717773, 0.021699647903442382, 0.021861312866210937, 0.02188697624206543, 0.021835199356079103, 0.02184454345703125, 0.021896383285522462, 0.021809343338012696, 0.021741695404052734, 0.0217271671295166, 0.02168275260925293, 0.02173084831237793, 0.021811679840087892, 0.02176790428161621, 0.021667680740356444, 0.021731807708740235, 0.021850080490112306, 0.02214297676086426, 0.022058464050292968, 0.02196944046020508, 0.021794815063476563, 0.021817312240600586, 0.021723167419433594, 0.021790847778320313, 0.021987199783325195, 0.022108160018920898, 0.021843551635742187, 0.021857791900634766, 0.021967775344848634, 0.021772287368774415, 0.021685472488403322, 0.0217423038482666, 0.021757535934448242, 0.0217640323638916, 0.02161033630371094, 0.021787328720092772, 0.021817344665527344, 0.02178793525695801, 0.021744287490844727, 0.0219169921875, 0.021811967849731446, 0.02176345634460449, 0.02177292823791504, 0.021760000228881835, 0.02267225646972656, 0.02319548797607422, 0.022157312393188477, 0.02213408088684082, 0.02198940849304199, 0.02187868881225586, 0.022054655075073242, 0.022266431808471678, 0.022024576187133788, 0.0217825927734375, 0.021893119812011717, 0.022142080307006835, 0.02184441566467285, 0.021795263290405275, 0.021741472244262695, 0.021828927993774415, 0.021928735733032226, 0.02170863914489746, 0.021809312820434572, 0.02187161636352539, 0.021752832412719726, 0.02185740852355957, 0.021852319717407226, 0.021889759063720704, 0.022147071838378905, 0.022116352081298828, 0.022144128799438476, 0.022041343688964845, 
0.021997312545776367, 0.021926271438598634, 0.02188287925720215, 0.021931232452392577, 0.02183247947692871, 0.021792543411254882, 0.02208188819885254, 0.021955808639526366, 0.021880960464477538, 0.021770784378051758, 0.02185539245605469, 0.021836671829223633, 0.022155231475830078, 0.02184124755859375, 0.0218753604888916, 0.021764095306396485, 0.022568063735961916, 0.022780351638793946, 0.022839744567871093, 0.02205286407470703, 0.02191564750671387, 0.02190937614440918, 0.021950592041015626, 0.021843967437744142, 0.02203647994995117, 0.021873695373535156, 0.021896160125732422, 0.021821407318115233, 0.02189014434814453, 0.02193027114868164, 0.021974815368652343, 0.021952447891235353, 0.022079999923706056, 0.021768096923828126, 0.022030879974365234, 0.022345727920532226, 0.022107391357421874, 0.022027008056640623, 0.021941728591918945, 0.021836256027221678, 0.02177030372619629, 0.02183193588256836, 0.021939968109130858, 0.02188630485534668, 0.021770591735839843, 0.02189958381652832, 0.022007295608520508, 0.021850624084472657, 0.021774335861206053, 0.021882207870483398, 0.021962432861328124, 0.021905471801757812, 0.021838752746582032, 0.021833728790283204, 0.021831680297851562, 0.021733375549316408, 0.021731103897094727, 0.021780704498291014, 0.021587520599365233, 0.021322175979614257, 0.021784576416015625, 0.022792192459106447, 0.023480319976806642, 0.02150601577758789, 0.021346336364746095, 0.021358591079711914, 0.021348352432250976, 0.021327871322631836, 0.021547008514404296, 0.021589536666870118, 0.02161097526550293, 0.02170591926574707, 0.021949247360229494, 0.021703744888305666, 0.021541536331176756, 0.021635360717773437, 0.02166783905029297, 0.021950464248657226, 0.021691455841064453, 0.021668575286865235, 0.02165782356262207, 0.02171238327026367, 0.02176870346069336, 0.0216494083404541, 0.021698463439941407, 0.0218768310546875, 0.021832927703857422, 0.021756704330444337, 0.02162816047668457, 0.021785343170166015, 0.022032159805297852, 0.021858528137207033, 0.021786624908447266, 0.021812864303588867, 0.021899200439453124, 0.021762208938598634, 0.021700416564941406, 0.021862911224365233, 0.022339679718017577, 0.021975648880004882, 0.022191551208496092, 0.022165983200073243, 0.02191548728942871, 0.021868831634521486, 0.02187414360046387, 0.021846176147460938, 0.021784128189086913, 0.02170150375366211, 0.02170675277709961, 0.022673408508300782, 0.021925888061523437, 0.021803007125854493, 0.021898815155029297, 0.02197110366821289, 0.02198944091796875, 0.021792800903320312, 0.021921983718872072, 0.02180019187927246, 0.02171500778198242, 0.021869056701660155, 0.022068864822387697, 0.021942047119140624, 0.022038719177246095, 0.02196131134033203, 0.02213478469848633, 0.022033824920654296, 0.02193164825439453, 0.02195964813232422, 0.02197699165344238, 0.021977439880371093, 0.021862112045288085, 0.021907487869262696, 0.021984800338745118, 0.021932287216186522, 0.022001888275146483, 0.02202828788757324, 0.022037599563598635, 0.021942655563354493, 0.02189523124694824, 0.021840351104736328, 0.02189030456542969, 0.02200150489807129, 0.021864799499511717, 0.02180726432800293, 0.02202822494506836, 0.021942432403564454, 0.02194576072692871, 0.022332319259643553, 0.02327552032470703, 0.02210358428955078, 0.022129119873046874, 0.022108160018920898, 0.02194396781921387, 0.021948768615722657, 0.021897216796875, 0.022040576934814454, 0.022024192810058595, 0.021977088928222657, 0.02172313690185547, 0.021704704284667968, 0.021784576416015625, 0.02228486442565918, 0.022071296691894532, 0.0221265926361084, 
0.022103935241699218, 0.02204649543762207, 0.02178492736816406, 0.02189833641052246, 0.02177305603027344, 0.021874048233032226, 0.02167046356201172, 0.02180512046813965, 0.021911712646484376, 0.021716543197631836, 0.02171129608154297, 0.02177872085571289, 0.021934911727905272, 0.021728160858154297, 0.021731456756591796, 0.021692287445068358, 0.021858047485351563, 0.021780736923217775, 0.02166374397277832, 0.021755712509155273, 0.02176630401611328, 0.02165353584289551, 0.021562400817871093, 0.02171388816833496, 0.02143027114868164, 0.021374975204467773, 0.02126643180847168, 0.021567520141601564, 0.021467103958129882, 0.02137606430053711, 0.02143942451477051, 0.021342079162597657, 0.021622432708740234, 0.02148121643066406, 0.021607135772705077, 0.021505184173583984, 0.021606815338134765, 0.021805503845214843, 0.02160767936706543, 0.021561376571655272, 0.02154364776611328, 0.021448703765869142, 0.02137638473510742, 0.02144895935058594, 0.021465471267700195, 0.021575679779052736, 0.021274560928344725, 0.02138323211669922, 0.021546207427978515, 0.02137107276916504, 0.021258495330810548, 0.02210646438598633, 0.02153023910522461, 0.021516511917114258, 0.02161065673828125, 0.021554399490356445, 0.02161267280578613, 0.021481695175170897, 0.021497631072998048, 0.02149033546447754, 0.021733024597167968, 0.021656448364257813, 0.021641088485717774, 0.02155465507507324, 0.021490207672119142, 0.021474720001220703, 0.021463647842407226, 0.021525632858276366, 0.02156224060058594, 0.021831680297851562, 0.02177964782714844, 0.021539648056030272, 0.021391359329223633, 0.021364736557006835, 0.021522432327270507, 0.021598207473754884, 0.021538816452026367, 0.021362688064575194, 0.021481472015380858, 0.021468576431274415, 0.021344064712524414, 0.02149238395690918, 0.021493888854980468, 0.021432191848754882, 0.02138934326171875, 0.021987424850463868, 0.021657791137695313, 0.021700416564941406, 0.021602304458618164, 0.021331968307495116, 0.02137014389038086, 0.0215817928314209, 0.021410560607910155, 0.021368831634521485, 0.021309440612792968, 0.021493759155273438, 0.021263935089111327, 0.021410240173339843, 0.021399551391601563, 0.021449823379516602, 0.02150432014465332, 0.021281375885009765, 0.02140105628967285, 0.021383712768554688, 0.021347360610961916, 0.02125632095336914, 0.02147190475463867, 0.021403839111328125, 0.02127257537841797, 0.021483327865600584, 0.021546207427978515, 0.021508960723876952, 0.02139967918395996, 0.021321727752685548, 0.021407743453979493, 0.02142617607116699, 0.021468832015991212, 0.021340511322021485, 0.021440511703491212, 0.021415935516357423, 0.021431423187255858, 0.021441312789916993, 0.02129929542541504, 0.021865215301513672, 0.021549087524414062, 0.02187868881225586, 0.02158758354187012, 0.02158016014099121, 0.022040416717529297, 0.021681983947753905, 0.0214814395904541, 0.021481855392456055, 0.021415647506713868, 0.021596128463745118, 0.021593984603881837, 0.021856704711914064, 0.0215897274017334, 0.021627456665039062, 0.021655263900756835, 0.021704704284667968, 0.021532127380371094, 0.021784223556518555, 0.02189299201965332, 0.021816320419311523, 0.021952512741088868, 0.021805055618286134, 0.021987327575683592, 0.021798912048339843, 0.021741535186767576, 0.02201935958862305, 0.02189516830444336, 0.02190617561340332, 0.021909183502197265, 0.022010175704956055, 0.02203830337524414, 0.022242624282836913, 0.02203126335144043, 0.02206924819946289, 0.02211840057373047, 0.021856000900268555, 0.02194047927856445, 0.02191702461242676, 0.021805728912353516, 0.021851327896118163, 
0.021794815063476563, 0.022108352661132813, 0.02333555221557617, 0.02200124740600586, 0.021759904861450196, 0.02176438331604004, 0.021584096908569335, 0.021825023651123047, 0.02156729507446289, 0.021649280548095704, 0.021608800888061525, 0.022323488235473633, 0.021681663513183593, 0.021723648071289063, 0.021819583892822264, 0.021608448028564452, 0.022855680465698244, 0.022665216445922853, 0.021786624908447266, 0.021866079330444335, 0.02173583984375, 0.02204876708984375, 0.02254198455810547, 0.022151840209960937, 0.021970943450927736, 0.022011775970458985, 0.021886911392211914, 0.02177452850341797, 0.022042623519897463, 0.02189516830444336, 0.02213478469848633, 0.021888959884643556, 0.021878463745117187, 0.021909759521484374, 0.021761760711669922, 0.02175222396850586, 0.02190336036682129, 0.021871904373168945, 0.021736064910888673, 0.02174166488647461, 0.021548479080200196, 0.021510976791381836, 0.02157542419433594, 0.021407743453979493, 0.021522432327270507, 0.02184579277038574, 0.021383487701416015, 0.021511520385742187, 0.02141971206665039, 0.021407743453979493, 0.021452768325805664, 0.021328800201416014, 0.021307392120361326, 0.02147737693786621, 0.021329919815063478, 0.021360639572143555, 0.021403167724609377, 0.021352415084838868, 0.021305055618286134, 0.02138175964355469, 0.02141814422607422, 0.02184601593017578, 0.021569536209106444, 0.02128099250793457, 0.021372095108032226, 0.021475936889648436, 0.02126438331604004, 0.021395456314086913, 0.02130646324157715, 0.021375616073608397, 0.021270015716552734, 0.02126313591003418, 0.02132156753540039, 0.021452959060668946, 0.021331968307495116, 0.021200063705444337, 0.022085664749145507, 0.02141263961791992, 0.0212807674407959, 0.02122444725036621, 0.02125312042236328, 0.021364736557006835, 0.021311487197875977, 0.02130121612548828, 0.02163871955871582]",tokens/s,45.97214652870622,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1174.13888,1109.262336,0.0,706.740224,681.6384,s,1,8.08989892578125,8.08989892578125,0.0,8.08989892578125,8.08989892578125,8.08989892578125,8.08989892578125,[8.08989892578125],,kWh,3.102778032082369e-05,3.4153921805478033e-06,9.87973012597787e-06,4.432290262734936e-05,,MB,1535.066112,1413.349376,0.0,996.1472,949.238272,s,10,0.31854323005676266,0.03185432300567627,0.002472548886372582,0.030983920097351077,0.032472121429443356,0.03585321197509765,0.0385580844116211,"[0.031720767974853514, 0.039234302520751954, 0.031104320526123046, 0.03102582359313965, 0.030994176864624023, 0.030785856246948243, 0.03096700859069824, 0.030973663330078127, 0.03077731132507324, 
0.030959999084472656]",tokens/s,8036.585802008166,kWh,9.126173521354274e-07,1.0064308611548481e-07,5.729535833624323e-07,1.5862140216133447e-06,tokens/kWh,161390579.3996332,MB,1568.722944,1423.835136,0.0,1006.63296,949.240832,s,10,15.38084375,1.538084375,0.0555053323866767,1.5145836791992187,1.5985275512695312,1.6442640197753906,1.6808531945800782,"[1.5883638916015625, 1.69000048828125, 1.5092664794921875, 1.508010009765625, 1.5152440185546876, 1.51392333984375, 1.512622314453125, 1.5093424072265624, 1.5177325439453124, 1.5163382568359376]",tokens/s,40.960041610201,kWh,4.8426861233699076e-05,5.341145395471431e-06,1.8901839774239284e-05,7.26698464034098e-05,tokens/kWh,866934.5418768345,,s,630,15.378701023101828,0.02441063654460604,0.0017473389796751598,0.02392523193359375,0.02450701103210449,0.029884153652191152,0.03190432140350342,"[0.024265663146972656, 0.02428927993774414, 0.024591360092163086, 0.02390323257446289, 0.023883775711059572, 0.02391769599914551, 0.023864191055297853, 0.023863296508789062, 0.023937023162841797, 0.024133087158203125, 0.023974111557006836, 0.024223072052001953, 0.02394211196899414, 0.02405990409851074, 0.02397920036315918, 0.023853504180908203, 0.023927135467529295, 0.023978015899658204, 0.023963647842407225, 0.023910400390625, 0.0238919677734375, 0.0238919677734375, 0.024436735153198243, 0.024303615570068358, 0.02403055953979492, 0.023990943908691408, 0.02415542411804199, 0.023904991149902344, 0.024164287567138672, 0.02397100830078125, 0.02391155242919922, 0.023995199203491212, 0.024101823806762696, 0.024001728057861327, 0.023939903259277345, 0.02387139129638672, 0.023848543167114256, 0.02387388801574707, 0.024029375076293946, 0.023827743530273438, 0.024133567810058595, 0.02407222366333008, 0.024097503662109374, 0.02405580711364746, 0.024002559661865236, 0.024057695388793945, 0.02400668716430664, 0.024029184341430664, 0.02391872024536133, 0.023975936889648438, 0.02395356750488281, 0.024039264678955077, 0.024084224700927734, 0.029773696899414063, 0.031717279434204104, 0.031771392822265626, 0.031771839141845705, 0.031709728240966795, 0.03168220710754394, 0.031668031692504886, 0.03167900848388672, 0.03175833511352539, 0.031596256256103517, 0.03155580711364746, 0.031887168884277346, 0.031462528228759765, 0.03152508735656738, 0.031406719207763674, 0.03155532836914063, 0.0314483528137207, 0.03206460952758789, 0.03159187126159668, 0.031529407501220706, 0.0315570240020752, 0.03164630317687988, 0.031643648147583005, 0.03159244728088379, 0.031366559982299806, 0.03140873527526856, 0.031911327362060544, 0.032248001098632816, 0.032093727111816406, 0.032121185302734376, 0.03221148681640625, 0.03214950561523437, 0.02997452735900879, 0.02409676742553711, 0.024131839752197265, 0.023906047821044923, 0.02386105537414551, 0.02381843185424805, 0.02395110321044922, 0.023961856842041017, 0.02386534309387207, 0.02395136070251465, 0.02397750473022461, 0.023879487991333007, 0.026251935958862306, 0.025161727905273438, 0.02390630340576172, 0.02395955276489258, 0.023889120101928712, 0.023978784561157228, 0.023875167846679687, 0.024652191162109375, 0.025300064086914063, 0.02384783935546875, 0.02391811180114746, 0.02394563293457031, 0.023863359451293944, 0.02386534309387207, 0.02391801643371582, 0.02397145652770996, 0.024025184631347656, 0.023827295303344726, 0.023903583526611327, 0.023796384811401367, 0.02369945526123047, 0.023656160354614257, 0.023904544830322266, 0.023759008407592774, 0.02379724884033203, 0.02427120018005371, 0.02389756774902344, 0.023783008575439454, 
0.023819263458251954, 0.0240120964050293, 0.02381862449645996, 0.023783744812011717, 0.023752927780151367, 0.02386288070678711, 0.02380348777770996, 0.02379427146911621, 0.023851007461547852, 0.02392412757873535, 0.023925344467163087, 0.023836671829223634, 0.024184831619262694, 0.02429702377319336, 0.025004159927368163, 0.02395359992980957, 0.023990400314331056, 0.023977983474731446, 0.023834400177001953, 0.02413590431213379, 0.023810047149658203, 0.02392064094543457, 0.024147968292236328, 0.02469660758972168, 0.02395683288574219, 0.02391075134277344, 0.0238573112487793, 0.024217151641845704, 0.02400499153137207, 0.023901920318603515, 0.024064096450805664, 0.02394537544250488, 0.024002368927001954, 0.02383647918701172, 0.023900672912597655, 0.023849376678466795, 0.023970880508422852, 0.023737279891967774, 0.023918399810791014, 0.023799936294555665, 0.024031488418579102, 0.023789312362670897, 0.02390425682067871, 0.02393087959289551, 0.024016895294189454, 0.02395267105102539, 0.023835487365722656, 0.02391539192199707, 0.02385817527770996, 0.023874656677246094, 0.023829408645629883, 0.023893888473510743, 0.023997983932495116, 0.023939680099487305, 0.0238919677734375, 0.023828031539916993, 0.02408083152770996, 0.02385651206970215, 0.023814144134521483, 0.023898399353027344, 0.023979999542236327, 0.023986560821533203, 0.02389811134338379, 0.023873151779174803, 0.023885055541992186, 0.023841632843017577, 0.023764991760253908, 0.023850656509399413, 0.02414124870300293, 0.023943872451782228, 0.023838943481445312, 0.023887712478637694, 0.023860544204711915, 0.023836896896362304, 0.023865760803222655, 0.023812320709228514, 0.024219200134277342, 0.023967552185058593, 0.023999103546142576, 0.023953407287597657, 0.023874752044677733, 0.023869632720947265, 0.023853696823120118, 0.023977983474731446, 0.023969343185424805, 0.024125888824462892, 0.0240883846282959, 0.023930335998535158, 0.023937759399414064, 0.0239554557800293, 0.023967744827270508, 0.024048864364624025, 0.024410911560058594, 0.025051136016845704, 0.02417804718017578, 0.024146591186523438, 0.024279008865356444, 0.02390163230895996, 0.023875520706176757, 0.02381273651123047, 0.023908287048339843, 0.023959775924682618, 0.02397987174987793, 0.023870559692382814, 0.0239401912689209, 0.024012607574462892, 0.02389811134338379, 0.023840608596801757, 0.02378563117980957, 0.023830528259277343, 0.02397558403015137, 0.02390870475769043, 0.023870784759521483, 0.023761600494384767, 0.02380303955078125, 0.02378428840637207, 0.023789567947387694, 0.023858720779418946, 0.02382464027404785, 0.02374870491027832, 0.023741664886474608, 0.023847135543823242, 0.023808319091796874, 0.023773248672485352, 0.023765216827392577, 0.02370915222167969, 0.02388435173034668, 0.023916383743286133, 0.02388582420349121, 0.02381318473815918, 0.023850976943969728, 0.02379465675354004, 0.023895423889160158, 0.024009344100952148, 0.023908351898193358, 0.023884992599487304, 0.0238907527923584, 0.02393824005126953, 0.02403536033630371, 0.02405855941772461, 0.02399001693725586, 0.02400265693664551, 0.02401417541503906, 0.024049823760986327, 0.024054527282714844, 0.024130783081054687, 0.023945951461791994, 0.023995744705200196, 0.024001087188720703, 0.024045087814331054, 0.024306304931640627, 0.02407151985168457, 0.02405407905578613, 0.02408041572570801, 0.024035776138305664, 0.024057727813720703, 0.02405295944213867, 0.023984928131103516, 0.023975936889648438, 0.02402639961242676, 0.024552160263061524, 0.023946399688720702, 0.023941984176635744, 0.024071903228759767, 
0.024481184005737306, 0.02427788734436035, 0.02410495948791504, 0.024205184936523436, 0.02444816017150879, 0.02452374458312988, 0.02429952049255371, 0.024254016876220703, 0.02409926414489746, 0.02409062385559082, 0.024002559661865236, 0.023852031707763673, 0.02402799987792969, 0.02388803291320801, 0.024940256118774415, 0.0237857608795166, 0.023882911682128905, 0.023826656341552736, 0.023879743576049803, 0.023832351684570312, 0.023890623092651365, 0.023865440368652343, 0.023797760009765623, 0.024252416610717774, 0.023848960876464844, 0.024412160873413087, 0.024151935577392578, 0.023816192626953125, 0.023784671783447266, 0.023800607681274413, 0.02384649658203125, 0.023839136123657227, 0.027742048263549805, 0.025999519348144533, 0.024276895523071287, 0.023881824493408203, 0.023855104446411132, 0.023869056701660157, 0.024076000213623047, 0.02382054328918457, 0.023994720458984375, 0.023871551513671874, 0.024057855606079103, 0.023834623336791993, 0.023876863479614256, 0.024295200347900392, 0.024384479522705078, 0.024184831619262694, 0.024157760620117187, 0.02397612762451172, 0.023887519836425782, 0.023947872161865235, 0.023755840301513672, 0.02383148765563965, 0.02393907165527344, 0.024051328659057618, 0.023925119400024415, 0.023842464447021483, 0.023901983261108397, 0.024240703582763673, 0.02394060707092285, 0.02385971260070801, 0.023962623596191408, 0.024146944046020507, 0.02393087959289551, 0.02378748893737793, 0.02386128044128418, 0.023815839767456055, 0.024026912689208986, 0.02382912063598633, 0.023941055297851562, 0.023901920318603515, 0.023951648712158203, 0.02387459182739258, 0.023862239837646484, 0.023805952072143553, 0.023832576751708984, 0.023717952728271485, 0.023830463409423828, 0.023842975616455077, 0.023865184783935546, 0.023873567581176758, 0.023906272888183595, 0.02386944007873535, 0.023992191314697264, 0.023896192550659178, 0.023863296508789062, 0.023950687408447267, 0.023980031967163085, 0.02413529586791992, 0.024267744064331055, 0.02430156707763672, 0.025257984161376954, 0.02378758430480957, 0.023916479110717773, 0.023768543243408203, 0.02392323112487793, 0.02367692756652832, 0.023745695114135743, 0.024021856307983397, 0.023824127197265624, 0.02386089515686035, 0.023876192092895508, 0.02400841522216797, 0.026515296936035156, 0.023976383209228517, 0.02408857536315918, 0.023891199111938478, 0.024255231857299806, 0.023858720779418946, 0.023822687149047853, 0.02424838447570801, 0.023961664199829102, 0.02397929573059082, 0.02381916809082031, 0.023899967193603516, 0.02390425682067871, 0.02388387107849121, 0.02380124855041504, 0.023857887268066407, 0.02381932830810547, 0.023761632919311524, 0.023795616149902343, 0.023844480514526367, 0.02386582374572754, 0.023822463989257813, 0.02377289581298828, 0.023855072021484375, 0.023890111923217775, 0.023875583648681642, 0.02380931282043457, 0.023769792556762696, 0.02452694320678711, 0.023961599349975587, 0.02390115165710449, 0.02395369529724121, 0.023906143188476562, 0.02402118492126465, 0.023859872817993164, 0.02386124801635742, 0.02393497657775879, 0.024321216583251953, 0.023911231994628905, 0.023908351898193358, 0.024176288604736328, 0.02393942451477051, 0.023812095642089845, 0.023754751205444336, 0.023826431274414063, 0.023785120010375978, 0.02502195167541504, 0.024014911651611327, 0.024398399353027345, 0.023921728134155273, 0.023877887725830077, 0.02369753646850586, 0.023763519287109375, 0.023785472869873047, 0.02369264030456543, 0.0237410888671875, 0.023688352584838868, 0.02366361618041992, 0.023875423431396484, 0.02368511962890625, 
0.023737920761108398, 0.02373472023010254, 0.023752704620361328, 0.024254335403442382, 0.02376710319519043, 0.023722047805786132, 0.023969791412353517, 0.02382009506225586, 0.023769567489624024, 0.023772991180419922, 0.02387548828125, 0.023949312210083007, 0.023863296508789062, 0.024199039459228515, 0.024029312133789064, 0.023871488571166992, 0.023965696334838867, 0.023901695251464843, 0.023943679809570313, 0.023870752334594725, 0.023931488037109375, 0.024074464797973632, 0.024040735244750977, 0.023876096725463865, 0.0241329288482666, 0.023944000244140624, 0.02395955276489258, 0.023967744827270508, 0.023777280807495117, 0.02376851272583008, 0.023971839904785155, 0.024076032638549804, 0.024099039077758788, 0.02444963264465332, 0.024543424606323243, 0.024422208786010743, 0.024451072692871095, 0.024475135803222657, 0.02449443244934082, 0.024174400329589844, 0.023976287841796874, 0.023879680633544922, 0.02393087959289551, 0.023816192626953125, 0.02380771255493164, 0.02384899139404297, 0.023918848037719726, 0.023848960876464844, 0.024045568466186523, 0.02388198471069336, 0.023975936889648438, 0.023785408020019532, 0.025782047271728517, 0.02493062400817871, 0.023984064102172853, 0.023846559524536133, 0.023855072021484375, 0.023769535064697266, 0.023795488357543946, 0.023791711807250978, 0.02372825622558594, 0.023834623336791993, 0.023980031967163085, 0.023924192428588866, 0.02393247985839844, 0.0240631046295166, 0.023845727920532227, 0.024020992279052734, 0.023893247604370116, 0.023866111755371094, 0.02376803207397461, 0.023846431732177733, 0.023951839447021485, 0.024034912109375, 0.023785919189453126, 0.023861408233642578, 0.023854047775268554, 0.023722335815429686, 0.023808544158935546, 0.023758111953735353, 0.023874271392822267, 0.023808000564575195, 0.024319999694824217, 0.023791616439819335, 0.023932640075683593, 0.02374684715270996, 0.023771135330200196, 0.02385305595397949, 0.02386124801635742, 0.023829952239990234, 0.023788095474243164, 0.023713151931762697, 0.023811840057373048, 0.024259456634521483, 0.028274688720703125, 0.029049087524414062, 0.024027135848999022, 0.023875328063964845, 0.02386944007873535, 0.02371942329406738, 0.023804416656494142, 0.023721343994140626, 0.025045631408691406, 0.024223487854003908, 0.023953664779663087, 0.02381011199951172, 0.02383251190185547, 0.023698816299438475, 0.023947904586791992, 0.023967744827270508, 0.023775232315063476, 0.02376825523376465, 0.023787712097167967, 0.02398681640625, 0.023945568084716796, 0.02392278480529785, 0.02419705581665039, 0.02395577621459961, 0.02389401626586914, 0.023887903213500976, 0.023948831558227537, 0.023906751632690428, 0.02384486389160156, 0.023848960876464844, 0.02388172721862793, 0.023797760009765623, 0.024112575531005858, 0.023982400894165038, 0.023941375732421874, 0.02383577537536621, 0.024505151748657226, 0.023908416748046876, 0.024051071166992188, 0.02397792053222656, 0.02453945541381836, 0.024195455551147462, 0.024131584167480468, 0.024032800674438477, 0.024852479934692383, 0.024039392471313478, 0.024000032424926758, 0.024856704711914063, 0.02396860885620117, 0.023863296508789062, 0.023899391174316408, 0.023965536117553712, 0.02413481521606445, 0.023918336868286132, 0.02380419158935547, 0.02399411201477051, 0.023857120513916016, 0.023742464065551756, 0.023873823165893555, 0.024063711166381837, 0.02403708839416504, 0.024017248153686523, 0.024018239974975587, 0.023850784301757813, 0.024879968643188477, 0.023795007705688476, 0.023741119384765624, 0.0242108154296875, 0.024304256439208985, 0.023758464813232422, 
0.02380633544921875, 0.02373219108581543, 0.024098207473754883, 0.02375129508972168, 0.023836671829223634, 0.023777280807495117, 0.023967744827270508, 0.02389811134338379, 0.02383625602722168, 0.025031232833862306, 0.025331743240356447, 0.024405216217041014, 0.02496771240234375]",tokens/s,40.96574860605049,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1844.191232,2768.109568,0.0,2365.587456,2314.318336,s,1,9.0901767578125,9.0901767578125,0.0,9.0901767578125,9.0901767578125,9.0901767578125,9.0901767578125,[9.0901767578125],,kWh,5.615339820413586e-05,6.1869261097778005e-06,1.844723697999595e-05,8.078756129390961e-05,,MB,1896.177664,3099.459584,0.0,2682.257408,2607.60832,s,10,0.4847334403991699,0.04847334403991699,0.0004979703883766398,0.04830492782592774,0.04874606323242187,0.04933855247497558,0.04981254386901855,"[0.049931041717529295, 0.048318401336669925, 0.048207008361816406, 0.04824272155761719, 0.048214462280273436, 0.04831631851196289, 0.04832207870483399, 0.04829353713989258, 0.04827347183227539, 0.04861439895629883]",tokens/s,5281.253131395026,kWh,1.4958004754465124e-06,1.6495918835224292e-07,9.940795934488052e-07,2.65483925724756e-06,tokens/kWh,96427683.6351333,MB,1896.177664,3099.459584,0.0,2682.257408,2607.61088,s,10,13.784520263671874,1.3784520263671873,0.013034876196884818,1.372310363769531,1.393464599609375,1.3998223876953124,1.4049086181640624,"[1.3683370361328124, 1.3920517578125, 1.36633935546875, 1.364835205078125, 1.3732437744140624, 1.371376953125, 1.3841221923828124, 1.3689954833984375, 1.389038330078125, 1.40618017578125]",tokens/s,45.70344037727018,kWh,3.957900431537869e-05,4.365166276127635e-06,2.003079833755413e-05,6.397496892906045e-05,tokens/kWh,984760.1500183368,,s,630,13.78239563369751,0.021876818466186523,0.0004994660624197005,0.02175241661071777,0.02238352870941162,0.022705539417266843,0.023641034183502205,"[0.02203241539001465, 0.022072383880615234, 0.02175200080871582, 0.021639104843139648, 0.02176902389526367, 0.021731327056884766, 0.021679359436035155, 0.021725215911865235, 0.02219059181213379, 0.02244233512878418, 0.021636735916137694, 0.02168556785583496, 0.02158278465270996, 0.02177020835876465, 0.021730720520019533, 0.02190332794189453, 0.02186739158630371, 0.021752895355224608, 0.021527263641357423, 0.02164246368408203, 0.021545183181762694, 0.021539392471313475, 0.02175699234008789, 0.02151308822631836, 0.021424192428588867, 0.021693727493286134, 0.021680896759033202, 0.02169647979736328, 0.021547008514404296, 0.02185215950012207, 0.02185625648498535, 0.021702335357666015, 0.02152435111999512, 0.021524927139282227, 0.021598207473754884, 0.02167747116088867, 0.021502559661865234, 0.02150399971008301, 0.02170675277709961, 0.021550495147705077, 0.021561887741088866, 0.021507904052734374, 0.021459199905395507, 0.021618688583374023, 0.021549055099487305, 0.02168230438232422, 0.022791744232177735, 0.02167430305480957, 0.021563392639160156, 0.02177039909362793, 0.021681184768676757, 0.021658432006835936, 0.021656639099121095, 0.021957088470458984, 0.021742048263549803, 
0.022648448944091796, 0.022011392593383788, 0.02162345504760742, 0.021509920120239258, 0.021604799270629884, 0.02140348815917969, 0.02151260757446289, 0.02142742347717285, 0.021674240112304687, 0.02145280075073242, 0.021405759811401366, 0.02162719917297363, 0.021796064376831056, 0.021789472579956056, 0.02169856071472168, 0.021838016510009765, 0.022156415939331056, 0.02200953674316406, 0.022178464889526368, 0.022154592514038087, 0.02210508728027344, 0.021869951248168946, 0.022231679916381836, 0.022052928924560546, 0.022099807739257814, 0.022003807067871094, 0.022266944885253905, 0.022164096832275392, 0.02211846351623535, 0.022141183853149414, 0.02200371170043945, 0.021739391326904296, 0.021953887939453125, 0.022156063079833983, 0.023690784454345703, 0.022280672073364257, 0.022103168487548827, 0.021846912384033204, 0.021938175201416017, 0.021940223693847655, 0.021975040435791016, 0.022691839218139647, 0.02335683250427246, 0.02283958435058594, 0.02202025604248047, 0.02212428855895996, 0.022126272201538087, 0.021961023330688476, 0.02184774398803711, 0.021946367263793946, 0.02198192024230957, 0.02184592056274414, 0.021964895248413087, 0.022092992782592774, 0.02217884826660156, 0.022045503616333006, 0.022385631561279297, 0.021884223937988282, 0.02203513526916504, 0.023199743270874023, 0.022349119186401367, 0.022008224487304686, 0.021986879348754883, 0.0222906551361084, 0.02279257583618164, 0.02223321533203125, 0.021895103454589844, 0.02188038444519043, 0.021860864639282225, 0.021733375549316408, 0.021815296173095702, 0.021661407470703126, 0.021635360717773437, 0.02159324836730957, 0.021481695175170897, 0.021511808395385742, 0.022188127517700194, 0.022752159118652342, 0.02289356803894043, 0.02190438461303711, 0.021673055648803712, 0.021709632873535157, 0.021581920623779297, 0.02142348861694336, 0.02149849510192871, 0.02145894432067871, 0.021563392639160156, 0.022326751708984374, 0.021581663131713866, 0.021690975189208983, 0.02174166488647461, 0.02184121513366699, 0.02145555114746094, 0.021489664077758788, 0.021449920654296874, 0.02159452819824219, 0.0214304313659668, 0.021528831481933595, 0.021525951385498048, 0.021525056838989257, 0.02175939178466797, 0.021746623992919923, 0.021597856521606444, 0.021720895767211913, 0.021734624862670898, 0.021941215515136718, 0.021751808166503905, 0.021816896438598632, 0.021854656219482422, 0.022241247177124022, 0.021702816009521484, 0.021651327133178713, 0.021444095611572265, 0.021540416717529296, 0.021519296646118163, 0.021534303665161132, 0.021696287155151366, 0.02154764747619629, 0.02170252799987793, 0.021592191696166992, 0.0215001277923584, 0.02164873504638672, 0.021559743881225585, 0.02166783905029297, 0.021569183349609375, 0.021538400650024415, 0.021416704177856447, 0.02139948844909668, 0.021454559326171876, 0.02147567939758301, 0.021535776138305665, 0.02154185676574707, 0.02169856071472168, 0.022326271057128907, 0.021752832412719726, 0.021625471115112305, 0.02183964729309082, 0.02165519905090332, 0.021514848709106447, 0.021518688201904296, 0.02148054313659668, 0.02148406410217285, 0.02147724723815918, 0.021361152648925782, 0.021788671493530275, 0.021456064224243163, 0.021445440292358398, 0.021436159133911132, 0.02154092788696289, 0.021643455505371095, 0.02152448081970215, 0.021549055099487305, 0.02153843116760254, 0.02229088020324707, 0.021484928131103517, 0.021526304244995118, 0.021457439422607423, 0.021335744857788087, 0.021360479354858398, 0.021272703170776366, 0.021627487182617186, 0.021452032089233398, 0.021363264083862306, 0.021394752502441407, 
0.022018943786621094, 0.023388320922851563, 0.02173526382446289, 0.021984832763671875, 0.02187129592895508, 0.02152627182006836, 0.02147327995300293, 0.021778432846069336, 0.021713247299194337, 0.021451520919799804, 0.021617183685302733, 0.021438848495483397, 0.02166783905029297, 0.022193824768066406, 0.021618175506591796, 0.021531679153442382, 0.021458751678466798, 0.02167100715637207, 0.021527456283569335, 0.021760000228881835, 0.021727231979370116, 0.021640768051147462, 0.021782976150512695, 0.02168422317504883, 0.021794336318969727, 0.0218239688873291, 0.02208083152770996, 0.021676736831665037, 0.02188287925720215, 0.021811199188232423, 0.021712736129760744, 0.02176425552368164, 0.021626880645751953, 0.022841344833374022, 0.02205695915222168, 0.021933984756469727, 0.021768287658691408, 0.02172662353515625, 0.021807712554931642, 0.021823488235473632, 0.021774335861206053, 0.021734687805175783, 0.02207539176940918, 0.021903743743896486, 0.022088159561157228, 0.022112127304077148, 0.021743295669555664, 0.02193440055847168, 0.021526399612426757, 0.021510080337524416, 0.02151456069946289, 0.021593984603881837, 0.02169241523742676, 0.021538816452026367, 0.02176838493347168, 0.02164512062072754, 0.02172217559814453, 0.022262720108032225, 0.021440448760986327, 0.021645376205444336, 0.02158083152770996, 0.02159715270996094, 0.021474527359008788, 0.021515039443969725, 0.021510143280029297, 0.022331392288208008, 0.021526527404785157, 0.021501344680786134, 0.02147327995300293, 0.021420255661010742, 0.021864383697509766, 0.021819391250610352, 0.021612159729003905, 0.02157241630554199, 0.02158768081665039, 0.021796512603759765, 0.021602943420410158, 0.021727231979370116, 0.02194384002685547, 0.022538719177246095, 0.022340768814086913, 0.021865312576293944, 0.02170675277709961, 0.021719039916992186, 0.022509567260742186, 0.02174492835998535, 0.02154364776611328, 0.021792768478393554, 0.02212236785888672, 0.021743743896484376, 0.021589696884155272, 0.021667360305786133, 0.021480224609375, 0.02230665588378906, 0.021847583770751952, 0.021870880126953124, 0.02186150360107422, 0.02176089668273926, 0.021579776763916016, 0.021547008514404296, 0.02172867202758789, 0.022466943740844725, 0.021548479080200196, 0.021487424850463867, 0.021418336868286134, 0.021508384704589843, 0.021653087615966796, 0.02149247932434082, 0.021405696868896484, 0.021462432861328123, 0.021606559753417968, 0.021671775817871095, 0.02154147148132324, 0.02147727966308594, 0.02167203140258789, 0.02152448081970215, 0.02148521614074707, 0.021559648513793946, 0.021644832611083985, 0.021563871383666992, 0.02145894432067871, 0.021372928619384765, 0.021630975723266603, 0.021557247161865235, 0.022114303588867186, 0.02186617660522461, 0.021843679428100587, 0.021853919982910155, 0.021920352935791015, 0.02175823974609375, 0.02169148826599121, 0.021904287338256837, 0.02192793655395508, 0.021841567993164064, 0.021780832290649414, 0.02184294319152832, 0.021832704544067383, 0.021882688522338867, 0.021946559906005858, 0.02188083267211914, 0.021837791442871093, 0.021890560150146485, 0.022040992736816405, 0.02197491264343262, 0.021968896865844727, 0.021731584548950196, 0.021913568496704103, 0.022327327728271486, 0.021659040451049806, 0.021518047332763673, 0.021812095642089843, 0.02202841567993164, 0.022069120407104494, 0.021995519638061522, 0.02204876708984375, 0.022044416427612304, 0.021944576263427735, 0.021802047729492187, 0.021975072860717773, 0.021887712478637696, 0.021617664337158202, 0.021580799102783203, 0.021547008514404296, 0.02146918487548828, 
0.021411840438842773, 0.021562688827514647, 0.021689376831054687, 0.021955360412597658, 0.022092672348022462, 0.02206105613708496, 0.021876735687255858, 0.021976608276367188, 0.021746143341064453, 0.02209587287902832, 0.021800800323486327, 0.022319007873535156, 0.021907199859619142, 0.021858816146850587, 0.021740671157836913, 0.021638015747070312, 0.021657312393188476, 0.021977312088012697, 0.022845504760742187, 0.021592063903808592, 0.021622783660888673, 0.021784576416015625, 0.021661792755126953, 0.02618953514099121, 0.025788223266601563, 0.022838687896728514, 0.02209686470031738, 0.021600448608398437, 0.024810848236083986, 0.02226633644104004, 0.022140928268432617, 0.021663135528564453, 0.021404064178466797, 0.021498079299926757, 0.02157379150390625, 0.021503807067871094, 0.021591903686523438, 0.02149737548828125, 0.02145692825317383, 0.021809856414794923, 0.021606239318847656, 0.021596223831176757, 0.021544607162475585, 0.02268400001525879, 0.02351923179626465, 0.021683456420898438, 0.02150067138671875, 0.021476543426513672, 0.02158675193786621, 0.021448703765869142, 0.022063104629516602, 0.021550432205200195, 0.0215251522064209, 0.02150726318359375, 0.021664575576782228, 0.02146099281311035, 0.02128281593322754, 0.021501951217651367, 0.021645471572875975, 0.021466175079345704, 0.021426719665527345, 0.0217542724609375, 0.02143846321105957, 0.021292959213256836, 0.0215184326171875, 0.02171900749206543, 0.021473087310791016, 0.02160201644897461, 0.021723104476928712, 0.021448640823364257, 0.02139174461364746, 0.021252256393432617, 0.02147260856628418, 0.021395328521728516, 0.021506879806518556, 0.021481664657592773, 0.02195644760131836, 0.02192793655395508, 0.02180054473876953, 0.021854623794555664, 0.02192083168029785, 0.02159507179260254, 0.021646944046020508, 0.022032703399658203, 0.02231235122680664, 0.0223832950592041, 0.02229452705383301, 0.022380544662475587, 0.022358015060424806, 0.022163455963134765, 0.022080959320068358, 0.021967424392700195, 0.022116352081298828, 0.02203411293029785, 0.02226777648925781, 0.021821887969970703, 0.021677280426025392, 0.021930112838745117, 0.0219116153717041, 0.021809759140014647, 0.0219015998840332, 0.02196886444091797, 0.02157529640197754, 0.021563295364379884, 0.021548479080200196, 0.02142083168029785, 0.021552127838134767, 0.021295167922973632, 0.02149843215942383, 0.02162112045288086, 0.021720319747924804, 0.021463808059692384, 0.021737279891967772, 0.02160054397583008, 0.02185206413269043, 0.021765247344970703, 0.021734272003173828, 0.02133955192565918, 0.02150806427001953, 0.02137868881225586, 0.02148643112182617, 0.022290271759033205, 0.021716672897338866, 0.021447999954223633, 0.02157788848876953, 0.021385759353637696, 0.021299392700195312, 0.021993087768554687, 0.021717695236206053, 0.02179257583618164, 0.021796863555908205, 0.021914623260498048, 0.021617664337158202, 0.02160576057434082, 0.021376640319824218, 0.021473983764648437, 0.02136636734008789, 0.02135491180419922, 0.02129657554626465, 0.02182643127441406, 0.021393407821655275, 0.021442432403564454, 0.02159014320373535, 0.02143574333190918, 0.02155120086669922, 0.02135443115234375, 0.021268320083618165, 0.021508895874023437, 0.02129715156555176, 0.022017183303833007, 0.022366975784301756, 0.021620927810668947, 0.02148659133911133, 0.022016063690185547, 0.021672576904296876, 0.021751487731933594, 0.022131231307983397, 0.022008928298950195, 0.022020416259765627, 0.022028671264648438, 0.022040800094604494, 0.02233263969421387, 0.02240342330932617, 0.022792608261108398, 
0.022934656143188476, 0.02282383918762207, 0.022761472702026365, 0.022788095474243163, 0.022726688385009765, 0.022588672637939452, 0.022538911819458007, 0.02261612892150879, 0.022744991302490233, 0.022583391189575194, 0.022534143447875975, 0.023016767501831056, 0.022715072631835937, 0.02260700798034668, 0.022491968154907227, 0.022640447616577148, 0.023998687744140625, 0.022468608856201173, 0.022392255783081055, 0.022417856216430665, 0.022952320098876954, 0.0226779842376709, 0.02280195236206055, 0.022354400634765625, 0.0224849910736084, 0.022417407989501953, 0.022421503067016603, 0.023382015228271484, 0.024639392852783205, 0.024649696350097658, 0.022325376510620117, 0.022650623321533205, 0.021991647720336915, 0.022067232131958006, 0.02226790428161621, 0.02205606460571289, 0.02204761505126953, 0.02211840057373047, 0.022599456787109375, 0.022255008697509765, 0.023241535186767578, 0.021941343307495118, 0.022153760910034178, 0.022279647827148436, 0.022141984939575195, 0.02194819259643555, 0.022065248489379883, 0.02191535949707031, 0.022069536209106445, 0.02189926338195801, 0.022058784484863283, 0.022417312622070314, 0.022233312606811523, 0.022127744674682617, 0.022319616317749022, 0.022290624618530274, 0.02251580810546875, 0.022294208526611327, 0.022262271881103517, 0.022148096084594726, 0.022246400833129884, 0.022427648544311524, 0.022173696517944336, 0.02269388771057129, 0.022212608337402344, 0.02267673683166504, 0.022170368194580077, 0.022245439529418945, 0.02217158317565918, 0.022173696517944336, 0.02208563232421875, 0.022021951675415038, 0.02212063980102539, 0.021962751388549806, 0.02184556770324707, 0.021791168212890625, 0.02183286476135254, 0.02236851119995117, 0.021842527389526366, 0.021874048233032226, 0.02194905662536621, 0.02209689521789551, 0.021508703231811522]",tokens/s,45.71048580695728,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4975.325184,7346.192384,0.0,6943.670272,6539.1744,s,1,11.610884765625,11.610884765625,0.0,11.610884765625,11.610884765625,11.610884765625,11.610884765625,[11.610884765625],,kWh,0.00013150182546660288,1.4495563407165315e-05,4.1937255772006576e-05,0.00018793464464577476,,MB,5058.383872,7641.890816,0.0,7224.68864,6917.39904,s,10,2.0870283813476562,0.20870283813476562,0.0003155553283194226,0.2087106246948242,0.20905210113525388,0.20906222915649414,0.2090703315734863,"[0.20811203002929687, 0.20895455932617188, 0.2085798034667969, 0.20858934020996095, 0.20856150817871094, 0.20899449157714844, 0.20904985046386718, 0.20828253173828126, 0.2088319091796875, 
0.20907235717773437]",tokens/s,1226.6244306399572,kWh,6.133132390364595e-06,6.759877575955095e-07,4.076999789375558e-06,1.0886119937335663e-05,tokens/kWh,23516184.04662323,MB,5062.709248,7643.987968,0.0,7226.785792,6917.4016,s,10,21.80282080078125,2.180282080078125,0.00542946884505395,2.1797763671875,2.186784912109375,2.1880472412109375,2.1890571044921874,"[2.1766376953125, 2.170473388671875, 2.17936181640625, 2.180831298828125, 2.17493505859375, 2.178594482421875, 2.1893095703125, 2.185982177734375, 2.18019091796875, 2.18650439453125]",tokens/s,28.895343669357935,kWh,6.368841650463267e-05,7.0251264313698245e-06,4.2065176013220944e-05,0.00011277871894922341,tokens/kWh,558616.0278018818,,s,630,21.800411769866923,0.034603828206138,0.0003936771880204999,0.03450404739379882,0.034978083038330075,0.035205082893371584,0.03625874851226807,"[0.03558873748779297, 0.0347210578918457, 0.0344106559753418, 0.03439673614501953, 0.03434396743774414, 0.034398273468017576, 0.034270111083984374, 0.034481822967529295, 0.03458636856079102, 0.03455855941772461, 0.035160064697265625, 0.03517030334472656, 0.03530960083007813, 0.03591574478149414, 0.03483852767944336, 0.0348037109375, 0.03478915023803711, 0.03465024185180664, 0.03444950485229492, 0.03500572967529297, 0.035119808197021485, 0.03470134353637695, 0.03450835037231445, 0.03424915313720703, 0.03425667190551758, 0.03419948959350586, 0.03428995132446289, 0.03451696014404297, 0.034408481597900394, 0.03417440032958984, 0.034164833068847655, 0.03424099349975586, 0.034409568786621096, 0.03444854354858398, 0.03444915390014648, 0.03439996719360352, 0.03433033752441406, 0.03449641418457031, 0.03468348693847656, 0.03448553466796875, 0.034501407623291014, 0.03442403030395508, 0.034345184326171875, 0.03442335891723633, 0.0347586555480957, 0.0348221435546875, 0.0349780158996582, 0.034469470977783204, 0.03449260711669922, 0.03438735961914063, 0.03439676666259766, 0.03430179214477539, 0.034275264739990235, 0.03427107238769531, 0.03433305740356445, 0.0343551025390625, 0.034307552337646485, 0.034423423767089845, 0.03449398422241211, 0.03434096145629883, 0.034280960083007815, 0.0342841911315918, 0.03435951995849609, 0.035917728424072266, 0.0347770881652832, 0.034648063659667966, 0.034203647613525394, 0.03434086227416992, 0.03521535873413086, 0.034662593841552736, 0.03485676956176758, 0.03467036819458008, 0.03644380950927734, 0.03442335891723633, 0.03440435028076172, 0.034236415863037106, 0.03413622283935547, 0.034223583221435545, 0.03415488052368164, 0.03425484848022461, 0.03422003173828125, 0.0343633918762207, 0.03425414276123047, 0.03427113723754883, 0.034245407104492184, 0.03432400131225586, 0.03438582229614258, 0.03425513458251953, 0.03423875045776367, 0.034463584899902346, 0.03435708618164063, 0.03427094268798828, 0.034253406524658206, 0.034285568237304685, 0.034557952880859374, 0.03437180709838867, 0.03431939315795898, 0.03451302337646484, 0.03463151931762695, 0.03438671875, 0.03434832000732422, 0.034596767425537106, 0.03438406372070312, 0.03436435317993164, 0.034576000213623045, 0.034436607360839845, 0.03444179153442383, 0.03439820861816406, 0.03436246490478516, 0.034449630737304685, 0.034351806640625, 0.034498817443847654, 0.03450790405273438, 0.03435171127319336, 0.03427331161499023, 0.03442073440551758, 0.034402305603027344, 0.03441459274291992, 0.034516990661621096, 0.03425689697265625, 0.03418092727661133, 0.03420121765136719, 0.03423904037475586, 0.0343633918762207, 0.03417494583129883, 0.03419055938720703, 0.03597260665893555, 0.03480438232421875, 
0.03488479995727539, 0.03472019195556641, 0.03465977478027344, 0.03454576110839844, 0.034777599334716795, 0.034765151977539065, 0.034653278350830076, 0.03470537567138672, 0.034609153747558595, 0.03436435317993164, 0.034264095306396486, 0.03415548706054688, 0.03417702484130859, 0.03444121551513672, 0.034463455200195316, 0.03428524780273438, 0.03420630264282227, 0.03586899185180664, 0.036456127166748044, 0.03449651336669922, 0.03435446548461914, 0.03421571350097656, 0.03421689605712891, 0.03413372802734375, 0.03426332855224609, 0.034606239318847654, 0.03441267013549805, 0.03440627288818359, 0.03429782485961914, 0.03426387023925781, 0.03455347061157227, 0.03441043090820312, 0.03445129776000976, 0.03448899078369141, 0.03454889678955078, 0.0342639045715332, 0.03434905624389648, 0.034490367889404294, 0.034508800506591795, 0.034557952880859374, 0.03440639877319336, 0.03458867263793945, 0.034580032348632814, 0.034406848907470707, 0.034410015106201175, 0.03433420944213867, 0.03541910552978516, 0.03616057586669922, 0.0347309455871582, 0.034560001373291016, 0.03445529556274414, 0.034668384552001955, 0.034398624420166016, 0.034667552947998045, 0.03439715194702148, 0.034428382873535154, 0.034365985870361326, 0.034256256103515625, 0.0345483512878418, 0.03469833755493164, 0.0345711669921875, 0.03511289596557617, 0.034834495544433595, 0.03438617706298828, 0.03447747039794922, 0.03496755218505859, 0.03458902359008789, 0.0344637451171875, 0.034409534454345705, 0.034196414947509766, 0.0348359375, 0.03431887817382812, 0.03494319915771484, 0.03436326217651367, 0.03451894378662109, 0.03421593475341797, 0.034088958740234376, 0.034278816223144534, 0.03425263977050781, 0.03419990539550781, 0.03423680114746094, 0.03416617584228516, 0.03421043014526367, 0.03463091278076172, 0.03443484878540039, 0.034794273376464846, 0.03424620819091797, 0.03421187210083008, 0.03409366226196289, 0.034367488861083983, 0.03502854537963867, 0.03484716796875, 0.03477212905883789, 0.03476736068725586, 0.03468304061889648, 0.03444345474243164, 0.035305374145507815, 0.03629884719848633, 0.034840576171875, 0.035076095581054685, 0.034497760772705076, 0.03470931243896484, 0.034538463592529295, 0.03460457611083984, 0.03520905685424805, 0.035153888702392576, 0.03481782531738281, 0.03497868728637695, 0.03490544128417969, 0.03463644790649414, 0.0344719352722168, 0.03471763229370117, 0.03449657440185547, 0.03450668716430664, 0.034487743377685544, 0.03451968002319336, 0.03461324691772461, 0.03455088043212891, 0.03445427322387695, 0.034414623260498045, 0.034498687744140624, 0.03455385589599609, 0.03472300720214844, 0.034633567810058594, 0.035014591217041015, 0.034827041625976565, 0.03443503952026367, 0.03440643310546875, 0.03444736099243164, 0.03429779052734375, 0.03443072128295899, 0.03449273681640625, 0.03462758255004883, 0.034609153747558595, 0.03474460983276367, 0.034602718353271486, 0.0347770881652832, 0.034759807586669925, 0.034680992126464846, 0.0345300178527832, 0.034473983764648435, 0.0343900146484375, 0.03441254425048828, 0.034461921691894534, 0.03452073669433594, 0.03481817626953125, 0.03490614318847656, 0.03478953552246094, 0.034660160064697264, 0.03447382354736328, 0.03451100921630859, 0.0345863037109375, 0.03442105484008789, 0.0344640007019043, 0.03456707382202148, 0.034349086761474606, 0.03444595336914062, 0.03446393585205078, 0.03441664123535156, 0.03473612976074219, 0.034544769287109374, 0.034519935607910154, 0.0344925765991211, 0.03432575988769531, 0.034398815155029294, 0.03435712051391602, 0.0345928955078125, 0.034793792724609376, 
0.03495289611816406, 0.03460095977783203, 0.03444736099243164, 0.03434905624389648, 0.03444326400756836, 0.034366687774658206, 0.034470687866210936, 0.03492681503295898, 0.03440588760375977, 0.034296161651611326, 0.03431142425537109, 0.03429983901977539, 0.034361217498779295, 0.03441756820678711, 0.03425417709350586, 0.034437568664550784, 0.03430998229980469, 0.03424086380004883, 0.034236415863037106, 0.03502899169921875, 0.03464988708496094, 0.035471935272216794, 0.03436268615722656, 0.03441446304321289, 0.03447420883178711, 0.03437206268310547, 0.03436531066894531, 0.03432233428955078, 0.03440787124633789, 0.034542144775390624, 0.03468806457519531, 0.03459372711181641, 0.034410369873046874, 0.0343392333984375, 0.03476860809326172, 0.034396190643310544, 0.034418113708496095, 0.03427110290527344, 0.03570665740966797, 0.03431705474853516, 0.035104736328125, 0.034590206146240234, 0.03440707015991211, 0.03417702484130859, 0.03444736099243164, 0.03454534530639648, 0.03429369735717774, 0.034548446655273436, 0.034666145324707034, 0.034315582275390624, 0.034337120056152345, 0.03434902572631836, 0.0342694091796875, 0.03427139282226563, 0.034602848052978516, 0.0346069450378418, 0.034464065551757815, 0.03483004760742187, 0.03439235305786133, 0.034531326293945314, 0.034350177764892575, 0.03448748779296875, 0.0343590087890625, 0.03428966522216797, 0.03444326400756836, 0.034342910766601564, 0.03437071990966797, 0.03454006576538086, 0.034807552337646486, 0.0346363525390625, 0.0349224967956543, 0.03468265533447266, 0.03451107025146485, 0.03477913665771484, 0.03500339126586914, 0.035353599548339845, 0.03510067367553711, 0.03508019256591797, 0.03480166244506836, 0.03459411239624023, 0.03438662338256836, 0.03444652938842773, 0.03558396911621094, 0.03469311904907227, 0.03471952056884765, 0.03470358276367187, 0.034799617767333986, 0.03427532958984375, 0.03428511810302735, 0.034255294799804686, 0.0344637451171875, 0.034508800506591795, 0.034987297058105465, 0.034869247436523435, 0.03482697677612305, 0.03457843017578125, 0.034541568756103515, 0.034713600158691404, 0.03509036636352539, 0.03488924789428711, 0.03488191986083984, 0.03525785446166992, 0.03507267379760742, 0.03492454528808594, 0.03501795196533203, 0.034557857513427735, 0.03442572784423828, 0.03437353515625, 0.03446700668334961, 0.03455456161499024, 0.034817726135253906, 0.0347509765625, 0.03457231903076172, 0.034609153747558595, 0.03681280136108398, 0.03542784118652344, 0.03745363235473633, 0.03501964950561524, 0.035049278259277346, 0.03462144088745117, 0.03447395324707031, 0.034651905059814456, 0.03491254425048828, 0.03459267044067383, 0.03436528015136719, 0.034495777130126956, 0.03435139083862305, 0.03475116729736328, 0.034664447784423826, 0.03457622528076172, 0.03454291152954102, 0.03441532897949219, 0.03432201766967773, 0.03443084716796875, 0.034431873321533205, 0.03431196975708008, 0.0343573112487793, 0.03426230239868164, 0.034362014770507814, 0.034394241333007815, 0.03420467376708984, 0.03410009765625, 0.034272640228271485, 0.03567884826660156, 0.03566582489013672, 0.035399681091308595, 0.034787105560302733, 0.03443491363525391, 0.03435472106933594, 0.03442720031738281, 0.03440079879760742, 0.03432243347167969, 0.03452928161621094, 0.03448627090454102, 0.03440582275390625, 0.034427455902099606, 0.03439820861816406, 0.03434086227416992, 0.034351104736328124, 0.034496318817138674, 0.03446726226806641, 0.03460540771484375, 0.03441292953491211, 0.034480159759521484, 0.03458636856079102, 0.03466889572143555, 0.0348056640625, 0.03486675262451172, 
0.034681278228759764, 0.03474431991577148, 0.03463782501220703, 0.03452284622192383, 0.03722873687744141, 0.0345068473815918, 0.03434598541259765, 0.03444131088256836, 0.03451587295532227, 0.03457843017578125, 0.034756446838378904, 0.034646175384521485, 0.03438406372070312, 0.03443487930297852, 0.03458035278320312, 0.03446918487548828, 0.034509536743164065, 0.03434662246704102, 0.034633697509765624, 0.034961887359619144, 0.03475199890136719, 0.03483292770385742, 0.03484262466430664, 0.03505110549926758, 0.034636192321777344, 0.03721555328369141, 0.03485065460205078, 0.03475132751464844, 0.03447788619995117, 0.034406593322753906, 0.03473408126831055, 0.034707584381103516, 0.034603038787841794, 0.03557360076904297, 0.03492582321166992, 0.0347061767578125, 0.03465420913696289, 0.03462963104248047, 0.034481857299804686, 0.034533695220947264, 0.03553030395507813, 0.03493523025512695, 0.03447555160522461, 0.03437807846069336, 0.03437343978881836, 0.03448368072509766, 0.034466655731201175, 0.0342745590209961, 0.034582271575927734, 0.03431478500366211, 0.03415702438354492, 0.03425465774536133, 0.03422601699829102, 0.034410400390625, 0.0348389778137207, 0.03451289749145508, 0.03433785629272461, 0.03433772659301758, 0.03431628799438476, 0.03419340896606445, 0.034195457458496094, 0.03417292785644531, 0.0344637451171875, 0.03470131301879883, 0.034344959259033206, 0.034395614624023435, 0.03443561553955078, 0.03440639877319336, 0.034465694427490236, 0.034305793762207035, 0.03430665588378906, 0.03430342483520508, 0.034543327331542965, 0.0343353271484375, 0.03463711929321289, 0.03486495971679687, 0.03491884613037109, 0.03479939270019531, 0.034713470458984375, 0.03481884765625, 0.034944190979003906, 0.03510995101928711, 0.035925758361816405, 0.035200225830078126, 0.03497420883178711, 0.03515011215209961, 0.034692577362060543, 0.034465343475341796, 0.03432137680053711, 0.03457228851318359, 0.0342999038696289, 0.03461119842529297, 0.03467203140258789, 0.034680992126464846, 0.03455430221557617, 0.03452435302734375, 0.03485747146606445, 0.034691390991210935, 0.034670272827148435, 0.03480607986450195, 0.03493478393554687, 0.03497740936279297, 0.03477951812744141, 0.0354856948852539, 0.03499622344970703, 0.03487241744995117, 0.03477344131469726, 0.03478982543945312, 0.03498787307739258, 0.03466873550415039, 0.03448831939697266, 0.03436544036865234, 0.0351907844543457, 0.034452545166015626, 0.034606014251708984, 0.03452928161621094, 0.03466854476928711, 0.0349409294128418, 0.03452057647705078, 0.03471206283569336, 0.035127296447753906, 0.03501849746704101, 0.034902271270751954, 0.0349409294128418, 0.03474822235107422, 0.03489401626586914, 0.03448115158081055, 0.035084671020507815, 0.03484048080444336, 0.03461795043945313, 0.03442905426025391, 0.0348322868347168, 0.03465430450439453, 0.03461939239501953, 0.03602793502807617, 0.03460758590698242, 0.034422592163085936, 0.03467488098144531, 0.03460300827026367, 0.034472000122070315, 0.03440224075317383, 0.034454879760742185, 0.0343661117553711, 0.03460710525512695, 0.034732032775878906, 0.03496691131591797, 0.03460179138183594, 0.0345098876953125, 0.034494655609130856, 0.034855487823486325, 0.03478073501586914, 0.03468080139160156, 0.034748897552490235, 0.03451084899902344, 0.03440332794189453, 0.034579456329345705, 0.03441049575805664, 0.034522239685058596, 0.03463257598876953, 0.03445468902587891, 0.03477315139770508, 0.03446787261962891, 0.034382495880126956, 0.03470336151123047, 0.034592159271240236, 0.03455231857299805]",tokens/s,28.898536718044994,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8224.391168,11251.089408,0.0,10848.567296,10616.027648,s,1,14.707181640625,14.707181640625,0.0,14.707181640625,14.707181640625,14.707181640625,14.707181640625,[14.707181640625],,kWh,0.0002217485047749809,2.4453100640080384e-05,6.92994998839902e-05,0.0003155011052990515,,MB,3927.830528,11683.10272,0.0,11265.900544,11070.470656,s,10,3.869327514648438,0.38693275146484374,0.0015723757777633528,0.38700979614257813,0.38842025756835935,0.38910264587402343,0.3896485565185547,"[0.3837105712890625, 0.38558758544921873, 0.38826861572265625, 0.38587115478515627, 0.3866356506347656, 0.38675909423828125, 0.387260498046875, 0.3877727966308594, 0.3897850341796875, 0.387676513671875]",tokens/s,661.6136758411877,kWh,1.135294146875087e-05,1.2516110310114135e-06,7.526939782230889e-06,2.0131492281993172e-05,tokens/kWh,12716394.612682633,MB,3927.830528,11685.199872,0.0,11267.997696,11070.473216,s,10,29.395304443359375,2.9395304443359374,0.0063517143109843195,2.938293823242187,2.9485116455078124,2.9486101928710937,2.948689030761719,"[2.931767822265625, 2.9330380859375, 2.93488330078125, 2.93354296875, 2.934613037109375, 2.941704345703125, 2.945873291015625, 2.948708740234375, 2.94848974609375, 2.94268310546875]",tokens/s,21.43199439263919,kWh,8.604304739916469e-05,9.491161779877193e-06,5.7227056465370925e-05,0.0001527612656444128,tokens/kWh,412408.20920302585,,s,630,29.392640850067146,0.046654985476297046,0.0006162115645370318,0.04658526420593262,0.047064745330810544,0.04731296463012695,0.049377217025756835,"[0.04797983932495117, 0.04630521774291992, 0.04594895935058594, 0.04579388809204102, 0.04568880081176758, 0.04603100967407227, 0.046243839263916016, 0.04603289413452148, 0.046176254272460936, 0.046209022521972655, 0.04637081527709961, 0.04669635009765625, 0.04658287811279297, 0.046418495178222656, 0.04607804870605469, 0.04602207946777344, 0.04611779022216797, 0.04601824188232422, 0.046063934326171875, 0.046357601165771485, 0.046598880767822266, 0.04624345779418945, 0.04633184051513672, 0.04660492706298828, 0.0461495361328125, 0.04623942565917969, 0.046680225372314456, 0.04628096008300781, 0.04749702453613281, 0.04642425537109375, 0.04654489517211914, 0.04646297454833984, 0.04700902557373047, 0.0467217903137207, 0.046321376800537106, 0.04655542373657227, 0.04631548690795898, 0.04627609634399414, 0.04634064102172852, 0.046569313049316406, 0.046825630187988285, 0.046655487060546875, 0.04655923080444336, 0.04676809692382813, 0.046397472381591795, 0.0465849609375, 0.04664559936523437, 0.04633411026000977, 0.04637712097167969, 0.04660041427612305, 0.04674560165405273, 0.04674969482421875, 0.046954113006591795, 0.04691392135620117, 0.04697903823852539, 0.047030017852783206, 0.04669865417480469, 0.04683555221557617, 0.04690761566162109, 0.04667587280273437, 0.04870364761352539, 0.046571422576904296, 0.046679935455322265, 0.04938137435913086, 0.046523998260498044, 0.04575324630737305, 0.04593993759155274, 0.04607056045532226, 0.04611008071899414, 0.04607859039306641, 0.046130302429199216, 0.04627241516113281, 0.046126049041748045, 0.04621660614013672, 0.04611667251586914, 0.04593718338012695, 0.04595702362060547, 0.046276161193847656, 0.04629993438720703, 0.04605132675170898, 0.04605747222900391, 0.0466328010559082, 0.04623580932617188, 0.0463721923828125, 0.046643169403076175, 0.04645878219604492, 0.046230304718017576, 0.046653438568115234, 0.04677017593383789, 0.04649267196655273, 0.046265342712402346, 0.04653859329223633, 0.04653276824951172, 
0.0465016975402832, 0.04626655960083008, 0.04612496185302734, 0.046194080352783204, 0.04633670425415039, 0.04639539337158203, 0.04632166290283203, 0.04657766342163086, 0.04686188888549805, 0.046653694152832034, 0.04692768096923828, 0.0469587516784668, 0.0467437744140625, 0.04641321563720703, 0.04668188858032227, 0.046838367462158206, 0.04648681640625, 0.04647417449951172, 0.047426910400390626, 0.04834108734130859, 0.046717342376708985, 0.046784671783447265, 0.046456832885742184, 0.04658367919921875, 0.04663488006591797, 0.046524097442626956, 0.04653113555908203, 0.04680294418334961, 0.04670054244995117, 0.04679065704345703, 0.04718796920776367, 0.047306049346923826, 0.047079135894775394, 0.04832972717285156, 0.04652134323120117, 0.04625612640380859, 0.04604089736938476, 0.04566239929199219, 0.04596736145019531, 0.045995105743408204, 0.04585123062133789, 0.045891902923583985, 0.0462081298828125, 0.04607254409790039, 0.046116321563720704, 0.0460164794921875, 0.04633673477172852, 0.04651212692260742, 0.04614115142822266, 0.04639081573486328, 0.04683852767944336, 0.046404830932617186, 0.04665628814697265, 0.04696031951904297, 0.04682572937011719, 0.04666988754272461, 0.04666716766357422, 0.046475486755371095, 0.04642784118652344, 0.04636947250366211, 0.04637900924682617, 0.04598988723754883, 0.04640563201904297, 0.04655923080444336, 0.04624998474121094, 0.046208446502685546, 0.046666046142578126, 0.046607681274414066, 0.046314430236816403, 0.04679065704345703, 0.04851251220703125, 0.04634019088745117, 0.04655878448486328, 0.047125217437744144, 0.046801025390625, 0.04692582321166992, 0.04714700698852539, 0.04657561492919922, 0.046430335998535154, 0.046873950958251955, 0.04662531280517578, 0.04653670501708984, 0.04651152038574219, 0.04641417694091797, 0.046424320220947266, 0.04664115142822266, 0.04658995056152344, 0.04639644622802734, 0.04664828872680664, 0.04659404754638672, 0.04701388931274414, 0.04770611190795898, 0.047529022216796876, 0.046678974151611326, 0.047150943756103514, 0.04708367919921875, 0.04881203079223633, 0.04652851104736328, 0.04602470397949219, 0.045864959716796876, 0.04610639953613281, 0.04599772644042969, 0.04603267288208008, 0.04612156677246094, 0.04638937759399414, 0.04648556900024414, 0.04658790588378906, 0.046730785369873046, 0.0463744010925293, 0.04593139266967773, 0.04627024078369141, 0.04605148696899414, 0.046200702667236325, 0.0462400016784668, 0.04650156784057617, 0.046387359619140624, 0.046600383758544923, 0.046659072875976565, 0.04678847885131836, 0.04646361541748047, 0.04639744186401367, 0.04631347274780274, 0.04610867309570312, 0.046204544067382815, 0.04642649459838867, 0.04691763305664062, 0.04652563095092774, 0.04666003036499024, 0.04678876876831055, 0.046260448455810545, 0.046690048217773436, 0.04649795150756836, 0.04618035125732422, 0.046600288391113284, 0.04679270553588867, 0.04648492813110352, 0.04646937561035156, 0.04672748947143555, 0.046931774139404296, 0.04684163284301758, 0.04687913513183594, 0.046671871185302735, 0.046358528137207033, 0.04652592086791992, 0.04664969635009766, 0.04621126556396484, 0.046606464385986326, 0.0467372817993164, 0.04667795181274414, 0.046649406433105466, 0.04697020721435547, 0.04705913543701172, 0.04688304138183594, 0.04676992034912109, 0.04678688049316406, 0.04671903991699219, 0.0467191047668457, 0.04727580642700195, 0.04715068817138672, 0.049161727905273435, 0.04652048110961914, 0.04620102310180664, 0.04595507049560547, 0.04593664169311523, 0.04594278335571289, 0.04614963150024414, 0.04620492935180664, 
0.04588479995727539, 0.046225345611572266, 0.04632236862182617, 0.04602880096435547, 0.046159870147705076, 0.046525951385498046, 0.04628483200073242, 0.04626889419555664, 0.046519905090332034, 0.046397857666015625, 0.046063617706298826, 0.04628636932373047, 0.04691737747192383, 0.04696752166748047, 0.04676348876953125, 0.04685878372192383, 0.04640563201904297, 0.046204193115234375, 0.0463135986328125, 0.04650249481201172, 0.04623155212402344, 0.04642611312866211, 0.04650188827514649, 0.04620697784423828, 0.046343711853027346, 0.0467542724609375, 0.046440223693847656, 0.046465248107910154, 0.04677836990356445, 0.04695449447631836, 0.04639744186401367, 0.046476448059082034, 0.046742366790771483, 0.046604286193847655, 0.04685398483276367, 0.0469587516784668, 0.04691273498535156, 0.0469532470703125, 0.04673331069946289, 0.04653670501708984, 0.04635443115234375, 0.047005630493164065, 0.046700607299804686, 0.046738494873046876, 0.0465601921081543, 0.047099903106689454, 0.046941184997558595, 0.04682854461669922, 0.04663868713378906, 0.046583297729492185, 0.046741729736328126, 0.04706374359130859, 0.04679673767089844, 0.04715116882324219, 0.046908702850341794, 0.04932799911499024, 0.046895233154296875, 0.04595711898803711, 0.04588729476928711, 0.0462174072265625, 0.04599193572998047, 0.04581577682495117, 0.04610796737670898, 0.04642684936523438, 0.04646627044677734, 0.046443294525146485, 0.04630527877807617, 0.04626227188110352, 0.04623952102661133, 0.046491870880126955, 0.04607148742675781, 0.04576224136352539, 0.046240383148193356, 0.046548606872558594, 0.04628313446044922, 0.04637900924682617, 0.047091712951660154, 0.04676198577880859, 0.04630646514892578, 0.04660924911499023, 0.046430206298828124, 0.04595507049560547, 0.04621311950683594, 0.046781726837158207, 0.04673199844360352, 0.04677632141113281, 0.046643169403076175, 0.04658560180664063, 0.04700124740600586, 0.04680742263793945, 0.04657980728149414, 0.046491809844970707, 0.04664460754394531, 0.047264385223388675, 0.046243839263916016, 0.04653055953979492, 0.04711423873901367, 0.04688649749755859, 0.04687299346923828, 0.04694630432128906, 0.046752960205078124, 0.04667679977416992, 0.04657049560546875, 0.04697763061523438, 0.04689347076416016, 0.04672512054443359, 0.0465428466796875, 0.04668127822875977, 0.04684006500244141, 0.04677280044555664, 0.04658950424194336, 0.04681772613525391, 0.04692582321166992, 0.04683145523071289, 0.05233270263671875, 0.04627180862426758, 0.04685251235961914, 0.04698863983154297, 0.049285823822021485, 0.04665900802612305, 0.04614995193481446, 0.04593875122070312, 0.04614163208007813, 0.04609228897094726, 0.04602675247192383, 0.04659321594238281, 0.04658585739135742, 0.04613536071777344, 0.04841052627563477, 0.046295520782470706, 0.04623193740844726, 0.04613711929321289, 0.046532833099365234, 0.047156448364257815, 0.04576131057739258, 0.04635347366333008, 0.046597057342529294, 0.0465428466796875, 0.046677120208740236, 0.04687673568725586, 0.046656318664550785, 0.046585567474365236, 0.04651651382446289, 0.04690700912475586, 0.04625993728637695, 0.04652671813964844, 0.046709152221679685, 0.04648550415039063, 0.04686988830566406, 0.0471701774597168, 0.04930879974365234, 0.04648796844482422, 0.04656745529174805, 0.04648185729980469, 0.04639750289916992, 0.046604286193847655, 0.046319137573242186, 0.046437950134277345, 0.046771041870117186, 0.04698316955566406, 0.04684799957275391, 0.0469381103515625, 0.0469703369140625, 0.046957088470458985, 0.046951839447021484, 0.04674176025390625, 0.04664559936523437, 
0.04694009780883789, 0.04669036865234375, 0.046622718811035156, 0.04689715194702149, 0.04680499267578125, 0.04913971328735352, 0.046456512451171876, 0.04655251312255859, 0.046448734283447264, 0.0467496337890625, 0.047061279296875, 0.04677027130126953, 0.04707376098632812, 0.04712607955932617, 0.05009328079223633, 0.04690409469604492, 0.04597350311279297, 0.04605952072143555, 0.046441982269287106, 0.04790937423706055, 0.04580966567993164, 0.046516223907470705, 0.04610047912597656, 0.04603673553466797, 0.04641817474365234, 0.046516223907470705, 0.04626979064941406, 0.0461965446472168, 0.046158016204833986, 0.04650870513916015, 0.04652646255493164, 0.04664303970336914, 0.04635049438476563, 0.046233024597167965, 0.04709228897094726, 0.04722073745727539, 0.046639102935791016, 0.046548416137695316, 0.046661792755126955, 0.04663132858276367, 0.04667391967773438, 0.049582080841064455, 0.04601174545288086, 0.04639401626586914, 0.046534656524658206, 0.04619673538208008, 0.04638467025756836, 0.04631804656982422, 0.04636822509765625, 0.046493663787841796, 0.046756416320800784, 0.046811134338378906, 0.04689920043945312, 0.04681932830810547, 0.04708761596679688, 0.04711324691772461, 0.04712076950073242, 0.04689958572387695, 0.046763553619384765, 0.046921886444091794, 0.047120929718017575, 0.046843902587890625, 0.046835712432861325, 0.050036670684814454, 0.04608620834350586, 0.0466632308959961, 0.046755550384521484, 0.04642425537109375, 0.046438945770263675, 0.04665280151367188, 0.046803585052490236, 0.047001598358154296, 0.0470200309753418, 0.04698934555053711, 0.04772415924072266, 0.04728854370117187, 0.04714713668823242, 0.04981488037109375, 0.04610934448242188, 0.046048545837402345, 0.04620486450195312, 0.04616649627685547, 0.046129470825195314, 0.045956478118896485, 0.0462276496887207, 0.04594019317626953, 0.04648649597167969, 0.04668201446533203, 0.046273792266845706, 0.046293472290039064, 0.04648783874511719, 0.04648764801025391, 0.046309375762939455, 0.046516223907470705, 0.04681523132324219, 0.04662051010131836, 0.04667407989501953, 0.04699955368041992, 0.04670873641967774, 0.04936703872680664, 0.04613324737548828, 0.046327808380126956, 0.04645382308959961, 0.046185375213623044, 0.046852127075195316, 0.046682113647460936, 0.04656857681274414, 0.046881664276123045, 0.046685855865478514, 0.04652796936035156, 0.04692233657836914, 0.046491168975830076, 0.046777088165283205, 0.04659318542480469, 0.046588768005371095, 0.04722012710571289, 0.04705545425415039, 0.04686643218994141, 0.0474337272644043, 0.0470384635925293, 0.04680294418334961, 0.04962303924560547, 0.046300830841064455, 0.046421920776367184, 0.04693423843383789, 0.04661884689331055, 0.04658377456665039, 0.04701187133789062, 0.04677171325683594, 0.04675532913208008, 0.04684492874145508, 0.04674560165405273, 0.046698497772216796, 0.04699135971069336, 0.04708761596679688, 0.04701984024047851, 0.047417537689208984, 0.0472022705078125, 0.04731862258911133, 0.04748128128051758, 0.04869132614135742, 0.04668758392333985, 0.04635923385620117, 0.04601241683959961, 0.04594851303100586, 0.04639692687988281, 0.046271392822265625, 0.046235649108886716, 0.046622718811035156, 0.046539806365966795, 0.04662371063232422, 0.04647727966308594, 0.04633980941772461, 0.04650608062744141, 0.04624563217163086, 0.04616444778442383, 0.04648291015625, 0.04609487915039062, 0.046266014099121094, 0.04662511825561524, 0.046845951080322266, 0.04702816009521484, 0.046823486328125, 0.046357921600341793, 0.04650985717773438, 0.04681203079223633, 0.04693385696411133, 
0.046423969268798826, 0.04642425537109375, 0.04758095932006836, 0.04645296096801758, 0.04673292922973633, 0.04671267318725586, 0.047022624969482424, 0.046473217010498044, 0.046495105743408205, 0.04676774215698242, 0.04632463836669922, 0.04674979019165039, 0.046744991302490234, 0.04655984115600586, 0.046851425170898436, 0.04712287902832031, 0.0471693115234375, 0.04685184097290039, 0.04680099105834961, 0.047053409576416017, 0.04687257766723633, 0.04688083267211914, 0.046636993408203126, 0.04692940902709961, 0.046970752716064455, 0.047043201446533206, 0.047042560577392575, 0.046835678100585934, 0.046745376586914064, 0.04696271896362305, 0.04686870574951172, 0.04676723098754883, 0.046711681365966796, 0.047034366607666016, 0.04692326354980469, 0.04697708892822266]",tokens/s,21.433936583434317,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 352306 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3163.377664,4431.151104,0.0,4028.628992,3944.723968,s,1,10.3440693359375,10.3440693359375,0.0,10.3440693359375,10.3440693359375,10.3440693359375,10.3440693359375,[10.3440693359375],,kWh,9.552016602917016e-05,1.0529324165995e-05,3.056530223002896e-05,0.0001366147924251941,,MB,3218.006016,4770.889728,0.0,4353.687552,4305.05728,s,10,1.0984847106933593,0.10984847106933593,0.0002605002368488148,0.10989107131958008,0.11014441680908203,0.1101818733215332,0.11021183853149413,"[0.1100123519897461, 0.10947843170166016, 0.109482177734375, 0.10986265563964843, 0.11021932983398437, 0.10954656219482421, 0.10991948699951172, 0.11013609313964844, 0.10974729919433594, 0.110080322265625]",tokens/s,2330.4830509512853,kWh,3.2821042248123672e-06,3.617759558636077e-07,2.177841942022553e-06,5.821722122698528e-06,tokens/kWh,43973242.728620134,MB,3220.365312,4770.889728,0.0,4353.687552,4305.05984,s,10,21.5583271484375,2.1558327148437497,0.007466216733338763,2.1534361572265626,2.1672942138671876,2.1687994506835935,2.1700036401367186,"[2.152592041015625, 2.159719482421875, 2.157880859375, 2.166959716796875, 2.148681396484375, 2.15079931640625, 2.1542802734375, 2.1703046875, 2.15074169921875, 
2.14636767578125]",tokens/s,29.223046652098937,kWh,6.291325999268587e-05,6.939341831054376e-06,3.525141022357967e-05,0.00010510401204731992,tokens/kWh,599406.2336234715,,s,630,21.555751449584964,0.0342154784914047,0.0005858940224112002,0.03409353637695313,0.034640785598754886,0.034891537284851074,0.0361589853668213,"[0.03465465545654297, 0.03443548965454102, 0.03428044891357422, 0.034067104339599606, 0.03439411163330078, 0.03402060699462891, 0.033968894958496094, 0.03413577651977539, 0.034183456420898435, 0.03390214538574219, 0.03392147064208984, 0.03384060668945312, 0.033786399841308594, 0.03391692733764649, 0.03381043243408203, 0.03407430267333984, 0.033803966522216795, 0.03396876907348633, 0.03393644714355469, 0.035597248077392576, 0.03631718444824219, 0.034714817047119144, 0.03456476974487305, 0.0342501106262207, 0.03434921646118164, 0.03402352142333984, 0.034199649810791016, 0.03420409774780273, 0.03439206314086914, 0.033980415344238284, 0.03411286544799805, 0.03375785446166992, 0.03400447845458984, 0.03402092742919922, 0.03391494369506836, 0.03391139221191406, 0.03383148956298828, 0.033844959259033205, 0.033933311462402346, 0.033928768157958984, 0.03397795104980469, 0.0339505615234375, 0.033989696502685546, 0.033887359619140626, 0.034001792907714844, 0.034177982330322265, 0.03404800033569336, 0.033982463836669925, 0.03432243347167969, 0.03410927963256836, 0.03413148880004883, 0.034089599609375, 0.034080768585205076, 0.03405209732055664, 0.03450406265258789, 0.03558259201049805, 0.03409475326538086, 0.034006656646728514, 0.03414499282836914, 0.03433881759643555, 0.033882110595703126, 0.03377151870727539, 0.03425468826293945, 0.03514064025878906, 0.03446697616577148, 0.03458614349365234, 0.034652446746826174, 0.034988033294677735, 0.03487900924682617, 0.035828193664550784, 0.03468697738647461, 0.03420889663696289, 0.03409539031982422, 0.03454966354370117, 0.03454022216796875, 0.03462758255004883, 0.03447824096679687, 0.03562176132202149, 0.03437871932983398, 0.03413967895507813, 0.03414473724365234, 0.03466035079956055, 0.03448627090454102, 0.0348135986328125, 0.034302303314208984, 0.03415244674682617, 0.03398195266723633, 0.03391743850708008, 0.03387564849853516, 0.03386838531494141, 0.033877727508544925, 0.033826560974121095, 0.03379571151733399, 0.033716865539550785, 0.033855392456054685, 0.03393759918212891, 0.03377449417114258, 0.03403059387207031, 0.03400076675415039, 0.03412732696533203, 0.03420947265625, 0.03428451156616211, 0.03419340896606445, 0.03436544036865234, 0.03432620620727539, 0.034141792297363284, 0.034004768371582034, 0.03394041442871094, 0.03413401412963867, 0.03405619049072266, 0.03396940612792969, 0.033780479431152345, 0.03424051284790039, 0.034092769622802735, 0.034073982238769535, 0.0345179214477539, 0.034325599670410156, 0.03455414581298828, 0.0344438705444336, 0.034275360107421875, 0.03410124969482422, 0.03414947128295898, 0.03392835235595703, 0.034031360626220704, 0.03404390335083008, 0.034266849517822266, 0.035157440185546875, 0.03461289596557617, 0.034392990112304685, 0.034539039611816404, 0.034501087188720705, 0.03458867263793945, 0.03465625762939453, 0.034402305603027344, 0.03583180618286133, 0.034665760040283204, 0.034802398681640624, 0.03464128112792969, 0.03452934265136719, 0.034562686920166015, 0.03411552047729492, 0.03421184158325195, 0.0339947509765625, 0.03411737442016602, 0.034422462463378906, 0.034501121520996096, 0.03422598266601563, 0.034493694305419924, 0.03408588790893555, 0.03399606323242187, 0.03392729568481445, 0.03392940902709961, 
0.03394806289672852, 0.03386899185180664, 0.03397305679321289, 0.033920703887939455, 0.03387209701538086, 0.03373027038574219, 0.034120063781738284, 0.03376073455810547, 0.0341099853515625, 0.034088958740234376, 0.034170879364013675, 0.03401318359375, 0.033849086761474606, 0.033759422302246093, 0.033943614959716796, 0.033971649169921875, 0.03403424072265625, 0.034487648010253905, 0.034511520385742185, 0.0342421760559082, 0.03439606475830078, 0.03451132965087891, 0.03442617416381836, 0.03423302459716797, 0.03398774337768555, 0.03396489715576172, 0.03383091354370117, 0.03385696029663086, 0.034087486267089843, 0.03402137756347656, 0.034338176727294924, 0.035867103576660155, 0.03410550308227539, 0.033974273681640625, 0.033936737060546875, 0.033823455810546875, 0.03400902557373047, 0.035160545349121095, 0.03466976165771484, 0.03421676635742187, 0.03421334457397461, 0.03453094482421875, 0.03391990280151367, 0.03364364624023437, 0.034159358978271485, 0.03379987335205078, 0.03365727996826172, 0.034082656860351564, 0.03372208023071289, 0.03372281646728516, 0.03442800140380859, 0.03449654388427734, 0.03453222274780274, 0.03474982452392578, 0.03462617492675781, 0.03452419281005859, 0.034742462158203126, 0.034478622436523436, 0.0342154541015625, 0.03407843017578125, 0.033905025482177734, 0.03376927947998047, 0.03381683349609375, 0.03388844680786133, 0.039704063415527346, 0.03464076614379883, 0.03423551940917969, 0.034358142852783206, 0.034326080322265626, 0.03414265441894531, 0.034202720642089846, 0.034312286376953126, 0.034218814849853514, 0.035192832946777344, 0.03424051284790039, 0.03411289596557617, 0.033767295837402345, 0.033979137420654296, 0.03379516983032226, 0.03369052886962891, 0.03367663955688477, 0.03364694213867187, 0.03377388763427734, 0.03433868789672852, 0.034501953125, 0.0342597770690918, 0.03445555114746094, 0.03410124969482422, 0.03395174407958984, 0.03388809585571289, 0.03384905624389648, 0.033951713562011716, 0.03403996658325195, 0.03907411193847656, 0.03578265762329102, 0.034592769622802735, 0.034852863311767575, 0.03440591812133789, 0.03446217727661133, 0.03436742401123047, 0.03442192077636719, 0.03438800048828125, 0.034081600189208985, 0.03397017669677734, 0.03396198272705078, 0.034105342864990236, 0.0343059196472168, 0.034103424072265624, 0.0340882568359375, 0.033882816314697264, 0.03383910369873047, 0.03374460983276367, 0.03391222381591797, 0.033987457275390626, 0.03386368179321289, 0.033809696197509766, 0.03382550430297852, 0.0339865608215332, 0.03413119888305664, 0.03416726303100586, 0.03434067153930664, 0.03423075103759766, 0.03466649627685547, 0.03443478393554687, 0.034248992919921874, 0.034266719818115236, 0.034019744873046875, 0.035588096618652344, 0.03417264175415039, 0.03385772705078125, 0.033835105895996094, 0.03384873580932617, 0.03400150299072266, 0.03385139083862305, 0.033883518218994144, 0.034202239990234376, 0.0338671989440918, 0.0338458251953125, 0.03380806350708008, 0.03364255905151367, 0.03383475112915039, 0.03376153564453125, 0.033568286895751955, 0.03360636901855469, 0.03383910369873047, 0.03375823974609375, 0.03436982345581055, 0.03696915054321289, 0.034210975646972654, 0.034275360107421875, 0.03409183883666992, 0.03413151931762695, 0.03417337417602539, 0.03407257461547852, 0.034318336486816405, 0.03433881759643555, 0.03402048110961914, 0.034003841400146485, 0.03381155014038086, 0.033893280029296875, 0.03387798309326172, 0.03401302337646484, 0.03428700637817383, 0.03429532623291016, 0.03415315246582031, 0.03384239959716797, 0.034271808624267576, 
0.034252799987792966, 0.03408486557006836, 0.03407462310791016, 0.03422208023071289, 0.03435520172119141, 0.034121726989746096, 0.034179073333740234, 0.033982463836669925, 0.03409100723266602, 0.034325790405273435, 0.03459878540039062, 0.03441910552978516, 0.034509246826171874, 0.034049758911132814, 0.03399708938598633, 0.0339568977355957, 0.03384624099731445, 0.034146305084228515, 0.03439168167114258, 0.034299488067626956, 0.03425564956665039, 0.03528908920288086, 0.034353153228759765, 0.03470940780639648, 0.033984607696533206, 0.03389164733886719, 0.034091678619384766, 0.033798175811767577, 0.0337256965637207, 0.03394620895385742, 0.03373788833618164, 0.033905281066894534, 0.033888286590576175, 0.03413654327392578, 0.034326400756835934, 0.034076416015625, 0.034177150726318356, 0.034049983978271484, 0.03406867218017578, 0.03391823959350586, 0.03400719833374023, 0.03392291259765625, 0.03382153701782226, 0.03448409652709961, 0.034152000427246094, 0.03400748825073242, 0.034139167785644534, 0.03415289688110352, 0.03426067352294922, 0.03432124710083008, 0.034423904418945314, 0.03393360137939453, 0.03419375991821289, 0.03418342590332031, 0.03406671905517578, 0.03398015975952148, 0.033990657806396485, 0.03389344024658203, 0.033813152313232425, 0.03453628921508789, 0.0337520637512207, 0.03390771102905273, 0.033801502227783206, 0.033917022705078126, 0.03383135986328125, 0.03423001480102539, 0.034183616638183596, 0.03398860931396484, 0.03439206314086914, 0.033987808227539065, 0.033653182983398436, 0.03364694213867187, 0.033732608795166014, 0.033698974609375, 0.03454038238525391, 0.033890304565429685, 0.034092479705810544, 0.03407689666748047, 0.03397052764892578, 0.03401929473876953, 0.03403369522094726, 0.03402035140991211, 0.033958206176757814, 0.03392134475708008, 0.03403014373779297, 0.033979198455810544, 0.03412201690673828, 0.033980224609375, 0.03433564758300781, 0.03442620849609375, 0.034585247039794924, 0.034236415863037106, 0.03401859283447266, 0.034013919830322266, 0.03421120071411133, 0.03548838424682617, 0.03443046569824219, 0.03492406463623047, 0.03447702407836914, 0.03507593536376953, 0.03447574234008789, 0.0340832633972168, 0.03419955062866211, 0.03389187240600586, 0.03413804626464844, 0.03400742340087891, 0.03436495971679687, 0.034119678497314454, 0.034076351165771485, 0.034323070526123045, 0.03431391906738281, 0.03459116744995117, 0.034214080810546874, 0.03434035110473633, 0.03457260894775391, 0.03418259048461914, 0.033946369171142576, 0.03431331253051758, 0.03403807830810547, 0.03467734527587891, 0.03471331024169922, 0.0343201904296875, 0.03506585693359375, 0.03468492889404297, 0.03446137619018555, 0.03532160186767578, 0.034304576873779295, 0.03399411010742188, 0.034611839294433594, 0.03465625762939453, 0.03457024002075195, 0.0341234245300293, 0.034029918670654295, 0.03400908660888672, 0.03390054321289063, 0.03404912185668945, 0.03421481704711914, 0.034129920959472655, 0.03396768188476563, 0.034009536743164065, 0.03421388626098633, 0.03626163101196289, 0.04180115127563477, 0.034040382385253906, 0.03383695983886719, 0.03378006362915039, 0.034136062622070314, 0.03509657669067383, 0.03397017669677734, 0.03452012634277344, 0.034374015808105465, 0.03441875076293945, 0.03469977569580078, 0.03411075210571289, 0.03429008102416992, 0.03417055892944336, 0.03404825592041016, 0.03393775939941406, 0.03398249435424805, 0.03423436737060547, 0.03402547073364258, 0.033775615692138675, 0.03450374221801758, 0.03441945648193359, 0.03446777725219727, 0.034885887145996095, 0.03464191818237305, 
0.034640960693359375, 0.03462649536132813, 0.034564350128173826, 0.034205440521240235, 0.03402342224121094, 0.03378716659545898, 0.03385212707519531, 0.033947425842285155, 0.03384137725830078, 0.036345855712890625, 0.0343243522644043, 0.03408089447021485, 0.034050048828125, 0.03411763381958008, 0.0346844482421875, 0.033964511871337894, 0.03399270248413086, 0.03424812698364258, 0.034227649688720704, 0.03423068618774414, 0.03431219100952149, 0.03437148666381836, 0.03438396835327148, 0.03422342300415039, 0.0340200309753418, 0.03457404708862305, 0.03489616012573242, 0.03484467315673828, 0.0349488639831543, 0.034613502502441405, 0.034178848266601565, 0.03399087905883789, 0.03393523025512695, 0.03404355239868164, 0.034253280639648435, 0.03590768051147461, 0.03493264007568359, 0.03455385589599609, 0.034318336486816405, 0.034293056488037106, 0.03469382476806641, 0.03405811309814453, 0.03472937774658203, 0.03403952026367187, 0.03388518524169922, 0.033880062103271484, 0.033873920440673826, 0.03401113510131836, 0.03378790283203125, 0.03375718307495117, 0.03381836700439453, 0.034070175170898435, 0.034161247253417966, 0.03423027038574219, 0.03395711898803711, 0.03392982482910156, 0.034127296447753905, 0.03410812759399414, 0.03386163330078125, 0.033683456420898435, 0.03367731094360352, 0.03366022491455078, 0.03376403045654297, 0.03408486557006836, 0.033667072296142575, 0.0339002571105957, 0.03371587371826172, 0.03384998321533203, 0.034236415863037106, 0.03431219100952149, 0.03423436737060547, 0.03384463882446289, 0.03375369644165039, 0.033791999816894534, 0.03366912078857422, 0.03372003173828125, 0.03394384002685547, 0.03389440155029297, 0.03401728057861328, 0.03412947082519531, 0.03391718292236328, 0.03469043350219726, 0.03432723236083984, 0.03414220809936523, 0.03407462310791016, 0.03382886505126953, 0.03371753692626953, 0.03353059387207031, 0.034508800506591795, 0.035234848022460935, 0.034337696075439454, 0.03447814559936523, 0.03442422485351562, 0.03425545501708984, 0.03450470352172851, 0.03457843017578125, 0.03478112030029297, 0.03459251022338867, 0.03404345703125, 0.03398118209838867, 0.033699649810791016, 0.0337327995300293, 0.03400089645385742, 0.03397017669677734, 0.03385094451904297, 0.03373712158203125, 0.0337039680480957, 0.03375465774536133, 0.03382729721069336, 0.03392448043823242, 0.034050239562988284, 0.0341794548034668, 0.034003009796142576, 0.033809600830078126, 0.034127998352050784, 0.03416339111328125, 0.034105342864990236, 0.03402547073364258, 0.03395993423461914, 0.033890304565429685, 0.033923072814941405, 0.0339947509765625, 0.034094303131103516, 0.03396803283691406, 0.03417974472045898, 0.03402774429321289, 0.03401311874389648, 0.03383465576171875, 0.03384156799316406, 0.033804096221923825, 0.0342663688659668, 0.033962272644042966, 0.03392374420166016, 0.03384035110473633, 0.03391158294677735, 0.03386912155151367, 0.03392803192138672, 0.034598560333251954, 0.033925312042236325, 0.03383865737915039, 0.03383660888671875, 0.03389734268188477, 0.03399679946899414, 0.03409676742553711]",tokens/s,29.226538516806386,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 354057 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,856.547328,551.419904,0.0,148.897792,141.633024,s,1,9.57820703125,9.57820703125,0.0,9.57820703125,9.57820703125,9.57820703125,9.57820703125,[9.57820703125],,kWh,1.521645442916532e-05,1.6711102039045912e-06,4.502781380000178e-06,2.1390346013070088e-05,,MB,1354.58816,666.763264,0.0,249.561088,216.900608,s,17,0.21661916732788083,0.01274230396046358,0.0008335801129013028,0.0125415678024292,0.01275635223388672,0.013434029006958005,0.015515963821411132,"[0.016036447525024415, 0.012738304138183594, 0.012251680374145507, 0.012472991943359374, 0.012365663528442383, 0.01263548755645752, 0.0125415678024292, 0.012567392349243164, 0.012472800254821777, 0.012783424377441406, 0.012403039932250977, 0.012491711616516113, 0.012507967948913574, 0.012567071914672852, 0.012643487930297851, 0.012478367805480957, 0.012661760330200195]",tokens/s,20090.558253382493,kWh,3.659243401681279e-07,4.035197347168002e-08,2.0008370225563927e-07,6.063600158954472e-07,tokens/kWh,422191426.3623565,MB,1388.244992,691.929088,0.0,274.726912,216.903168,s,17,10.056873291015624,0.5915807818244484,0.0056718808257833,0.58987890625,0.6001079223632813,0.601384716796875,0.602600771484375,"[0.5995100708007812, 0.5925679321289062, 0.5853646850585937, 0.5868677978515625, 0.598260498046875, 0.60290478515625, 0.6010046997070313, 0.5909890747070312, 0.5922396240234375, 0.5891159057617188, 0.5851216430664062, 0.5850933837890625, 0.5885111694335937, 0.5864260864257812, 0.58757373046875, 0.58987890625, 0.5954432983398438]",tokens/s,106.49433168823806,kWh,1.6953253085567218e-05,1.869643816247884e-06,6.312862684567875e-06,2.5135759586382978e-05,tokens/kWh,2506389.344769575,,s,1071,10.04722269535065,0.009381160313119186,0.00025154755640711143,0.00933683204650879,0.009592479705810546,0.009721983909606935,0.010297468852996823,"[0.00902348804473877, 0.00933683204650879, 0.009551103591918946, 0.009560799598693848, 0.009392000198364258, 0.009303711891174316, 0.009386272430419921, 0.00950211238861084, 0.0093306884765625, 0.010844863891601563, 0.01121497631072998, 0.009527104377746582, 0.00939187240600586, 0.009582719802856444, 0.009519424438476563, 0.009919936180114747, 0.01010745620727539, 0.0095513916015625, 0.00958620834350586, 0.009439264297485352, 0.00929635238647461, 0.009346559524536132, 0.009599264144897462, 0.009497247695922851, 0.009558015823364258, 0.009398271560668945, 0.009555328369140624, 0.009472864151000977, 0.009406016349792481, 0.009421024322509765, 0.00934496021270752, 0.009320320129394532, 0.00928985595703125, 0.009253024101257324, 0.009439135551452637, 0.009543904304504395, 0.009555744171142579, 0.00941487979888916, 0.009461440086364747, 0.009411680221557616, 0.009271424293518066, 0.009320575714111328, 0.009459775924682617, 0.00931497573852539, 
0.009323904037475586, 0.009318079948425293, 0.009610207557678222, 0.009666560173034668, 0.00946332836151123, 0.009365983963012695, 0.00938924789428711, 0.009524031639099121, 0.009521023750305176, 0.009590559959411622, 0.00952291202545166, 0.009455360412597657, 0.009458335876464843, 0.009477343559265137, 0.009392704010009766, 0.009590815544128418, 0.0095032958984375, 0.009426176071166992, 0.009311039924621583, 0.009200799942016601, 0.009324447631835937, 0.009413567543029785, 0.009578495979309083, 0.009496607780456544, 0.00940659236907959, 0.009227487564086914, 0.00928809642791748, 0.009461759567260742, 0.009373920440673828, 0.009436511993408203, 0.009351872444152831, 0.009491456031799317, 0.009542624473571777, 0.009445664405822753, 0.009282527923583984, 0.009386143684387206, 0.00934768009185791, 0.009104640007019042, 0.009394944190979004, 0.009513216018676758, 0.009287424087524414, 0.009164511680603027, 0.009233983993530273, 0.009329376220703125, 0.00932249641418457, 0.009215999603271484, 0.00928767967224121, 0.009258784294128419, 0.009062399864196777, 0.009034208297729492, 0.009207903861999512, 0.00927286434173584, 0.009132160186767577, 0.009125439643859864, 0.009371583938598632, 0.00946233558654785, 0.00955619239807129, 0.009297632217407226, 0.010208576202392579, 0.009539648056030274, 0.009239295959472656, 0.009076224327087403, 0.009472384452819824, 0.009647392272949218, 0.009419487953186036, 0.009322624206542968, 0.009445247650146484, 0.009471839904785156, 0.009168671607971192, 0.009171327590942384, 0.01054470443725586, 0.011919808387756348, 0.00979372787475586, 0.00943494415283203, 0.009301312446594238, 0.009219807624816894, 0.00942140769958496, 0.009171327590942384, 0.009062399864196777, 0.009043775558471679, 0.00913379192352295, 0.009153247833251953, 0.008755231857299805, 0.009224191665649414, 0.010030847549438476, 0.009220352172851562, 0.009131872177124023, 0.009453472137451171, 0.00956441593170166, 0.009391519546508789, 0.00929206371307373, 0.009588576316833497, 0.009587231636047364, 0.009277407646179198, 0.009078656196594238, 0.009027680397033692, 0.009053536415100098, 0.009182016372680664, 0.008979711532592773, 0.009048192024230958, 0.009148703575134278, 0.0091015043258667, 0.009050111770629882, 0.009265151977539063, 0.009348192214965821, 0.00911177635192871, 0.009040575981140136, 0.009457663536071777, 0.009375743865966797, 0.009191424369812011, 0.009158880233764648, 0.00920143985748291, 0.00920400047302246, 0.00922326374053955, 0.009160608291625976, 0.009407199859619141, 0.00947542381286621, 0.009396896362304688, 0.009303680419921875, 0.009204095840454101, 0.009113408088684083, 0.009075136184692382, 0.009064191818237305, 0.009064543724060058, 0.009522144317626952, 0.009096256256103516, 0.0090250883102417, 0.009154815673828125, 0.009078847885131835, 0.009237600326538085, 0.009292415618896484, 0.009220319747924805, 0.009234432220458985, 0.009416768074035645, 0.009367520332336426, 0.009876832008361817, 0.009478272438049317, 0.00952787208557129, 0.009359135627746581, 0.009345248222351074, 0.009342495918273925, 0.009617888450622558, 0.009494720458984375, 0.009523008346557617, 0.00952079963684082, 0.008997088432312011, 0.009215231895446777, 0.009351936340332032, 0.009578271865844727, 0.009490079879760742, 0.009506976127624512, 0.009611680030822753, 0.00960102367401123, 0.009576095581054687, 0.009458016395568848, 0.009445311546325684, 0.009465056419372559, 0.009148832321166991, 0.009298368453979492, 0.00933683204650879, 0.009225695610046387, 0.009264863967895508, 0.01010374355316162, 
0.009366880416870118, 0.009390624046325683, 0.009494527816772461, 0.009443264007568359, 0.009158975601196289, 0.008967040061950683, 0.009008000373840332, 0.008980511665344238, 0.009137855529785157, 0.009301343917846679, 0.009388992309570313, 0.009229503631591796, 0.009140704154968262, 0.009289567947387695, 0.009183744430541992, 0.009113856315612793, 0.00904371166229248, 0.009127103805541992, 0.00923846435546875, 0.009081088066101074, 0.009224096298217773, 0.00940236759185791, 0.009353952407836914, 0.00912384033203125, 0.009146368026733399, 0.009314175605773925, 0.009347200393676758, 0.009422847747802734, 0.009758720397949219, 0.009612832069396973, 0.009496928215026856, 0.009277631759643554, 0.009250752449035645, 0.009203424453735351, 0.009163040161132813, 0.009162752151489258, 0.009231583595275879, 0.009173791885375976, 0.009185279846191406, 0.009085247993469239, 0.009233535766601563, 0.00958521556854248, 0.009378975868225097, 0.00925814437866211, 0.009172672271728516, 0.008902976036071777, 0.009291744232177735, 0.00944502353668213, 0.00949295997619629, 0.009462752342224122, 0.009443264007568359, 0.009408831596374512, 0.009631775856018066, 0.00948521614074707, 0.009467647552490235, 0.009676256179809571, 0.00958291244506836, 0.00945580768585205, 0.00948857593536377, 0.009590656280517577, 0.009471936225891113, 0.00942899227142334, 0.009479552268981934, 0.009499391555786134, 0.009459199905395508, 0.009457663536071777, 0.009562496185302735, 0.009452544212341308, 0.009530367851257325, 0.009430944442749023, 0.009625696182250976, 0.009267200469970703, 0.009278911590576172, 0.009292351722717284, 0.00941055965423584, 0.009592479705810546, 0.009591327667236329, 0.009578111648559571, 0.009555968284606933, 0.009449631690979003, 0.009375776290893555, 0.009524831771850586, 0.009535103797912598, 0.009421600341796874, 0.009532480239868165, 0.009378751754760743, 0.009680895805358887, 0.009491999626159668, 0.009513440132141114, 0.009525504112243652, 0.009429887771606445, 0.00944803237915039, 0.009772671699523925, 0.009497535705566407, 0.009610624313354492, 0.009729408264160156, 0.009505727767944335, 0.009490528106689454, 0.009461119651794433, 0.009470784187316894, 0.00943712043762207, 0.009336576461791993, 0.009670720100402833, 0.009229920387268066, 0.009289376258850098, 0.009947327613830567, 0.009461664199829101, 0.00963651180267334, 0.009273280143737793, 0.00936508846282959, 0.009463839530944824, 0.009636480331420899, 0.009578207969665527, 0.009510656356811524, 0.009576576232910156, 0.00950499153137207, 0.009476096153259277, 0.009460991859436035, 0.009634367942810059, 0.009385631561279298, 0.009640352249145508, 0.009715104103088379, 0.009941727638244628, 0.009654303550720215, 0.009874719619750977, 0.009640640258789062, 0.009505791664123535, 0.009767168045043946, 0.009623680114746094, 0.00962332820892334, 0.009570560455322265, 0.009736096382141114, 0.009640416145324708, 0.009756896018981933, 0.009858400344848632, 0.009560319900512695, 0.009498527526855469, 0.009376255989074708, 0.009349472045898437, 0.00936076831817627, 0.009768480300903321, 0.009617280006408692, 0.009515199661254883, 0.009452223777770996, 0.009568256378173828, 0.009497952461242676, 0.009412927627563477, 0.009492927551269532, 0.009536959648132325, 0.009443743705749512, 0.00943724822998047, 0.009391488075256348, 0.009454208374023438, 0.009495776176452636, 0.00943734359741211, 0.009734911918640137, 0.009534687995910644, 0.009490400314331056, 0.009505472183227538, 0.009544992446899414, 0.009534175872802735, 0.009551872253417968, 
0.009616543769836425, 0.009537823677062988, 0.009476032257080078, 0.009531807899475098, 0.009549887657165527, 0.009443712234497071, 0.00972163200378418, 0.009820159912109374, 0.009616767883300781, 0.00912185573577881, 0.009459263801574707, 0.00946115207672119, 0.009380831718444824, 0.009612319946289062, 0.009534591674804688, 0.009497695922851563, 0.009513983726501465, 0.009382975578308106, 0.009294816017150879, 0.009334495544433594, 0.009338815689086915, 0.009416704177856445, 0.009435199737548828, 0.009386303901672364, 0.009394080162048339, 0.009445343971252441, 0.009354911804199218, 0.00946396827697754, 0.009414655685424805, 0.009352352142333984, 0.009394847869873046, 0.009730239868164063, 0.0096461763381958, 0.00960912036895752, 0.009471839904785156, 0.00944377613067627, 0.009262335777282716, 0.009293503761291504, 0.009370431900024415, 0.009410528182983399, 0.009467552185058594, 0.009642335891723633, 0.00962764835357666, 0.00974028778076172, 0.010001888275146484, 0.010278623580932617, 0.009521504402160645, 0.009409088134765625, 0.009320063591003418, 0.009355551719665527, 0.009370847702026367, 0.009327103614807129, 0.009373984336853027, 0.009523200035095216, 0.009408512115478516, 0.009327648162841796, 0.009335935592651367, 0.009340767860412597, 0.009381888389587402, 0.009372960090637207, 0.009386719703674317, 0.00942899227142334, 0.01009663963317871, 0.009506496429443359, 0.01033180809020996, 0.012004287719726562, 0.009924320220947266, 0.009455615997314454, 0.009549823760986328, 0.009446816444396973, 0.0095414400100708, 0.009364255905151367, 0.009324576377868652, 0.009519071578979492, 0.009426976203918457, 0.009354687690734864, 0.00934556770324707, 0.009264608383178712, 0.009338560104370118, 0.009341535568237304, 0.009325984001159669, 0.00942576026916504, 0.009588735580444336, 0.009424896240234374, 0.009433343887329101, 0.009305855751037598, 0.009282591819763184, 0.00930303955078125, 0.009242143630981445, 0.009177824020385743, 0.009277152061462403, 0.009267200469970703, 0.00930406379699707, 0.00943824005126953, 0.00967587184906006, 0.009330559730529785, 0.009594688415527344, 0.009494688034057617, 0.009420255661010742, 0.009441344261169434, 0.009371456146240234, 0.00945952033996582, 0.009466400146484375, 0.009443455696105958, 0.009537440299987793, 0.009577856063842774, 0.009404895782470703, 0.009388511657714843, 0.009393312454223634, 0.009408576011657715, 0.009593631744384766, 0.009440416336059571, 0.009400608062744141, 0.009310144424438477, 0.009278079986572266, 0.009514207839965821, 0.0093372802734375, 0.009277631759643554, 0.009238688468933106, 0.00922214412689209, 0.009229727745056152, 0.00931100845336914, 0.009297727584838867, 0.009287199974060058, 0.009299967765808105, 0.009236960411071778, 0.009189087867736817, 0.009216287612915038, 0.009302304267883301, 0.009333663940429688, 0.009286463737487793, 0.009322239875793456, 0.009315648078918458, 0.009409055709838868, 0.009635487556457519, 0.008992768287658692, 0.00936723232269287, 0.009312576293945312, 0.00943228816986084, 0.009347264289855956, 0.009771167755126953, 0.009339296340942382, 0.009313504219055175, 0.009261119842529296, 0.009202431678771973, 0.009354623794555664, 0.009217951774597168, 0.009299712181091309, 0.00942579174041748, 0.01028275203704834, 0.009418399810791015, 0.009343104362487793, 0.009447999954223633, 0.009347071647644043, 0.00930992031097412, 0.009309568405151368, 0.009655200004577636, 0.00926915168762207, 0.009200736045837403, 0.00927609634399414, 0.009347392082214355, 0.009230591773986817, 0.009235744476318359, 
0.009216575622558594, 0.009240480422973632, 0.00925875186920166, 0.009240832328796387, 0.00927244758605957, 0.009272192001342773, 0.009342047691345215, 0.009212127685546875, 0.009185440063476562, 0.009206303596496582, 0.009252863883972168, 0.009218144416809081, 0.010108256340026855, 0.009809696197509765, 0.010705696105957032, 0.009318400382995605, 0.009459039688110352, 0.009359231948852539, 0.009315103530883788, 0.009281215667724609, 0.009255231857299805, 0.009461119651794433, 0.009351936340332032, 0.009354144096374513, 0.009321439743041992, 0.009644031524658203, 0.009400320053100587, 0.00940351963043213, 0.009244832038879394, 0.00925926399230957, 0.009287360191345215, 0.009478943824768066, 0.009510592460632324, 0.009515328407287598, 0.009566207885742188, 0.009256832122802735, 0.009588607788085938, 0.009722335815429688, 0.00953936004638672, 0.009618816375732421, 0.009550335884094239, 0.009525247573852539, 0.009439359664916993, 0.00949846363067627, 0.009367584228515625, 0.009377792358398437, 0.009291423797607421, 0.009261407852172852, 0.009442399978637696, 0.00936787223815918, 0.00936406421661377, 0.009558015823364258, 0.009267200469970703, 0.00928758430480957, 0.009287520408630371, 0.00924899196624756, 0.00938806438446045, 0.009321727752685548, 0.009200384140014648, 0.00920687961578369, 0.009313183784484863, 0.009258687973022461, 0.009646400451660157, 0.009203712463378906, 0.009198816299438476, 0.009167807579040527, 0.009159775733947753, 0.009208576202392578, 0.009390080451965332, 0.00921737575531006, 0.009191264152526855, 0.009161664009094237, 0.009385855674743652, 0.009248767852783203, 0.009225760459899902, 0.00927996826171875, 0.009633503913879394, 0.00923852825164795, 0.009261343955993652, 0.009377663612365722, 0.009209983825683594, 0.009183232307434081, 0.009199680328369141, 0.009368927955627441, 0.009288288116455079, 0.009191231727600098, 0.009241056442260743, 0.009311200141906738, 0.009322303771972656, 0.00924950408935547, 0.009379936218261718, 0.009885824203491211, 0.009209856033325196, 0.009390080451965332, 0.009359359741210938, 0.009301728248596191, 0.009345312118530274, 0.009371904373168946, 0.008989631652832031, 0.00919539165496826, 0.009258432388305664, 0.009212127685546875, 0.009228768348693848, 0.009248767852783203, 0.009274815559387207, 0.009260767936706544, 0.009207743644714356, 0.00925926399230957, 0.00931222438812256, 0.00930681610107422, 0.009367551803588867, 0.009285632133483887, 0.00932464027404785, 0.009420703887939454, 0.00935103988647461, 0.009279616355895996, 0.009320511817932128, 0.009496512413024903, 0.009480192184448242, 0.009349120140075684, 0.009365568161010742, 0.0095927677154541, 0.009467616081237792, 0.009363871574401855, 0.009523072242736817, 0.009431232452392578, 0.009376704216003418, 0.009407391548156738, 0.009342944145202637, 0.009244928359985352, 0.009275327682495118, 0.009218111991882324, 0.009244256019592285, 0.009235936164855957, 0.00923423957824707, 0.009251711845397949, 0.00924176025390625, 0.009155424118041992, 0.00917849636077881, 0.009142911911010743, 0.009156607627868652, 0.009164799690246582, 0.009196576118469238, 0.009488863945007324, 0.009273856163024903, 0.009197248458862304, 0.009181344032287597, 0.009173151969909667, 0.009207008361816406, 0.009232895851135254, 0.009240639686584473, 0.009158143997192383, 0.00917955207824707, 0.009254528045654296, 0.00926582431793213, 0.009160896301269532, 0.00920150375366211, 0.009549152374267579, 0.009180831909179687, 0.00918832015991211, 0.009193504333496094, 0.009148127555847168, 0.009523648262023926, 
0.009415583610534668, 0.00933779239654541, 0.00925836753845215, 0.009243264198303222, 0.009308128356933594, 0.009297311782836914, 0.009316991806030274, 0.009256319999694824, 0.009310751914978028, 0.009271391868591309, 0.009261216163635254, 0.009322336196899414, 0.009291775703430176, 0.009291775703430176, 0.009262880325317384, 0.00921622371673584, 0.009418560028076171, 0.009403743743896485, 0.009407584190368651, 0.00941971206665039, 0.009309056282043458, 0.009297856330871582, 0.009265151977539063, 0.009303263664245606, 0.009306976318359375, 0.009539520263671875, 0.0091976318359375, 0.00920473575592041, 0.009188287734985352, 0.009204992294311523, 0.009144160270690918, 0.009145248413085937, 0.009152511596679687, 0.009537119865417481, 0.009650591850280762, 0.009469728469848632, 0.009220319747924805, 0.009244671821594238, 0.009223296165466308, 0.009179743766784668, 0.009373855590820313, 0.009287296295166015, 0.009183744430541992, 0.009178560256958007, 0.009123807907104492, 0.00919974422454834, 0.009183679580688477, 0.009217760086059571, 0.009232704162597655, 0.009209856033325196, 0.009403679847717285, 0.009296095848083497, 0.00918115234375, 0.009230879783630372, 0.009191424369812011, 0.009214112281799317, 0.00918511962890625, 0.009189375877380371, 0.00919324779510498, 0.00919279956817627, 0.009297823905944825, 0.008966143608093263, 0.009272383689880372, 0.009243007659912109, 0.009460288047790527, 0.00945366382598877, 0.009390111923217773, 0.0093121919631958, 0.0093787841796875, 0.009322943687438965, 0.00932044792175293, 0.0093985595703125, 0.009314559936523437, 0.009236479759216308, 0.009355263710021973, 0.009645376205444336, 0.00956886386871338, 0.00942835235595703, 0.00937174415588379, 0.009335424423217773, 0.0092708158493042, 0.009300191879272461, 0.00925209617614746, 0.009243647575378417, 0.009351167678833008, 0.00930617618560791, 0.00924665641784668, 0.009295999526977539, 0.009609087944030762, 0.009334752082824707, 0.009177472114562988, 0.00921884822845459, 0.009245375633239745, 0.009285311698913574, 0.009164287567138671, 0.009284607887268067, 0.009334783554077148, 0.009277440071105958, 0.009265055656433105, 0.009590880393981933, 0.009318400382995605, 0.009277440071105958, 0.00916204833984375, 0.009289440155029296, 0.00961564826965332, 0.009471808433532715, 0.009539775848388672, 0.010075936317443848, 0.0093372802734375, 0.00926681613922119, 0.009245280265808106, 0.009261311531066895, 0.009401535987854004, 0.009270079612731933, 0.00921996784210205, 0.009193568229675294, 0.009238559722900391, 0.009257023811340331, 0.00927023983001709, 0.009225184440612794, 0.00934502410888672, 0.009224063873291015, 0.00923356819152832, 0.009406847953796387, 0.009096960067749023, 0.009394432067871094, 0.009308159828186035, 0.009303071975708007, 0.00922265625, 0.009247200012207031, 0.009256959915161133, 0.009199520111083985, 0.009219648361206055, 0.00918278408050537, 0.009195551872253419, 0.009167872428894042, 0.00912553596496582, 0.009173376083374023, 0.009502079963684082, 0.009533984184265137, 0.009287455558776855, 0.009221728324890138, 0.00921452808380127, 0.009155808448791504, 0.009158495903015137, 0.009200639724731445, 0.00917625617980957, 0.00913491153717041, 0.009352352142333984, 0.009220800399780273, 0.00914038372039795, 0.009184960365295411, 0.009134400367736816, 0.009150464057922364, 0.009177056312561035, 0.009510623931884765, 0.009314528465270996, 0.009228192329406738, 0.009296064376831054, 0.009472000122070312, 0.009249792098999024, 0.009384896278381348, 0.00929513645172119, 0.009306719779968262, 
0.009645376205444336, 0.009486944198608398, 0.009267487525939942, 0.009334783554077148, 0.009376959800720215, 0.009638751983642578, 0.009298015594482421, 0.00931007957458496, 0.00929792022705078, 0.009248607635498047, 0.009240511894226075, 0.00924079990386963, 0.009281120300292968, 0.00929423999786377, 0.009250975608825684, 0.009611104011535644, 0.009392127990722657, 0.009535039901733398, 0.009359199523925782, 0.009358943939208985, 0.00946076774597168, 0.00930403232574463, 0.009536671638488769, 0.009178879737854004, 0.009422944068908692, 0.009558815956115723, 0.009381216049194336, 0.00924675178527832, 0.009343711853027344, 0.009365504264831542, 0.009308192253112793, 0.009293791770935058, 0.009343008041381835, 0.009334112167358398, 0.009370240211486816, 0.009339008331298828, 0.00932044792175293, 0.009392000198364258, 0.009354656219482421, 0.009218655586242675, 0.00961235237121582, 0.009269791603088379, 0.00948265552520752, 0.009367551803588867, 0.009263296127319336, 0.00935916805267334, 0.009408512115478516, 0.009373696327209472, 0.009369855880737304, 0.009502464294433594, 0.009215519905090333, 0.0091976318359375, 0.009148832321166991, 0.00922214412689209, 0.009184800148010253, 0.009198111534118652, 0.009284799575805663, 0.00924950408935547, 0.009293855667114258, 0.00923852825164795, 0.009416704177856445, 0.00935097599029541, 0.009207776069641113, 0.009197279930114747, 0.009261407852172852, 0.009406784057617187, 0.00926425552368164, 0.009237215995788575, 0.009342975616455078, 0.009303872108459472, 0.00926144027709961, 0.009278528213500977, 0.009260031700134277, 0.009281567573547363, 0.009279232025146484, 0.009259072303771973, 0.0091810884475708, 0.009412735939025878, 0.00944320011138916, 0.009430272102355957, 0.009418720245361329, 0.009304863929748535, 0.009310208320617675, 0.00932204818725586, 0.009271327972412109, 0.009273759841918945, 0.009375743865966797, 0.009277407646179198, 0.009993951797485351, 0.009326911926269532, 0.009294015884399414, 0.009962847709655762, 0.009246272087097168, 0.009221023559570312, 0.00920358371734619, 0.009213760375976562, 0.00922655963897705, 0.009199007987976075, 0.009161248207092285, 0.009193535804748534, 0.009191424369812011, 0.009399392127990723, 0.009280351638793946, 0.009195584297180176, 0.009207807540893554, 0.009193535804748534, 0.009276512145996094, 0.009191583633422852, 0.009212063789367677, 0.009175104141235351, 0.009226719856262207, 0.009218175888061524, 0.00916262435913086, 0.009198880195617676, 0.009181920051574708, 0.00931827163696289, 0.009267616271972656, 0.00920751953125, 0.00920911979675293, 0.009241344451904297, 0.009179200172424316, 0.009193375587463378, 0.00917478370666504, 0.009164799690246582, 0.0091396484375, 0.009179967880249024, 0.00922214412689209, 0.009156607627868652, 0.009388031959533692, 0.00939417552947998, 0.009733280181884766, 0.011639039993286132, 0.009515616416931152, 0.009579808235168456, 0.010343135833740235, 0.009349120140075684, 0.009270848274230957, 0.009824704170227051, 0.009289504051208496, 0.009400544166564941, 0.009414655685424805, 0.009375295639038085, 0.009265664100646973, 0.009236703872680664, 0.009230048179626464, 0.009221471786499024, 0.009502688407897949, 0.00926358413696289, 0.0092326078414917, 0.00900716781616211, 0.00928694438934326, 0.009319168090820312, 0.009318367958068847, 0.009472000122070312, 0.009531135559082032, 0.009555935859680176, 0.009592448234558106, 0.009578304290771484, 0.00999715232849121, 0.009543904304504395, 0.009508576393127442, 0.009324288368225098, 0.009311871528625488, 
0.009387776374816895, 0.00929417610168457, 0.009241184234619141, 0.009244671821594238, 0.009232383728027344, 0.009226240158081055, 0.009861120223999023, 0.00925814437866211, 0.009231072425842285, 0.010570079803466796, 0.009518879890441895, 0.00939788818359375, 0.00929856014251709, 0.00924342441558838, 0.009313247680664062, 0.009664511680603028, 0.009787199974060058, 0.009359040260314942, 0.009323007583618164, 0.009340928077697755, 0.00941875171661377, 0.009475104331970215, 0.010164256095886231, 0.009609343528747559, 0.009537568092346191, 0.009403167724609375, 0.009318400382995605, 0.00933683204650879, 0.009205151557922363, 0.009240703582763672, 0.009191360473632813, 0.009169440269470214, 0.009228287696838379, 0.009397472381591797, 0.009278143882751465, 0.009748576164245605, 0.009335007667541504, 0.009737855911254882, 0.010744000434875488, 0.009390048027038574, 0.009334176063537598, 0.009376352310180663, 0.00932204818725586, 0.009289664268493652, 0.0093373441696167, 0.009391679763793945, 0.009263392448425292, 0.009241855621337891, 0.009282655715942383]",tokens/s,106.59662201929744,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,854.54848,551.419904,0.0,148.897792,141.633024,s,1,9.6251806640625,9.6251806640625,0.0,9.6251806640625,9.6251806640625,9.6251806640625,9.6251806640625,[9.6251806640625],,kWh,1.5081354895834901e-05,1.655948729455328e-06,4.634448151999773e-06,2.1371751777290003e-05,,MB,1318.248448,666.763264,0.0,249.561088,216.900608,s,22,0.22175705432891843,0.01007986610585993,0.00018973987652826864,0.010012783527374267,0.010261337661743164,0.010272755241394043,0.010708159866333007,"[0.01025926399230957, 0.01082374382019043, 0.010065983772277833, 0.009968704223632812, 0.00993558406829834, 0.010261568069458007, 0.010102239608764649, 0.010008064270019532, 0.009954943656921386, 0.01006828784942627, 0.01005014419555664, 0.00998470401763916, 0.010075679779052734, 0.009984800338745117, 0.009937088012695312, 0.010273344039916992, 0.010011167526245117, 0.010014399528503418, 0.010033696174621583, 0.010010687828063965, 0.009955039978027344, 0.009977919578552246]",tokens/s,25397.162751117736,kWh,2.93192169896252e-07,3.233374962713023e-08,1.681491193473892e-07,4.936750388707714e-07,tokens/kWh,518559740.4024569,MB,1332.035584,691.929088,0.0,274.726912,216.903168,s,22,9.796990966796876,0.44531777121803984,0.004225096674222558,0.4444917297363281,0.44836306152343747,0.45253360290527345,0.4579859033203125,"[0.45274819946289063, 0.4444595947265625, 0.4420468444824219, 0.44723025512695314, 0.44845626831054686, 0.45937820434570314, 0.44408541870117185, 0.4411723937988281, 0.44526138305664065, 0.4433102111816406, 0.4407351989746094, 0.44452386474609373, 0.44116204833984374, 0.44167092895507815, 0.4455504150390625, 0.4475242004394531, 0.44692767333984373, 0.44521450805664065, 0.44021966552734376, 0.4444476623535156, 0.44711322021484373, 
0.4437528076171875]",tokens/s,141.47200958920067,kWh,1.2790951781009488e-05,1.410571585076267e-06,5.046855052652605e-06,1.924837841873836e-05,tokens/kWh,3273002.983912104,,s,1386,9.784832614421843,0.007059763791069152,0.0002020278555316181,0.007011375904083252,0.007261568069458008,0.007351680159568786,0.007659615993499756,"[0.006930079936981201, 0.007521120071411133, 0.0075939841270446775, 0.007324960231781006, 0.007326432228088379, 0.007311359882354736, 0.007339807987213134, 0.0072167677879333494, 0.007252863883972168, 0.007288544178009033, 0.007288832187652588, 0.007217152118682861, 0.007108607769012451, 0.007141280174255371, 0.007227488040924072, 0.007147520065307617, 0.007432223796844483, 0.007323679924011231, 0.007284192085266114, 0.0073937602043151855, 0.007562304019927978, 0.007308320045471191, 0.0073129281997680665, 0.007252352237701416, 0.007299359798431397, 0.007182047843933105, 0.007153567790985108, 0.007327328205108643, 0.00712550401687622, 0.007046815872192383, 0.007040927886962891, 0.00709881591796875, 0.007208672046661377, 0.007033279895782471, 0.007010144233703613, 0.0070563840866088865, 0.006982656002044678, 0.006975488185882568, 0.006942048072814942, 0.006984352111816406, 0.006999680042266846, 0.007026879787445068, 0.006947008132934571, 0.0069632000923156735, 0.006950592041015625, 0.006973567962646484, 0.00695033597946167, 0.006957248210906982, 0.007185184001922608, 0.007168032169342041, 0.006968063831329346, 0.007006368160247803, 0.007447391986846924, 0.007008287906646728, 0.007274464130401611, 0.007049536228179932, 0.00705731201171875, 0.007219200134277344, 0.0070282559394836425, 0.007254271984100342, 0.007861567974090575, 0.007242432117462158, 0.007090176105499267, 0.006910272121429443, 0.007083903789520263, 0.0070390720367431645, 0.007024191856384278, 0.0069922242164611815, 0.007112480163574218, 0.007018335819244385, 0.006986112117767334, 0.007030240058898926, 0.007023136138916016, 0.007127039909362793, 0.0069959678649902345, 0.006995584011077881, 0.0069285759925842285, 0.007089471817016602, 0.007015423774719238, 0.007066495895385742, 0.007290112018585205, 0.007184288024902344, 0.007056320190429687, 0.007019423961639405, 0.007012351989746094, 0.00704259204864502, 0.00709065580368042, 0.006989727973937989, 0.007016543865203857, 0.006985504150390625, 0.007034848213195801, 0.0070330238342285155, 0.0070136961936950685, 0.006992640018463135, 0.007018176078796387, 0.006977856159210205, 0.007015967845916748, 0.006982111930847168, 0.007127007961273193, 0.00729091215133667, 0.007378943920135498, 0.007167456150054932, 0.007045824050903321, 0.007073631763458252, 0.0070553598403930665, 0.007051072120666504, 0.006998335838317871, 0.007047232151031494, 0.006983744144439697, 0.007015264034271241, 0.006970047950744629, 0.0069408960342407225, 0.006980639934539795, 0.006978528022766113, 0.00694271993637085, 0.0069959678649902345, 0.007249695777893066, 0.00701635217666626, 0.007007552146911621, 0.006986239910125733, 0.007113440036773682, 0.007110335826873779, 0.00706390380859375, 0.007016191959381104, 0.007034495830535888, 0.007061888217926025, 0.006773119926452637, 0.007188608169555664, 0.007071616172790528, 0.007049280166625977, 0.0070115838050842285, 0.007078176021575928, 0.007020959854125977, 0.006989151954650879, 0.007030943870544434, 0.007024384021759033, 0.007006847858428955, 0.00701577615737915, 0.006978335857391357, 0.007030784130096435, 0.006983583927154541, 0.007239776134490967, 0.0070347518920898435, 0.006975615978240967, 0.007013760089874268, 0.007021312236785889, 
0.006977536201477051, 0.007046175956726074, 0.0069939842224121095, 0.0069929280281066895, 0.0069747200012207035, 0.006989984035491943, 0.0069881281852722165, 0.007010303974151612, 0.006941023826599121, 0.007021376132965088, 0.0071842560768127445, 0.007033567905426025, 0.007135263919830322, 0.007031007766723633, 0.007016479969024658, 0.007008384227752686, 0.006934368133544922, 0.006985504150390625, 0.006963391780853271, 0.0069836478233337405, 0.006922304153442383, 0.007000063896179199, 0.006987711906433105, 0.006942240238189698, 0.007129504203796387, 0.007089663982391357, 0.006955904006958008, 0.0069424638748168946, 0.00692633581161499, 0.007001664161682129, 0.006988224029541016, 0.007024640083312988, 0.006953184127807617, 0.006956096172332764, 0.006969855785369873, 0.006951168060302735, 0.006959167957305909, 0.006940576076507568, 0.007208992004394532, 0.006987711906433105, 0.0069502081871032715, 0.006980224132537842, 0.006979680061340332, 0.006653952121734619, 0.0069612798690795894, 0.00699132776260376, 0.00694214391708374, 0.007009151935577392, 0.006957087993621826, 0.00723360013961792, 0.00695091199874878, 0.0069816322326660156, 0.008666784286499023, 0.007168288230895996, 0.007000127792358399, 0.0071981439590454105, 0.007010591983795166, 0.007061791896820068, 0.007050975799560547, 0.0070433602333068845, 0.007102464199066162, 0.006977536201477051, 0.0070225920677185055, 0.006964640140533447, 0.006965856075286866, 0.006971104145050049, 0.0069798722267150875, 0.007262527942657471, 0.007005440235137939, 0.006969791889190674, 0.007108704090118408, 0.006976672172546387, 0.007002175807952881, 0.006965951919555664, 0.0069816322326660156, 0.006971392154693603, 0.007007775783538818, 0.006959455966949463, 0.007121024131774903, 0.007035232067108154, 0.006981279850006104, 0.006944287776947022, 0.006996448040008545, 0.006948895931243896, 0.006983327865600586, 0.006975935935974121, 0.006971392154693603, 0.00696895980834961, 0.006945024013519287, 0.007086207866668701, 0.006973375797271728, 0.00703276777267456, 0.006981855869293213, 0.006933504104614257, 0.0071178879737854, 0.007039968013763428, 0.007001920223236084, 0.006920447826385498, 0.0069639039039611815, 0.007040256023406982, 0.00865561580657959, 0.008900768280029297, 0.007038847923278808, 0.007036255836486816, 0.006998655796051026, 0.007028639793395996, 0.006988927841186523, 0.006982687950134277, 0.007018335819244385, 0.007019904136657715, 0.006996543884277344, 0.006952991962432861, 0.006938655853271484, 0.007009984016418457, 0.006932799816131592, 0.007052896022796631, 0.007031167984008789, 0.007049471855163574, 0.007102272033691406, 0.007007967948913574, 0.007123392105102539, 0.007019552230834961, 0.006986176013946533, 0.006936351776123047, 0.007053887844085694, 0.0069550080299377445, 0.0069550080299377445, 0.0069175357818603515, 0.006929279804229737, 0.006923999786376953, 0.006935647964477539, 0.006932928085327149, 0.006930240154266357, 0.0070826559066772465, 0.0068990721702575685, 0.006922880172729492, 0.00695091199874878, 0.0070759038925170895, 0.007039008140563965, 0.007026591777801514, 0.0070529599189758305, 0.007086431980133057, 0.0071188478469848635, 0.0072700161933898924, 0.007313344001770019, 0.00727839994430542, 0.007248511791229248, 0.007169727802276611, 0.0071214399337768556, 0.0072986879348754885, 0.007315711975097656, 0.007427135944366455, 0.007424863815307617, 0.007427775859832763, 0.007565375804901123, 0.0074152002334594725, 0.007357279777526856, 0.007426080226898194, 0.007194591999053955, 0.0072000322341918945, 
0.0071766400337219234, 0.007200992107391358, 0.007166016101837158, 0.007158815860748291, 0.007199264049530029, 0.007100736141204834, 0.007110976219177246, 0.0071877760887146, 0.007162335872650146, 0.0070076799392700196, 0.007465536117553711, 0.007417856216430664, 0.00730838394165039, 0.00730614423751831, 0.007217152118682861, 0.007256063938140869, 0.007207071781158448, 0.007251808166503906, 0.007368415832519531, 0.007395616054534912, 0.007337024211883545, 0.0073693118095397945, 0.007417600154876709, 0.007361120223999024, 0.007235583782196045, 0.00740556812286377, 0.007520576000213623, 0.007110335826873779, 0.007265567779541016, 0.00734281587600708, 0.007342080116271973, 0.007394815921783447, 0.0074163517951965335, 0.007317471981048584, 0.007247871875762939, 0.007174208164215088, 0.007116127967834473, 0.007092959880828857, 0.00715334415435791, 0.0072193918228149415, 0.007221471786499023, 0.007423423767089844, 0.007339712142944336, 0.007411839962005615, 0.007324512004852295, 0.007364160060882569, 0.007258048057556152, 0.0072269439697265625, 0.007147552013397217, 0.0071378879547119144, 0.007412000179290772, 0.007091008186340332, 0.007108640193939209, 0.007072639942169189, 0.007095967769622803, 0.007045248031616211, 0.00739247989654541, 0.0072765440940856935, 0.007207583904266357, 0.007340384006500244, 0.007392640113830566, 0.007260608196258545, 0.007352000236511231, 0.0074388480186462404, 0.00727184009552002, 0.0072782402038574215, 0.007295936107635498, 0.007345215797424316, 0.007244736194610596, 0.00725219202041626, 0.007132959842681885, 0.007573503971099854, 0.006872799873352051, 0.007285024166107178, 0.007294976234436036, 0.007415167808532715, 0.007242176055908203, 0.007233727931976319, 0.007120160102844239, 0.00708787202835083, 0.007101568222045898, 0.0071039037704467775, 0.007473663806915283, 0.007124927997589112, 0.00707916784286499, 0.007275263786315918, 0.007045119762420654, 0.007007999897003174, 0.00697763204574585, 0.007086463928222656, 0.007118624210357666, 0.007137279987335205, 0.0069632000923156735, 0.00702400016784668, 0.006963456153869629, 0.0070251841545104984, 0.007030623912811279, 0.0070348801612854, 0.007265984058380127, 0.007073247909545898, 0.007009088039398193, 0.006930592060089111, 0.006961023807525635, 0.006946208000183106, 0.006908512115478516, 0.006897024154663086, 0.006903999805450439, 0.006949408054351807, 0.006913760185241699, 0.006979775905609131, 0.007090271949768067, 0.007036640167236328, 0.007037248134613037, 0.007014111995697021, 0.006967455863952637, 0.0069816322326660156, 0.007006207942962647, 0.007011648178100586, 0.0069597759246826174, 0.006916128158569336, 0.0069465599060058595, 0.006965504169464111, 0.007087999820709228, 0.0069612798690795894, 0.006940671920776367, 0.006938271999359131, 0.006949215888977051, 0.0069550080299377445, 0.006904064178466797, 0.006964000225067138, 0.006956352233886719, 0.006902880191802978, 0.007051648139953614, 0.007129280090332031, 0.007007808208465576, 0.006685215950012207, 0.007006239891052246, 0.006973631858825683, 0.006973023891448975, 0.007015647888183593, 0.006944799900054931, 0.006980576038360596, 0.006946815967559815, 0.006959104061126709, 0.006921535968780518, 0.007104703903198242, 0.006924736022949219, 0.006963263988494873, 0.0069119038581848145, 0.006905824184417725, 0.006958847999572754, 0.0069062399864196775, 0.006940192222595215, 0.006926015853881836, 0.006968095779418946, 0.00710640001296997, 0.007010464191436768, 0.006965055942535401, 0.006936768054962158, 0.007325695991516113, 0.007016223907470703, 
0.007061728000640869, 0.006989823818206787, 0.007036928176879883, 0.007391232013702393, 0.007047167778015137, 0.006944672107696533, 0.007032735824584961, 0.006968768119812012, 0.006968224048614502, 0.006915840148925781, 0.006945024013519287, 0.006950623989105224, 0.007114016056060791, 0.006993824005126953, 0.007027167797088623, 0.006967328071594238, 0.006939072132110596, 0.006957280158996582, 0.006962111949920654, 0.006981567859649658, 0.006979743957519532, 0.00722102403640747, 0.006998432159423828, 0.0070043840408325194, 0.006981728076934815, 0.006952191829681396, 0.006937600135803222, 0.006921664237976074, 0.00701804780960083, 0.007037951946258545, 0.007065184116363525, 0.007051775932312012, 0.00702675199508667, 0.007020383834838867, 0.006975232124328613, 0.006985504150390625, 0.00699235200881958, 0.0066483840942382815, 0.006960927963256836, 0.006944447994232178, 0.006982367992401123, 0.006934368133544922, 0.0069632000923156735, 0.006946815967559815, 0.007147776126861572, 0.006976480007171631, 0.007019264221191406, 0.007112736225128174, 0.0073127360343933105, 0.007309055805206298, 0.007275424003601074, 0.007307263851165771, 0.007318880081176758, 0.007395135879516602, 0.007219488143920898, 0.007094848155975342, 0.007137279987335205, 0.007013599872589111, 0.007052063941955566, 0.007004064083099365, 0.007020703792572022, 0.007333824157714844, 0.006971712112426758, 0.006975168228149414, 0.006975327968597412, 0.006998303890228271, 0.006963071823120118, 0.006967328071594238, 0.006944159984588623, 0.006988351821899414, 0.007205920219421387, 0.007011168003082276, 0.0070247678756713864, 0.00704307222366333, 0.0070367040634155276, 0.007045407772064209, 0.007126431941986084, 0.007176735877990723, 0.007073311805725097, 0.007066080093383789, 0.007036223888397217, 0.007189184188842773, 0.0070835199356079105, 0.006990367889404297, 0.007024608135223389, 0.006979584217071533, 0.007266240119934082, 0.007034912109375, 0.006991903781890869, 0.0070594558715820314, 0.007030784130096435, 0.007088128089904785, 0.0070797119140625, 0.007005983829498291, 0.0070087041854858395, 0.006944863796234131, 0.0069359359741210935, 0.007031328201293945, 0.006951136112213135, 0.00694601583480835, 0.0067010560035705566, 0.006967072010040283, 0.006997824192047119, 0.006971807956695557, 0.007024064064025879, 0.006896224021911621, 0.0069508800506591795, 0.0069283838272094726, 0.006944767951965332, 0.006933599948883057, 0.006939712047576904, 0.0069261760711669925, 0.006934783935546875, 0.007122144222259521, 0.0070392642021179196, 0.0069818878173828125, 0.006971392154693603, 0.006957183837890625, 0.0069344000816345215, 0.006924287796020508, 0.006928160190582276, 0.006927807807922363, 0.006912479877471924, 0.0069307518005371094, 0.006914239883422851, 0.006915775775909424, 0.00691212797164917, 0.00714467191696167, 0.006961023807525635, 0.006984608173370361, 0.006956448078155517, 0.007049248218536377, 0.006951488018035888, 0.006955039978027344, 0.00694598388671875, 0.006916319847106934, 0.0069352960586547855, 0.0069920320510864255, 0.006995935916900634, 0.006949728012084961, 0.007181183815002441, 0.006983104228973388, 0.007502048015594482, 0.007295296192169189, 0.006959104061126709, 0.006989727973937989, 0.007659615993499756, 0.007047167778015137, 0.006981056213378906, 0.00697811222076416, 0.006968448162078858, 0.007055456161499023, 0.007297952175140381, 0.0072887039184570315, 0.007183584213256836, 0.007121695995330811, 0.007092383861541748, 0.007188511848449707, 0.007257535934448243, 0.007158304214477539, 0.007214975833892822, 
0.007091519832611084, 0.0070416321754455564, 0.006694911956787109, 0.007038368225097656, 0.008172127723693847, 0.007060800075531006, 0.0069415678977966305, 0.0070061440467834475, 0.007036159992218018, 0.007004543781280518, 0.006951168060302735, 0.0070061120986938475, 0.007155807971954345, 0.007028736114501953, 0.006922239780426025, 0.006976895809173584, 0.006969120025634766, 0.0069558720588684085, 0.006933599948883057, 0.006931359767913818, 0.006952576160430908, 0.006949247837066651, 0.007013440132141113, 0.007045248031616211, 0.006961728096008301, 0.007010784149169922, 0.006911776065826416, 0.006924287796020508, 0.00692633581161499, 0.0069112319946289065, 0.006894336223602295, 0.0070043840408325194, 0.006950431823730468, 0.007034143924713135, 0.006910016059875488, 0.00695091199874878, 0.007006271839141846, 0.006929247856140137, 0.006956128120422364, 0.007234464168548584, 0.006987840175628662, 0.0069467520713806154, 0.007040256023406982, 0.006972159862518311, 0.0069484801292419435, 0.006908576011657715, 0.006915808200836182, 0.006899712085723877, 0.006936575889587402, 0.006868896007537842, 0.006887519836425781, 0.007004159927368164, 0.006927743911743164, 0.0069101758003234865, 0.006914752006530762, 0.006977248191833496, 0.006969120025634766, 0.0069491519927978515, 0.006926271915435791, 0.0069101758003234865, 0.007012095928192139, 0.006971072196960449, 0.007012127876281738, 0.006995840072631836, 0.007101119995117188, 0.00674015998840332, 0.006979392051696777, 0.007005631923675537, 0.006966911792755127, 0.006958015918731689, 0.006915808200836182, 0.006936319828033447, 0.006930912017822265, 0.007036992073059082, 0.006930431842803955, 0.006946815967559815, 0.0069214081764221195, 0.006930431842803955, 0.006945663928985595, 0.006936704158782959, 0.006989920139312744, 0.006958816051483154, 0.007266335964202881, 0.007097343921661377, 0.007223840236663818, 0.007061952114105224, 0.007095744132995605, 0.007043647766113281, 0.006989823818206787, 0.007044960021972657, 0.007266464233398437, 0.007081984043121338, 0.007341983795166015, 0.007659615993499756, 0.00712886381149292, 0.007372960090637207, 0.007084288120269775, 0.007036736011505127, 0.007024640083312988, 0.007200767993927002, 0.00707583999633789, 0.007034431934356689, 0.0070269122123718265, 0.007006752014160156, 0.007050943851470947, 0.0069448318481445314, 0.006974847793579102, 0.006955552101135254, 0.007022655963897705, 0.006930399894714356, 0.006989727973937989, 0.007022016048431397, 0.006984352111816406, 0.00693839979171753, 0.007016672134399414, 0.007075520038604737, 0.0073281598091125486, 0.007096223831176758, 0.007146687984466553, 0.00701859188079834, 0.006995903968811035, 0.007258912086486816, 0.0070412797927856445, 0.007042079925537109, 0.0070002880096435545, 0.006982399940490723, 0.006974751949310303, 0.007000800132751465, 0.006633471965789795, 0.006999360084533691, 0.007029600143432617, 0.007054751873016358, 0.0069493122100830074, 0.006969344139099121, 0.00703385591506958, 0.007066624164581299, 0.0070206718444824215, 0.007182208061218262, 0.0069983677864074705, 0.006991551876068115, 0.006942495822906494, 0.0070024957656860355, 0.007202623844146728, 0.007063456058502197, 0.00701804780960083, 0.006965439796447754, 0.006993375778198242, 0.006992767810821534, 0.006985695838928223, 0.006933856010437012, 0.006943424224853515, 0.0069202880859375, 0.006920224189758301, 0.006905727863311768, 0.006910208225250244, 0.006895359992980957, 0.007067615985870361, 0.007036960124969483, 0.006929920196533203, 0.006960959911346436, 0.0069549760818481444, 
0.007020287990570069, 0.007123712062835693, 0.006997536182403565, 0.006953663825988769, 0.007028736114501953, 0.006957183837890625, 0.006963071823120118, 0.006934815883636474, 0.006936575889587402, 0.007165791988372802, 0.007010176181793213, 0.006945024013519287, 0.006995711803436279, 0.006996287822723389, 0.006966527938842774, 0.00693017578125, 0.007002816200256347, 0.006965248107910156, 0.007362559795379638, 0.00699951982498169, 0.00697760009765625, 0.00695958423614502, 0.007152895927429199, 0.007011072158813476, 0.00696672010421753, 0.006955423831939697, 0.0069554557800292965, 0.007034592151641846, 0.00694598388671875, 0.0069251198768615725, 0.006839680194854736, 0.007125760078430176, 0.007071296215057373, 0.0070919361114501955, 0.0070910401344299314, 0.007068511962890625, 0.007072671890258789, 0.007058527946472168, 0.006990752220153809, 0.006971583843231201, 0.006950623989105224, 0.007231584072113037, 0.007041024208068848, 0.007104479789733887, 0.007064703941345215, 0.007160704135894775, 0.0070217599868774416, 0.00697654390335083, 0.007083839893341064, 0.00704307222366333, 0.007056511878967285, 0.0069374718666076664, 0.007089183807373047, 0.007039231777191162, 0.006974495887756347, 0.006961984157562256, 0.006962048053741455, 0.006961152076721191, 0.006994207859039306, 0.006943615913391113, 0.00697327995300293, 0.006958079814910889, 0.0069959678649902345, 0.007094304084777832, 0.0070183038711547855, 0.006959424018859863, 0.006995776176452637, 0.006932415962219238, 0.006967391967773438, 0.0068915200233459475, 0.006933856010437012, 0.007066271781921387, 0.006879424095153809, 0.006917952060699463, 0.006973440170288086, 0.006931551933288574, 0.0069456958770751955, 0.007002111911773682, 0.007016448020935059, 0.007053311824798584, 0.0070965437889099125, 0.0070193281173706054, 0.006935200214385986, 0.006947135925292969, 0.006933568000793457, 0.006945343971252441, 0.006920576095581055, 0.00693839979171753, 0.0068848319053649906, 0.006935488224029541, 0.007124767780303955, 0.007013984203338623, 0.006951295852661133, 0.006648704051971436, 0.006954720020294189, 0.006979584217071533, 0.00693398380279541, 0.006912543773651123, 0.006911935806274414, 0.007338047981262207, 0.00693228816986084, 0.006983776092529297, 0.006963456153869629, 0.006971231937408447, 0.00690176010131836, 0.006908927917480469, 0.006988927841186523, 0.00713318395614624, 0.00715558385848999, 0.007035039901733399, 0.007259712219238281, 0.007035168170928955, 0.007028768062591553, 0.007028704166412354, 0.00698576021194458, 0.006977151870727539, 0.007047647953033447, 0.006942592144012451, 0.00693065595626831, 0.007005440235137939, 0.006932735919952393, 0.0069350719451904295, 0.007165696144104004, 0.0070563840866088865, 0.006931519985198974, 0.007012288093566895, 0.007018496036529541, 0.006987775802612305, 0.007231488227844239, 0.007032832145690918, 0.007081439971923828, 0.0070251841545104984, 0.007024960041046142, 0.006977248191833496, 0.006960351943969726, 0.006967072010040283, 0.007013152122497559, 0.006963551998138428, 0.007032671928405762, 0.007010303974151612, 0.007198719978332519, 0.007427680015563965, 0.007287040233612061, 0.007304927825927734, 0.007574111938476563, 0.007288671970367431, 0.007173151969909668, 0.007102943897247314, 0.007096831798553467, 0.007034912109375, 0.0070570559501647945, 0.007065919876098633, 0.007352320194244385, 0.007335231781005859, 0.007207903861999512, 0.007194143772125244, 0.006998144149780274, 0.007239456176757812, 0.007123199939727784, 0.007074944019317627, 0.007047327995300293, 
0.007062208175659179, 0.007044672012329101, 0.007016863822937012, 0.0071528000831604, 0.0070011520385742185, 0.00702569580078125, 0.006976287841796875, 0.006968704223632813, 0.0069649600982666015, 0.00720358419418335, 0.007066783905029297, 0.006984223842620849, 0.006970943927764893, 0.007018720149993897, 0.007029664039611816, 0.006997439861297607, 0.006973472118377686, 0.006991551876068115, 0.0074999680519104005, 0.007404032230377197, 0.007151552200317383, 0.007091839790344238, 0.007055840015411377, 0.008187007904052735, 0.007221951961517334, 0.007091584205627441, 0.007068384170532227, 0.007038976192474365, 0.007519552230834961, 0.007078591823577881, 0.007124896049499512, 0.007033952236175537, 0.007047743797302246, 0.0071459197998046875, 0.007003712177276611, 0.007039423942565918, 0.0071188478469848635, 0.007323904037475586, 0.007251711845397949, 0.007104512214660645, 0.00707583999633789, 0.007051583766937256, 0.00698748779296875, 0.007016416072845459, 0.0070052480697631835, 0.007031424045562744, 0.007067232131958008, 0.007076576232910157, 0.006999392032623291, 0.007024543762207031, 0.007158336162567139, 0.00713651180267334, 0.006963615894317627, 0.007000224113464355, 0.006979968070983887, 0.0069736638069152835, 0.006934304237365723, 0.00697702407836914, 0.0066193599700927735, 0.007017536163330078, 0.006931200027465821, 0.006952320098876953, 0.00686729621887207, 0.0069484801292419435, 0.006943391799926758, 0.006963488101959229, 0.00689737606048584, 0.0069407038688659665, 0.006948256015777588, 0.007145567893981934, 0.007512639999389648, 0.007510111808776855, 0.007114560127258301, 0.007032192230224609, 0.007002528190612793, 0.0069772801399230954, 0.006940576076507568, 0.007944575786590576, 0.007785888195037842, 0.006964032173156738, 0.007096288204193116, 0.0070135040283203125, 0.006902400016784668, 0.0068949441909790035, 0.006902688026428223, 0.006866943836212158, 0.007129151821136475, 0.0069482879638671875, 0.006892032146453857, 0.006995520114898682, 0.006889472007751465, 0.007123392105102539, 0.00696230411529541, 0.007070591926574707, 0.006912032127380371, 0.006916319847106934, 0.006874879837036133, 0.006907360076904297, 0.0068932161331176755, 0.006929279804229737, 0.006845856189727783, 0.007082047939300537, 0.006867487907409668, 0.006889472007751465, 0.006846464157104492, 0.006874207973480225, 0.006910880088806152, 0.0068884482383728025, 0.006870016098022461, 0.006940671920776367, 0.0072325439453125, 0.007401919841766357, 0.007799327850341797, 0.009354304313659668, 0.007205760002136231, 0.00710640001296997, 0.007112927913665771, 0.007524384021759033, 0.007223231792449951, 0.00708406400680542, 0.007204864025115967, 0.007064000129699707, 0.007321536064147949, 0.007306272029876709, 0.007449535846710205, 0.0072847681045532225, 0.007178239822387696, 0.007163904190063477, 0.007276000022888184, 0.007064095973968506, 0.007089536190032959, 0.007033472061157227, 0.00702623987197876, 0.007035295963287353, 0.006989855766296387, 0.0069910078048706055, 0.007238495826721192, 0.007012224197387696, 0.007133152008056641, 0.007239840030670166, 0.007006207942962647, 0.006959104061126709, 0.007204864025115967, 0.007036992073059082, 0.006989759922027588, 0.006991744041442871, 0.006969632148742676, 0.007063712120056152, 0.007033535957336426, 0.007025792121887207, 0.00698576021194458, 0.00695420789718628, 0.006945055961608887, 0.0070041918754577635, 0.006976895809173584, 0.007037216186523437, 0.0070620479583740235, 0.007350719928741455, 0.007139135837554931, 0.007102335929870605, 0.007014400005340577, 
0.0070488319396972655, 0.006951295852661133, 0.006960256099700928, 0.006971712112426758, 0.006955584049224853, 0.0069734082221984866, 0.006993599891662598, 0.006953184127807617, 0.0069797120094299316, 0.0070266880989074704, 0.006969344139099121, 0.006981215953826904, 0.0070145602226257325, 0.007162208080291748, 0.007085984230041504, 0.007057727813720703, 0.007062943935394287, 0.007014783859252929, 0.006948416233062744, 0.0069881281852722165, 0.00693452787399292, 0.00694271993637085, 0.0069550080299377445, 0.006652736186981202, 0.006995168209075928, 0.007014624118804932, 0.006971424102783203, 0.006996511936187744, 0.00694812822341919, 0.006963136196136474, 0.007006048202514648, 0.006962111949920654, 0.006957056045532227, 0.006923871994018555, 0.006950975894927979, 0.006895967960357666, 0.006914144039154053, 0.0069192957878112795, 0.007174367904663086, 0.007012127876281738, 0.006943103790283203, 0.006945184230804443, 0.00698473596572876, 0.006933472156524658, 0.006924287796020508, 0.00693830394744873, 0.006922560214996338, 0.00693232011795044, 0.007061471939086914, 0.006918335914611816, 0.006920224189758301, 0.006908224105834961, 0.0069259519577026364, 0.007122752189636231, 0.006989791870117188, 0.007110943794250488, 0.007063263893127441, 0.006926591873168945, 0.006930431842803955, 0.006879072189331054, 0.006903039932250977, 0.006906591892242432, 0.006901375770568848, 0.006892096042633056, 0.006881279945373535, 0.006903071880340576, 0.006914783954620361, 0.0069202880859375, 0.007198624134063721, 0.006950528144836426, 0.006985184192657471, 0.007090144157409668, 0.00699286413192749, 0.007028704166412354, 0.007009791851043701, 0.007002975940704346, 0.006983327865600586, 0.006977536201477051, 0.007096320152282715, 0.007030687808990479, 0.006955264091491699, 0.007126880168914795, 0.007071584224700928, 0.007338143825531006, 0.007064671993255616, 0.007021120071411133, 0.006792384147644043, 0.007078271865844726, 0.007069375991821289, 0.00703769588470459, 0.007024032115936279, 0.006955615997314453, 0.007032256126403808, 0.007007808208465576, 0.0071376957893371585, 0.006967360019683838, 0.0069554557800292965, 0.006989920139312744, 0.007176191806793213, 0.007083839893341064, 0.0069632000923156735, 0.0070330238342285155, 0.006973440170288086, 0.007255616188049316, 0.007088160037994385, 0.007047584056854248, 0.007006175994873047, 0.007002304077148438, 0.007123936176300049, 0.007004576206207275, 0.007231967926025391, 0.007057504177093506, 0.007085311889648438, 0.007117472171783447, 0.007120895862579346, 0.0069959678649902345, 0.0070471038818359375, 0.0070033278465270994, 0.0070416321754455564, 0.006963488101959229, 0.006983071804046631, 0.006957024097442627, 0.0074163517951965335, 0.007045152187347412, 0.007040192127227783, 0.007033599853515625, 0.0070769920349121095, 0.0069621758460998535, 0.00692633581161499, 0.006962592124938965, 0.006934912204742432, 0.006928192138671875, 0.006971807956695557, 0.0071979517936706545, 0.0070481600761413575, 0.007040128231048584, 0.007383711814880371, 0.007088128089904785, 0.007028223991394043, 0.007014912128448486, 0.007005824089050293, 0.00712278413772583, 0.007000256061553955, 0.007008895874023437, 0.0071554241180419925, 0.007073599815368653, 0.007003776073455811, 0.007026368141174316, 0.006981728076934815, 0.006680511951446534, 0.00698031997680664, 0.007034656047821045, 0.007036320209503174, 0.007002719879150391, 0.007712768077850342, 0.00970963191986084, 0.009172927856445313, 0.007144959926605224, 0.0072494401931762695, 0.007171072006225586, 0.007098495960235596, 
0.00697702407836914, 0.007054815769195557, 0.007014688014984131, 0.007014527797698975, 0.006989920139312744, 0.007040895938873291, 0.007122911930084228, 0.007117343902587891, 0.0069686717987060545, 0.007058080196380615, 0.006981056213378906, 0.006981855869293213, 0.006955359935760498, 0.0069194560050964355, 0.006929120063781738, 0.0070997757911682126, 0.006939263820648194, 0.006987296104431153, 0.006969632148742676, 0.00693398380279541, 0.00693936014175415, 0.006936575889587402, 0.0068915200233459475, 0.007046495914459229, 0.006965055942535401, 0.006941376209259033, 0.006930624008178711, 0.006924384117126465, 0.007017759799957275, 0.006918752193450927, 0.006973311901092529, 0.007101791858673096, 0.006929183959960937, 0.006956480026245117, 0.0069797120094299316, 0.006916543960571289, 0.006966495990753174, 0.006959551811218261, 0.007016799926757813, 0.007178239822387696, 0.007052768230438233, 0.006973983764648438, 0.007018496036529541, 0.006926144123077392, 0.006990015983581543, 0.006946815967559815, 0.0070638079643249516, 0.00695030403137207, 0.0071471681594848634, 0.006992576122283936, 0.006985727787017822, 0.006682496070861816, 0.006988863945007324, 0.0069483199119567875, 0.007045536041259765, 0.0070348801612854, 0.006939712047576904, 0.006980544090270996, 0.00701423978805542, 0.006969024181365966, 0.007059936046600342, 0.007170048236846924, 0.007260223865509033, 0.00738099193572998, 0.007302527904510498, 0.0072197761535644535, 0.007188479900360107, 0.007332159996032715, 0.007081664085388183, 0.007077888011932373, 0.00705302381515503, 0.007119135856628418, 0.007188000202178955, 0.007031263828277588, 0.007024640083312988, 0.0070730881690979006, 0.0070557122230529784, 0.007108960151672363, 0.0070351362228393555, 0.007079679965972901, 0.006997856140136718, 0.0069913921356201175, 0.006984320163726807, 0.007200352191925049, 0.007002079963684082, 0.007025087833404541, 0.007094111919403076, 0.007008031845092773, 0.007055391788482666, 0.006994272232055664, 0.0070236802101135256, 0.007051839828491211, 0.0069547839164733885, 0.006924704074859619, 0.0069695358276367185, 0.006921472072601319, 0.0069231362342834476, 0.006905727863311768, 0.007114751815795899, 0.007018655776977539, 0.0069201278686523435, 0.006983871936798096, 0.006952672004699707, 0.00694271993637085, 0.006950751781463623, 0.006948575973510742, 0.006986048221588135, 0.007008384227752686, 0.006924511909484864, 0.006926112174987793, 0.006952928066253662, 0.006889503955841064, 0.0070219202041625975, 0.007195295810699463]",tokens/s,141.64779865086066,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in 
launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = 
flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,854.110208,551.419904,0.0,148.897792,141.633024,s,1,9.7522666015625,9.7522666015625,0.0,9.7522666015625,9.7522666015625,9.7522666015625,9.7522666015625,[9.7522666015625],,kWh,1.4637448233331913e-05,1.6068119793177383e-06,4.656670391999743e-06,2.0900930604649394e-05,,MB,1315.336192,666.763264,0.0,249.561088,216.900608,s,22,0.2253181447982788,0.01024173385446722,9.747876691574485e-05,0.010220911979675293,0.010380845069885255,0.010411631965637207,0.010472912683486938,"[0.010413023948669433, 0.010174943923950195, 0.010117024421691894, 0.01018502426147461, 0.010488832473754883, 0.01015782356262207, 0.010385184288024903, 0.010188960075378417, 0.010317472457885743, 0.010111136436462402, 0.010153696060180665, 0.010217951774597169, 0.010309439659118652, 0.01026857566833496, 0.010226335525512695, 0.010187359809875488, 0.010223872184753418, 0.010341792106628419, 0.010256287574768066, 0.010136032104492187, 0.010269632339477539, 0.010187744140625]",tokens/s,24995.767673491966,kWh,2.932139726968168e-07,3.2336122279550504e-08,1.6705122241809045e-07,4.926013173944578e-07,tokens/kWh,519690043.3682848,MB,1328.791552,691.929088,0.0,274.726912,216.903168,s,22,10.15951300048828,0.4617960454767401,0.005313130281718918,0.46031187438964843,0.4691746490478516,0.4702244277954101,0.47515853424072263,"[0.470276123046875, 0.4685665283203125, 0.46924221801757815, 0.47645639038085935, 0.4639389343261719, 0.4620194091796875, 0.4586206970214844, 0.463431884765625, 0.456529052734375, 0.45988595581054686, 0.461214599609375, 0.45840524291992185, 0.46028033447265626, 0.4546676025390625, 0.4599427185058594, 0.45737994384765623, 0.46538705444335937, 0.4620137939453125, 0.4591249694824219, 0.4603434143066406, 0.45737136840820314, 
0.4544147644042969]",tokens/s,136.42386204273637,kWh,1.3107193630901736e-05,1.4454885979363354e-06,5.169152541490986e-06,1.9721834770329057e-05,tokens/kWh,3194428.9531713207,,s,1386,10.147082084655771,0.007321127045206177,0.0002687557523140361,0.007281535863876342,0.007504832029342651,0.007601784110069275,0.00837610683441163,"[0.0071233601570129396, 0.007409440040588379, 0.007411935806274414, 0.0073478717803955075, 0.007397727966308594, 0.007460927963256836, 0.007368159770965576, 0.007356895923614502, 0.007268352031707764, 0.007239871978759765, 0.0072509760856628415, 0.007799136161804199, 0.007356671810150146, 0.007370944023132324, 0.00749567985534668, 0.007454336166381836, 0.007923615932464599, 0.010178784370422363, 0.010123519897460938, 0.01021241569519043, 0.007437056064605713, 0.007284927845001221, 0.007434016227722168, 0.007287007808685303, 0.007233759880065918, 0.007230751991271973, 0.0073211841583251955, 0.007168928146362305, 0.007137279987335205, 0.0071068801879882815, 0.007142591953277588, 0.007090496063232422, 0.007286272048950195, 0.007284639835357666, 0.007299871921539307, 0.0072988801002502445, 0.007254208087921142, 0.0072700481414794925, 0.007266655921936035, 0.007211008071899414, 0.007314911842346191, 0.007160160064697265, 0.007368896007537842, 0.007251967906951904, 0.0072782402038574215, 0.007340447902679443, 0.00729702377319336, 0.007280576229095459, 0.007210080146789551, 0.007277472019195557, 0.007280799865722656, 0.007292031764984131, 0.007447264194488525, 0.007296544075012207, 0.007379360198974609, 0.007286848068237305, 0.007299071788787842, 0.007303359985351562, 0.007298943996429444, 0.007308512210845947, 0.00735203218460083, 0.007465312004089356, 0.007319200038909912, 0.0069407358169555665, 0.007281792163848877, 0.007363327980041504, 0.007329887866973877, 0.007268447875976563, 0.007283967971801758, 0.007281407833099365, 0.007425951957702637, 0.007388351917266845, 0.007418432235717773, 0.007354720115661621, 0.007284736156463623, 0.007230815887451172, 0.007365280151367187, 0.007305215835571289, 0.007415808200836181, 0.00732534408569336, 0.007472640037536621, 0.007211872100830078, 0.007253983974456787, 0.007255167961120606, 0.007254528045654297, 0.00736297607421875, 0.007272352218627929, 0.007562751770019531, 0.007368383884429931, 0.007326496124267578, 0.007264575958251953, 0.00728659200668335, 0.007357503890991211, 0.007953343868255615, 0.007390463829040527, 0.0073203201293945315, 0.007278719902038574, 0.007304543972015381, 0.007360640048980713, 0.007463327884674072, 0.007341919898986816, 0.007364192008972168, 0.007299136161804199, 0.007318016052246094, 0.0076308479309082035, 0.007501728057861328, 0.007355519771575928, 0.007287615776062011, 0.0072984638214111325, 0.007222015857696534, 0.007208960056304932, 0.007307263851165771, 0.008361151695251465, 0.009530271530151367, 0.00889590358734131, 0.008074912071228027, 0.007385248184204101, 0.00743449592590332, 0.007309760093688965, 0.007353343963623047, 0.007504576206207275, 0.007262527942657471, 0.0073482561111450195, 0.0073149437904357914, 0.007262688159942627, 0.00725932788848877, 0.006959392070770264, 0.007311391830444336, 0.007346399784088135, 0.00732089614868164, 0.0072904000282287594, 0.0073266239166259765, 0.007323935985565185, 0.007386847972869873, 0.007311359882354736, 0.00733625602722168, 0.007286464214324951, 0.007276864051818848, 0.007339712142944336, 0.0072932162284851075, 0.0074683518409729005, 0.007356832027435303, 0.0072375679016113284, 0.007187679767608642, 0.007168863773345947, 0.007126175880432129, 
0.007235583782196045, 0.007707136154174805, 0.007557472229003906, 0.009035776138305664, 0.010506400108337402, 0.009629535675048827, 0.008323360443115234, 0.007323359966278076, 0.007149695873260498, 0.007393119812011718, 0.0071693120002746585, 0.0071560640335083005, 0.007347743988037109, 0.007097536087036132, 0.007210015773773193, 0.007167744159698486, 0.007166848182678222, 0.007124991893768311, 0.007167103767395019, 0.00722819185256958, 0.007257247924804687, 0.007283455848693847, 0.007316703796386719, 0.007456831932067871, 0.008196224212646484, 0.007322432041168213, 0.007356416225433349, 0.007311103820800781, 0.007263552188873291, 0.007330751895904541, 0.007351647853851318, 0.007321599960327148, 0.007269343852996826, 0.0073190717697143555, 0.00725324821472168, 0.007234464168548584, 0.007458752155303955, 0.007247007846832275, 0.007424928188323974, 0.0073400321006774905, 0.007332863807678222, 0.007322624206542969, 0.0073400321006774905, 0.006963359832763672, 0.007264768123626709, 0.007599423885345459, 0.00870793628692627, 0.00809055995941162, 0.007583744049072265, 0.007307263851165771, 0.008582880020141601, 0.007657760143280029, 0.007614463806152344, 0.007634848117828369, 0.007477344036102295, 0.007354335784912109, 0.007434271812438965, 0.007407616138458252, 0.0084519681930542, 0.007604512214660644, 0.007323328018188476, 0.007310527801513672, 0.007414752006530761, 0.00745030403137207, 0.007305535793304443, 0.007302815914154053, 0.007521759986877441, 0.007383935928344727, 0.007294559955596924, 0.007243936061859131, 0.00729468822479248, 0.007268256187438965, 0.007544447898864746, 0.007457824230194092, 0.0073067197799682616, 0.007356800079345703, 0.007299007892608643, 0.007379136085510254, 0.007576992034912109, 0.007604832172393799, 0.007505087852478027, 0.00753542423248291, 0.007411488056182861, 0.00744649600982666, 0.00739299201965332, 0.007356800079345703, 0.007853759765625, 0.007581727981567383, 0.007952576160430908, 0.008270079612731934, 0.008533696174621581, 0.007642591953277588, 0.007734111785888672, 0.0075632638931274416, 0.0074711041450500485, 0.007370751857757568, 0.007448927879333496, 0.007505824089050293, 0.007655327796936035, 0.007545983791351319, 0.007440896034240723, 0.007472799777984619, 0.00758841609954834, 0.0073927040100097655, 0.00742252779006958, 0.0074108800888061524, 0.007090591907501221, 0.0073201279640197755, 0.007373888015747071, 0.0073218240737915035, 0.007278592109680176, 0.0074291200637817386, 0.007298783779144287, 0.007428095817565918, 0.007415135860443115, 0.0074901762008666994, 0.007357600212097168, 0.00732044792175293, 0.007351615905761719, 0.007291456222534179, 0.007350399971008301, 0.007335999965667725, 0.007395264148712158, 0.00740556812286377, 0.007456639766693115, 0.007315680027008057, 0.007400864124298095, 0.0073272957801818845, 0.007312320232391358, 0.007372799873352051, 0.0072765440940856935, 0.007358463764190673, 0.007523712158203125, 0.0074860801696777345, 0.007370751857757568, 0.007427231788635254, 0.007226399898529053, 0.007361728191375733, 0.007320191860198975, 0.0072206082344055175, 0.007471744060516357, 0.0073539199829101565, 0.007295008182525635, 0.007319327831268311, 0.007301760196685791, 0.0072202239036560055, 0.0073695359230041505, 0.007580959796905517, 0.0074126400947570805, 0.007407008171081543, 0.007406176090240479, 0.007329792022705078, 0.007362559795379638, 0.007372799873352051, 0.007417280197143555, 0.007446144104003906, 0.007353280067443848, 0.007216896057128906, 0.0073047041893005375, 0.007369472026824951, 0.007277952194213867, 
0.007375232219696045, 0.007436607837677002, 0.007225279808044434, 0.0074561920166015625, 0.007246399879455566, 0.007249152183532715, 0.007363264083862305, 0.007390624046325684, 0.006985504150390625, 0.007435359954833984, 0.007391071796417236, 0.0073400321006774905, 0.007411712169647216, 0.007221248149871826, 0.0074132800102233885, 0.007311615943908691, 0.007332064151763916, 0.007362271785736084, 0.007317791938781739, 0.007211232185363769, 0.007321375846862793, 0.007319200038909912, 0.007201119899749756, 0.007319551944732666, 0.007349279880523682, 0.007181280136108399, 0.007331488132476807, 0.007295104026794433, 0.007157279968261719, 0.007505856037139893, 0.007369215965270996, 0.007227551937103271, 0.007359936237335205, 0.007387807846069336, 0.007198624134063721, 0.0072971200942993164, 0.007194272041320801, 0.007276927947998047, 0.0073417601585388185, 0.007268640041351318, 0.007276127815246582, 0.007292384147644043, 0.00715667200088501, 0.007243167877197265, 0.007477407932281494, 0.00723798418045044, 0.007300864219665528, 0.007238016128540039, 0.007229407787322998, 0.007610591888427734, 0.007384607791900635, 0.0072176318168640135, 0.0073885760307312014, 0.007423935890197754, 0.007336607933044434, 0.007251743793487549, 0.0073290238380432126, 0.007356639862060547, 0.007663424015045166, 0.007418591976165772, 0.007286784172058106, 0.007647232055664062, 0.007419904232025146, 0.007247231960296631, 0.007418015956878662, 0.007315455913543701, 0.0072032961845397946, 0.007301119804382325, 0.007444128036499024, 0.00726255989074707, 0.007421216011047363, 0.006985727787017822, 0.007366112232208252, 0.007461567878723144, 0.007310336112976074, 0.007291744232177735, 0.0073335361480712895, 0.0071699838638305665, 0.007233503818511963, 0.007223743915557862, 0.007227263927459717, 0.0072154560089111325, 0.0072347202301025395, 0.007427711963653564, 0.007334911823272705, 0.007268352031707764, 0.007214879989624024, 0.007274720191955566, 0.007245728015899658, 0.00713318395614624, 0.007270463943481445, 0.007223296165466309, 0.007163519859313965, 0.007196288108825683, 0.0071950721740722656, 0.007274847984313965, 0.0073175039291381834, 0.007183584213256836, 0.00717903995513916, 0.0072740797996521, 0.007299007892608643, 0.007157663822174073, 0.007230080127716064, 0.0071042881011962895, 0.007180448055267334, 0.007447840213775635, 0.007303904056549073, 0.007521984100341797, 0.007321919918060303, 0.007258207798004151, 0.007257535934448243, 0.007285215854644775, 0.007145376205444336, 0.007284832000732422, 0.0072336640357971195, 0.007259007930755615, 0.007271423816680909, 0.008062080383300781, 0.00733296012878418, 0.00737171220779419, 0.007322815895080567, 0.007195712089538574, 0.007259903907775879, 0.00722982406616211, 0.007141856193542481, 0.007333568096160889, 0.007191936016082764, 0.0072344961166381835, 0.0072520642280578616, 0.007366559982299805, 0.00723740816116333, 0.007364831924438477, 0.0071553921699523924, 0.007221727848052978, 0.006998015880584717, 0.007212255954742432, 0.007287136077880859, 0.007242176055908203, 0.007153664112091064, 0.007286623954772949, 0.007247200012207031, 0.007227263927459717, 0.007289792060852051, 0.007430047988891602, 0.007245888233184814, 0.00728879976272583, 0.0072213120460510255, 0.007273632049560547, 0.007416160106658936, 0.007294559955596924, 0.0073613119125366215, 0.007295104026794433, 0.00743833589553833, 0.007308320045471191, 0.007430624008178711, 0.00872704029083252, 0.00908681583404541, 0.007364031791687012, 0.0074206399917602535, 0.0072882242202758785, 0.007315904140472412, 
0.007487071990966797, 0.007288832187652588, 0.007281216144561767, 0.0073192639350891115, 0.00723692798614502, 0.007457759857177735, 0.0073331518173217775, 0.007192383766174317, 0.007355296134948731, 0.0072765440940856935, 0.007176191806793213, 0.007278592109680176, 0.007184383869171143, 0.007170048236846924, 0.0072540159225463864, 0.007127039909362793, 0.0072837438583374025, 0.007254655838012695, 0.007102816104888916, 0.007253727912902832, 0.007244063854217529, 0.00715718412399292, 0.007234079837799072, 0.007223328113555908, 0.0072800002098083495, 0.007262911796569824, 0.007187551975250244, 0.007582560062408448, 0.007489535808563232, 0.007266304016113281, 0.007231488227844239, 0.0075196800231933595, 0.007410016059875488, 0.007327231884002685, 0.0076564159393310545, 0.0073274879455566405, 0.0069172801971435546, 0.007250815868377686, 0.007292928218841553, 0.007207136154174804, 0.007232416152954102, 0.007299967765808105, 0.007202559947967529, 0.007312704086303711, 0.0074988799095153805, 0.0073992319107055666, 0.0073498239517211916, 0.0074199681282043455, 0.007288383960723877, 0.007269216060638428, 0.007266272068023682, 0.007251327991485596, 0.007508607864379882, 0.007280640125274658, 0.007237535953521728, 0.007243072032928467, 0.007277344226837158, 0.007180287837982178, 0.007180287837982178, 0.0071874880790710445, 0.007183328151702881, 0.007106719970703125, 0.007139135837554931, 0.007151423931121826, 0.007116928100585938, 0.0071868162155151365, 0.007130847930908203, 0.007288352012634277, 0.007236288070678711, 0.007178016185760498, 0.007138815879821778, 0.007176703929901123, 0.007145247936248779, 0.0072911038398742675, 0.007143487930297851, 0.007120831966400147, 0.007124512195587158, 0.007192063808441162, 0.007129631996154785, 0.007192255973815918, 0.007226111888885498, 0.0071823358535766605, 0.007369760036468506, 0.007310239791870117, 0.00713862419128418, 0.007246304035186768, 0.007256192207336426, 0.0071558718681335445, 0.007507359981536866, 0.0072420802116394045, 0.007225599765777588, 0.0073001918792724605, 0.007207839965820312, 0.0071989760398864745, 0.007275424003601074, 0.007237664222717285, 0.007465792179107666, 0.007265503883361816, 0.007230303764343262, 0.006966303825378418, 0.007310304164886474, 0.007237599849700928, 0.007247007846832275, 0.007308159828186035, 0.007135231971740722, 0.007241343975067139, 0.0072195839881896975, 0.007131135940551757, 0.0072520642280578616, 0.007290080070495606, 0.00713593578338623, 0.007242815971374512, 0.007406367778778076, 0.007266464233398437, 0.007284063816070557, 0.007225599765777588, 0.007141600131988526, 0.007314655780792236, 0.007226336002349854, 0.0072206401824951175, 0.007257919788360595, 0.007158559799194336, 0.007206624031066895, 0.007303328037261963, 0.007285888195037842, 0.00751526403427124, 0.007438464164733887, 0.007187359809875489, 0.00727945613861084, 0.007286784172058106, 0.00751200008392334, 0.007327072143554688, 0.007297183990478516, 0.007184959888458252, 0.0072765440940856935, 0.0074050559997558595, 0.007221856117248535, 0.007500991821289062, 0.007340767860412598, 0.007231488227844239, 0.007493663787841797, 0.007318880081176758, 0.00737113618850708, 0.007334144115447998, 0.0073211522102355955, 0.007229087829589843, 0.007333695888519287, 0.0072650880813598636, 0.0072111678123474125, 0.007518208026885987, 0.007276576042175293, 0.007199999809265137, 0.0073671360015869145, 0.007244031906127929, 0.007282688140869141, 0.007317759990692139, 0.0072005119323730465, 0.00760422420501709, 0.00733187198638916, 0.0072453441619873046, 
0.007480768203735352, 0.007362592220306397, 0.0071190400123596195, 0.007506591796875, 0.007313375949859619, 0.007362592220306397, 0.007307007789611817, 0.007329055786132813, 0.007440832138061524, 0.007296895980834961, 0.007459487915039063, 0.007304895877838135, 0.0075285758972167965, 0.007308800220489502, 0.007217120170593261, 0.00729472017288208, 0.007365280151367187, 0.0072533760070800785, 0.007330592155456543, 0.007358399868011475, 0.007392735958099366, 0.007426815986633301, 0.007689856052398682, 0.0075339522361755375, 0.007648416042327881, 0.007651328086853027, 0.007480319976806641, 0.007434336185455323, 0.007396255970001221, 0.007305215835571289, 0.007354207992553711, 0.007383039951324463, 0.007272255897521973, 0.007291071891784668, 0.007188608169555664, 0.007318431854248047, 0.007228384017944336, 0.007233535766601563, 0.0072063679695129395, 0.00723529577255249, 0.007421823978424072, 0.007246784210205078, 0.007169888019561767, 0.007159967899322509, 0.007147520065307617, 0.007198944091796875, 0.007182240009307861, 0.007363967895507813, 0.007174816131591797, 0.007274335861206055, 0.007186431884765625, 0.007178239822387696, 0.007153664112091064, 0.007262400150299072, 0.007261663913726807, 0.007110464096069336, 0.007291423797607422, 0.0073088321685791015, 0.007172224044799805, 0.007192416191101074, 0.0071133761405944825, 0.007235424041748047, 0.007600128173828125, 0.007239871978759765, 0.007247680187225341, 0.0069753599166870114, 0.007295167922973632, 0.007291935920715332, 0.007305984020233154, 0.007215007781982422, 0.007236288070678711, 0.007257023811340332, 0.007230336189270019, 0.007323488235473633, 0.007211008071899414, 0.007294976234436036, 0.0073499841690063475, 0.007323935985565185, 0.0073994240760803225, 0.007442495822906494, 0.007294911861419678, 0.007215104103088379, 0.007579648017883301, 0.007292448043823242, 0.007227871894836426, 0.007299200057983398, 0.007132287979125977, 0.007254784107208252, 0.0072427520751953125, 0.0072121920585632325, 0.007249760150909424, 0.007286687850952148, 0.007157472133636475, 0.007307648181915283, 0.007221248149871826, 0.007165952205657959, 0.0071784000396728515, 0.007260000228881836, 0.007480607986450195, 0.007313759803771973, 0.007268032073974609, 0.007303872108459473, 0.007443808078765869, 0.007758815765380859, 0.007206016063690186, 0.007241919994354248, 0.00736243200302124, 0.007297535896301269, 0.007249343872070313, 0.007371327877044678, 0.007172095775604248, 0.007137279987335205, 0.007155712127685547, 0.007104512214660645, 0.00751580810546875, 0.007223648071289062, 0.007307263851165771, 0.007175968170166015, 0.007155935764312744, 0.007131199836730957, 0.007124927997589112, 0.0074035201072692874, 0.007170048236846924, 0.007165823936462402, 0.0071190400123596195, 0.007124927997589112, 0.007370975971221924, 0.007270175933837891, 0.006883391857147217, 0.007251296043395996, 0.007229184150695801, 0.007235743999481201, 0.00720358419418335, 0.007577856063842773, 0.007364352226257324, 0.007264256000518799, 0.007301119804382325, 0.0075673599243164065, 0.007269440174102783, 0.007399680137634278, 0.007374688148498535, 0.007469920158386231, 0.007828800201416016, 0.007313248157501221, 0.007428959846496582, 0.007479296207427978, 0.007335455894470215, 0.0073274879455566405, 0.007256256103515625, 0.007344351768493652, 0.0072292160987854, 0.007277088165283203, 0.007208960056304932, 0.007158944129943848, 0.007230303764343262, 0.0075054078102111815, 0.0073497920036315915, 0.007332831859588623, 0.0072984957695007326, 0.0072048320770263675, 0.007276703834533691, 
0.0074347519874572755, 0.007372735977172851, 0.007327712059020996, 0.007288864135742187, 0.007364607810974121, 0.007274496078491211, 0.007215104103088379, 0.007428095817565918, 0.007312607765197754, 0.007199520111083984, 0.007259168148040771, 0.007235648155212403, 0.007189184188842773, 0.0072173762321472166, 0.007351935863494873, 0.007162208080291748, 0.007231040000915527, 0.007250400066375733, 0.007204864025115967, 0.007404799938201904, 0.007367424011230469, 0.0071918082237243655, 0.007211552143096924, 0.007174784183502197, 0.007155551910400391, 0.007144832134246827, 0.007173727989196777, 0.007309567928314209, 0.007268896102905273, 0.007188479900360107, 0.006879072189331054, 0.007159359931945801, 0.007139935970306397, 0.00721724796295166, 0.007194303989410401, 0.007092192173004151, 0.007112959861755371, 0.00714137601852417, 0.007147295951843262, 0.007145696163177491, 0.00748748779296875, 0.007198719978332519, 0.00717955207824707, 0.007213791847229004, 0.007159808158874512, 0.0071718721389770505, 0.0071553921699523924, 0.007161600112915039, 0.007131936073303223, 0.007118048191070557, 0.007136032104492187, 0.007372704029083252, 0.0071394238471984865, 0.0071363840103149416, 0.00719974422454834, 0.007159872055053711, 0.007227200031280518, 0.0071205759048461915, 0.007129407882690429, 0.007106560230255127, 0.0071188478469848635, 0.007326720237731933, 0.007187327861785889, 0.007153759956359864, 0.007146687984466553, 0.007209375858306884, 0.0071418237686157226, 0.007214272022247314, 0.008043295860290527, 0.0072550721168518064, 0.007250944137573242, 0.007376543998718262, 0.00725875186920166, 0.0071699519157409665, 0.007186463832855225, 0.007161632061004639, 0.007301119804382325, 0.007197792053222656, 0.007109536170959473, 0.007127168178558349, 0.007176352024078369, 0.0072631678581237796, 0.007223584175109863, 0.007186944007873535, 0.007165952205657959, 0.007180287837982178, 0.007362080097198486, 0.007209440231323242, 0.007259840011596679, 0.007399744033813477, 0.00728166389465332, 0.007238656044006348, 0.0072761597633361815, 0.006926047801971436, 0.007327104091644287, 0.007420032024383545, 0.007254591941833496, 0.0071649918556213376, 0.00717299222946167, 0.007582784175872803, 0.007226304054260254, 0.007267776012420654, 0.007272960186004639, 0.007565375804901123, 0.007292928218841553, 0.007213024139404297, 0.0071536960601806645, 0.007210175991058349, 0.007236320018768311, 0.007161695957183838, 0.007481599807739258, 0.007274496078491211, 0.007196671962738037, 0.007469056129455566, 0.007264480113983154, 0.00724560022354126, 0.007628799915313721, 0.0072696638107299805, 0.007328479766845703, 0.007301119804382325, 0.007247680187225341, 0.007264448165893555, 0.007345439910888672, 0.007301055908203125, 0.007274335861206055, 0.007369952201843262, 0.007354080200195312, 0.007515903949737549, 0.007446784019470215, 0.00738918399810791, 0.00737011194229126, 0.007334303855895996, 0.007279007911682129, 0.007298431873321533, 0.007298783779144287, 0.007215839862823486, 0.007235583782196045, 0.007337024211883545, 0.007162816047668457, 0.007282495975494385, 0.00726854419708252, 0.007292992115020752, 0.007325600147247315, 0.007628191947937011, 0.0073259520530700685, 0.007309247970581054, 0.007304800033569336, 0.007198944091796875, 0.007243552207946777, 0.007193439960479736, 0.007127039909362793, 0.007220479965209961, 0.007141856193542481, 0.007180543899536132, 0.0071536960601806645, 0.007262207984924316, 0.006866047859191895, 0.0071138558387756345, 0.007089920043945312, 0.00714137601852417, 0.0071495680809020995, 
0.007093344211578369, 0.0072178239822387694, 0.007254271984100342, 0.0071842560768127445, 0.007344255924224854, 0.007174015998840332, 0.007066048145294189, 0.007216383934020996, 0.007094175815582276, 0.007602240085601807, 0.007264575958251953, 0.007063712120056152, 0.007130112171173096, 0.0071979198455810545, 0.0070936641693115235, 0.007178656101226807, 0.0071695041656494145, 0.007071839809417725, 0.00712502384185791, 0.007127168178558349, 0.007051519870758057, 0.0071844801902771, 0.00710640001296997, 0.007337920188903809, 0.007135359764099121, 0.0070425281524658205, 0.007129312038421631, 0.007187039852142334, 0.007085792064666748, 0.007174143791198731, 0.007148767948150635, 0.007121695995330811, 0.007229440212249756, 0.007266047954559326, 0.007261919975280762, 0.007347839832305908, 0.007434815883636475, 0.00741315221786499, 0.007676864147186279, 0.007524415969848632, 0.007494847774505615, 0.007510335922241211, 0.007466432094573975, 0.007260640144348144, 0.007307487964630127, 0.007330016136169433, 0.0071636481285095215, 0.007268703937530517, 0.007325695991516113, 0.00726643180847168, 0.0076919360160827635, 0.007563680171966553, 0.007464767932891846, 0.007413760185241699, 0.00739247989654541, 0.007230239868164063, 0.007359968185424805, 0.0074122557640075686, 0.006893760204315186, 0.007374720096588135, 0.007600416183471679, 0.007580095767974853, 0.0074988799095153805, 0.007523295879364014, 0.007567264080047607, 0.007571455955505371, 0.00739737606048584, 0.007259232044219971, 0.007309247970581054, 0.0075491838455200196, 0.00726259183883667, 0.0073810238838195805, 0.007410240173339844, 0.007718207836151123, 0.007569856166839599, 0.007395584106445313, 0.007390687942504883, 0.0075205440521240235, 0.00740556812286377, 0.007319551944732666, 0.007560256004333496, 0.007204864025115967, 0.007328703880310059, 0.007204864025115967, 0.007102464199066162, 0.007159103870391845, 0.007170400142669678, 0.007162176132202149, 0.0072923521995544435, 0.007471712112426758, 0.007468287944793701, 0.007467775821685791, 0.007304255962371826, 0.007221439838409424, 0.007242496013641357, 0.007378079891204834, 0.007502687931060791, 0.007330048084259033, 0.007196415901184082, 0.007239520072937012, 0.007559008121490478, 0.007391488075256348, 0.007419392108917237, 0.007432767868041992, 0.00721340799331665, 0.007253791809082031, 0.007303040027618408, 0.007237376213073731, 0.007217408180236817, 0.00755017614364624, 0.007442592144012451, 0.0072481918334960935, 0.007420159816741943, 0.007215136051177978, 0.00720198392868042, 0.007414912223815918, 0.007337503910064698, 0.007452991962432862, 0.00784169578552246, 0.0075788478851318355, 0.00754969596862793, 0.007323647975921631, 0.007419904232025146, 0.007394783973693848, 0.007399775981903076, 0.007350304126739502, 0.007293056011199952, 0.007557375907897949, 0.007372223854064942, 0.007268671989440918, 0.007287168025970459, 0.007343776226043701, 0.007415584087371826, 0.007437727928161621, 0.007311967849731446, 0.007356607913970947, 0.0074052481651306155, 0.007244192123413086, 0.007264063835144043, 0.007690368175506591, 0.007476704120635986, 0.0075392317771911625, 0.007468800067901612, 0.007352575778961181, 0.007294976234436036, 0.007312384128570557, 0.007236608028411865, 0.007326848030090332, 0.007236320018768311, 0.007168064117431641, 0.007250048160552978, 0.007229407787322998, 0.00718006420135498, 0.007173632144927979, 0.007221983909606934, 0.007121056079864502, 0.007444096088409424, 0.00729478406906128, 0.007188735961914062, 0.007231647968292236, 0.00715721607208252, 
0.007164447784423828, 0.007125216007232666, 0.007116576194763183, 0.00716918420791626, 0.007088992118835449, 0.007116799831390381, 0.007103648185729981, 0.007168863773345947, 0.0071608638763427735, 0.007148128032684326, 0.007299456119537353, 0.007425759792327881, 0.007683360099792481, 0.007535520076751709, 0.007478784084320069, 0.007594592094421386, 0.007610496044158936, 0.00753385591506958, 0.007405504226684571, 0.007367263793945313, 0.007331552028656006, 0.007387487888336181, 0.007394944190979004, 0.006965023994445801, 0.007194719791412354, 0.007243360042572021, 0.00723203182220459, 0.0071922879219055175, 0.0072904000282287594, 0.007441152095794678, 0.007159808158874512, 0.00714137601852417, 0.007224832057952881, 0.007108352184295654, 0.007027455806732178, 0.007042655944824219, 0.007084447860717774, 0.007280640125274658, 0.00720688009262085, 0.00718998384475708, 0.007141952037811279, 0.007426047801971435, 0.007683231830596924, 0.00717091178894043, 0.007065375804901123, 0.007135168075561523, 0.007114272117614746, 0.007205632209777832, 0.007130815982818604, 0.007291200160980224, 0.00711084794998169, 0.0071075520515441895, 0.007286911964416504, 0.007104383945465088, 0.007074656009674073, 0.0070728640556335445, 0.0071361598968505855, 0.007131264209747314, 0.0072211198806762696, 0.0072399678230285645, 0.007236544132232666, 0.007340479850769043, 0.007395679950714111, 0.007333920001983642, 0.0074399042129516604, 0.007436736106872559, 0.007763967990875244, 0.007494783878326416, 0.007361055850982666, 0.007508319854736328, 0.007507967948913574, 0.007573376178741455, 0.007536543846130371, 0.007411935806274414, 0.00740780782699585, 0.007566559791564942, 0.007438943862915039, 0.007274015903472901, 0.007252448081970215, 0.007460864067077637, 0.007319551944732666, 0.007231488227844239, 0.007232992172241211, 0.007573535919189453, 0.007258624076843262, 0.007315455913543701, 0.00686729621887207, 0.00721401596069336, 0.007125184059143066, 0.00717087984085083, 0.007158495903015137, 0.007130271911621093, 0.007200607776641846, 0.00713647985458374, 0.0072254080772399904, 0.0072997121810913085, 0.007200863838195801, 0.007423423767089844, 0.007309760093688965, 0.007173279762268067, 0.007318496227264404, 0.007163904190063477, 0.007178239822387696, 0.007109856128692627, 0.007097055912017822, 0.007155776023864746, 0.007385087966918945, 0.007413568019866944, 0.007557663917541504, 0.0077695040702819825, 0.0075532798767089845, 0.007269599914550781, 0.007322463989257812, 0.00732473611831665, 0.007230688095092774, 0.007235231876373291, 0.007158048152923584, 0.007114655971527099, 0.007102272033691406, 0.007086368083953858, 0.007261631965637207, 0.0071027522087097165, 0.007105567932128906, 0.007427040100097656, 0.007192575931549072, 0.007071743965148926, 0.007098527908325195, 0.007079775810241699, 0.007096320152282715, 0.0071476798057556155, 0.007181407928466797, 0.007336192131042481, 0.008362719535827637, 0.008158080101013184, 0.008287136077880859, 0.007841792106628418, 0.007386367797851562, 0.007387904167175293, 0.0071907520294189455, 0.007175583839416504, 0.007346271991729736, 0.0072147841453552245, 0.007698815822601318, 0.007254176139831543, 0.007303232192993164, 0.007489535808563232, 0.007163936138153076, 0.007116767883300781, 0.0071348481178283695, 0.007046847820281982, 0.007282944202423096, 0.007182623863220215, 0.007179679870605468, 0.007092927932739258, 0.007173376083374023, 0.007659776210784912, 0.007133056163787841, 0.007291520118713379, 0.007391424179077149, 0.0071693120002746585, 0.00717468786239624, 
0.007362559795379638, 0.007434239864349365, 0.007931903839111328, 0.007628960132598877, 0.007579296112060547, 0.007493535995483399, 0.007666207790374756, 0.007421311855316162, 0.00727897596359253, 0.00725110387802124, 0.007221439838409424, 0.007184639930725097, 0.007180704116821289, 0.007126431941986084, 0.007108672142028809, 0.007281311988830566, 0.007364575862884522, 0.007161024093627929, 0.00714576005935669, 0.007274208068847657, 0.007137375831604004, 0.007215744018554688, 0.007071743965148926, 0.00705292797088623, 0.0070553278923034665, 0.007378367900848388, 0.007193888187408447, 0.007122623920440674, 0.007056672096252442, 0.007250656127929687, 0.007280608177185058, 0.007155744075775147, 0.007077919960021973, 0.007049183845520019, 0.007108320236206054, 0.007064991950988769, 0.007013247966766357, 0.007207007884979248, 0.007151519775390625, 0.007183968067169189, 0.007094687938690185, 0.007129087924957276, 0.0073003840446472165, 0.00756604814529419, 0.007325600147247315, 0.007211103916168213, 0.007156032085418701, 0.0073842878341674805, 0.007195104122161866, 0.007583424091339112, 0.007174367904663086, 0.006804255962371826, 0.0071222720146179195, 0.007142367839813232, 0.007170015811920166, 0.0071450557708740235, 0.007145792007446289, 0.0072947521209716796, 0.007137311935424805, 0.007126912117004395, 0.007114336013793945, 0.00712992000579834, 0.007144864082336426, 0.0071495041847229, 0.007135359764099121, 0.007164159774780274, 0.00745907211303711, 0.007298816204071045, 0.007199935913085938, 0.007197760105133057, 0.0072046079635620115, 0.007120672225952148, 0.007121119976043701, 0.007258111953735351, 0.007230688095092774, 0.007252960205078125, 0.007114367961883545, 0.0071511039733886715, 0.007139935970306397, 0.007114848136901855, 0.007074975967407226, 0.007296864032745361, 0.007134208202362061, 0.007118112087249756, 0.007146207809448242, 0.0070986242294311525, 0.007110400199890137, 0.007098144054412842, 0.007168288230895996, 0.007325632095336914, 0.0071578240394592286, 0.00735430383682251, 0.007434239864349365, 0.007430272102355957, 0.007462783813476563, 0.007460608005523682, 0.00773363208770752, 0.007419839859008789, 0.007356031894683838, 0.007212543964385986, 0.007203648090362549, 0.007190368175506592, 0.007250368118286133, 0.00721072006225586, 0.0071280961036682125, 0.007119359970092773, 0.007124832153320312, 0.007118591785430908, 0.007125887870788574, 0.007096320152282715, 0.007370751857757568, 0.007157760143280029, 0.007180287837982178, 0.0072007040977478025]",tokens/s,136.59099122652066,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,854.544384,551.419904,0.0,148.897792,141.633024,s,1,9.49558203125,9.49558203125,0.0,9.49558203125,9.49558203125,9.49558203125,9.49558203125,[9.49558203125],,kWh,1.4472736220833344e-05,1.5859803081615646e-06,4.580281442000036e-06,2.0638997970994944e-05,,MB,1379.201024,666.763264,0.0,249.561088,216.900608,s,17,0.21852278518676757,0.012854281481574564,7.928810538723887e-05,0.012840031623840332,0.01293898868560791,0.012979187011718749,0.013018365478515625,"[0.01281328010559082, 0.012918560028076172, 0.012687264442443847, 0.012873632431030273, 0.01279638385772705, 0.012893919944763183, 0.012877568244934082, 0.012792736053466798, 0.013028160095214843, 0.012814304351806641, 0.012966943740844726, 0.012905695915222167, 0.01281488037109375, 0.012840031623840332, 0.012820063591003418, 0.012759008407592774, 0.012920351982116699]",tokens/s,19915.543343823036,kWh,3.7348273242221124e-07,4.118830739715239e-08,2.0491953023273676e-07,6.195905700521004e-07,tokens/kWh,413176075.2563961,MB,1412.317184,691.929088,0.0,274.726912,216.903168,s,17,10.379307739257813,0.6105475140739889,0.00405922480382058,0.6091581420898438,0.615909375,0.6176719360351562,0.6199537719726562,"[0.6082665405273437, 0.6063970336914063, 0.6087763671875, 0.60854150390625, 0.6077777099609375, 0.6205242309570312, 0.615076171875, 0.6091581420898438, 0.6169588623046875, 0.6098659057617187, 0.615209716796875, 0.611879638671875, 0.6101171264648437, 0.6062125244140625, 0.611785888671875, 0.6069061279296875, 0.605854248046875]",tokens/s,103.18607241493964,kWh,1.7430595285960185e-05,1.9222915076461e-06,6.509791723884916e-06,2.5862678517491193e-05,tokens/kWh,2435942.5864336696,,s,1071,10.369719833374031,0.009682278089051376,0.0002842819892125104,0.009639936447143555,0.009834400177001953,0.009927311897277833,0.010466054153442381,"[0.009277440071105958, 0.00961740779876709, 0.010216992378234863, 0.009599455833435059, 0.009748031616210937, 0.009488832473754883, 0.009487584114074707, 0.009509663581848144, 0.009512831687927246, 0.009495679855346679, 0.009454655647277831, 0.009553855895996093, 0.009543680191040039, 0.009469951629638672, 0.009440671920776368, 0.009499232292175292, 0.009631327629089356, 0.009586655616760253, 0.009575167655944824, 0.009639231681823731, 0.009789024353027344, 0.009661215782165528, 0.009846783638000489, 0.009602144241333007, 0.00968832015991211, 0.009633472442626953, 0.009830368041992188, 0.009633983612060547, 0.009602879524230957, 0.009731936454772949, 0.009617728233337402, 0.00966659164428711, 0.00970751953125, 0.009592639923095702, 0.00963379192352295, 0.009895520210266113, 0.010053088188171386, 0.009617792129516602, 0.009639712333679199, 0.009671456336975097, 0.009686431884765624, 0.00974294376373291, 0.009653375625610352, 0.00966486358642578, 0.009576992034912109, 0.009556032180786133, 0.009608863830566406, 0.009909952163696289, 0.009603679656982422, 0.00960211181640625, 0.009767871856689453, 0.009613311767578125, 0.009540736198425293, 0.009722592353820801, 0.009551903724670411, 0.00954793643951416, 0.00967801570892334, 0.009771327972412109, 0.00977558422088623, 0.009729567527770997, 0.009691807746887207, 0.00964793586730957, 0.009566368103027344, 0.009289759635925293, 0.009796192169189453, 0.009871423721313477, 0.009586784362792969, 0.009573504447937012, 0.009600095748901367, 0.00970524787902832, 0.009645440101623536, 0.00960985565185547, 0.009633567810058594, 0.009713888168334961, 0.0098121919631958, 0.010536224365234374, 0.009726143836975098, 
0.00972220802307129, 0.009743840217590331, 0.009800512313842774, 0.009834176063537597, 0.009635199546813964, 0.009558752059936523, 0.009602975845336915, 0.009838303565979005, 0.00968115234375, 0.009721887588500977, 0.00960912036895752, 0.0095250244140625, 0.009519136428833008, 0.009541024208068847, 0.009536031723022462, 0.009523551940917968, 0.009578495979309083, 0.00953769588470459, 0.009540767669677734, 0.009527135848999023, 0.009644895553588868, 0.009500672340393066, 0.009484288215637206, 0.00949465560913086, 0.009481120109558105, 0.009472991943359375, 0.00951910400390625, 0.009664511680603028, 0.00953657627105713, 0.009677760124206543, 0.009558239936828613, 0.009555999755859374, 0.009514752388000489, 0.009504768371582031, 0.009762016296386719, 0.009542431831359863, 0.009569503784179688, 0.009594816207885742, 0.00952835178375244, 0.009461567878723145, 0.00951910400390625, 0.009498208045959473, 0.009488096237182617, 0.009536383628845214, 0.00962281608581543, 0.009766655921936035, 0.009528096199035645, 0.009631263732910155, 0.009526847839355469, 0.00928767967224121, 0.009548895835876465, 0.009563039779663086, 0.009535488128662109, 0.00966649627685547, 0.009572416305541993, 0.009605152130126953, 0.009586527824401855, 0.009599072456359863, 0.009547712326049805, 0.009565792083740234, 0.009641535758972168, 0.009941503524780274, 0.009779647827148437, 0.00970911979675293, 0.009676992416381836, 0.009830656051635743, 0.009668800354003906, 0.009641792297363281, 0.009640064239501953, 0.009649951934814454, 0.009615360260009765, 0.009706591606140137, 0.009532256126403809, 0.009608448028564453, 0.009650976181030273, 0.00960524845123291, 0.009592479705810546, 0.00964412784576416, 0.009689344406127929, 0.009541631698608399, 0.009668607711791993, 0.009752575874328612, 0.009518112182617187, 0.009571071624755859, 0.00948419189453125, 0.009520959854125977, 0.009522944450378419, 0.009492704391479492, 0.009630240440368653, 0.009615360260009765, 0.009631903648376464, 0.009561920166015625, 0.00952070426940918, 0.009555423736572265, 0.009575200080871583, 0.00953980827331543, 0.010010623931884765, 0.009709407806396484, 0.011329695701599122, 0.009967295646667481, 0.00973350429534912, 0.009717856407165527, 0.009623519897460938, 0.009613632202148437, 0.009681471824645996, 0.009584927558898925, 0.009578240394592286, 0.00955622386932373, 0.009536704063415528, 0.009627327919006348, 0.00970019245147705, 0.009645312309265137, 0.00937382411956787, 0.009819968223571778, 0.010458239555358887, 0.009665472030639648, 0.00983414363861084, 0.009635711669921875, 0.009674688339233399, 0.009589311599731445, 0.009550975799560548, 0.009631648063659667, 0.009525216102600098, 0.009515999794006347, 0.009646080017089843, 0.009586688041687011, 0.009598143577575683, 0.009634623527526856, 0.009671968460083008, 0.009640671730041505, 0.009744383811950684, 0.009629695892333985, 0.009647616386413574, 0.009605055809020996, 0.009531968116760254, 0.009528448104858398, 0.009691328048706054, 0.009522047996520996, 0.009699135780334473, 0.009766912460327149, 0.00974028778076172, 0.009586688041687011, 0.009592063903808594, 0.009558783531188966, 0.009623552322387695, 0.009637120246887208, 0.009818880081176758, 0.009781248092651367, 0.00970751953125, 0.009727999687194825, 0.009650176048278808, 0.009684384346008301, 0.009619104385375976, 0.009536160469055175, 0.009652576446533202, 0.009614527702331543, 0.009573375701904297, 0.009711296081542968, 0.009545023918151856, 0.009496992111206054, 0.00958080005645752, 0.00952342414855957, 0.009654303550720215, 
0.009535519599914551, 0.00987440013885498, 0.009732000350952149, 0.00965113639831543, 0.009750687599182128, 0.009661760330200195, 0.009663007736206054, 0.009629695892333985, 0.009570367813110351, 0.009699487686157226, 0.009671839714050293, 0.009517696380615235, 0.009246720314025878, 0.009539199829101562, 0.009552255630493164, 0.00961571216583252, 0.009762463569641113, 0.009592831611633301, 0.009594143867492676, 0.009608096122741699, 0.009538975715637207, 0.009523200035095216, 0.009551456451416016, 0.009478464126586914, 0.009456128120422362, 0.009486047744750977, 0.00949276828765869, 0.009639936447143555, 0.009656319618225098, 0.009615519523620605, 0.009617247581481934, 0.00956982421875, 0.009605600357055663, 0.009513248443603515, 0.009731807708740235, 0.009596575736999511, 0.00961571216583252, 0.00974233627319336, 0.009693408012390136, 0.00955577564239502, 0.009628959655761719, 0.009554112434387207, 0.00965011215209961, 0.01013366413116455, 0.009638303756713868, 0.00963584041595459, 0.009705471992492675, 0.009778559684753418, 0.009662912368774415, 0.009705120086669922, 0.00976540756225586, 0.009736191749572755, 0.010189151763916016, 0.009717151641845704, 0.00963814353942871, 0.009838591575622559, 0.009732128143310547, 0.009614912033081055, 0.009734399795532226, 0.0097609281539917, 0.009600319862365723, 0.009648256301879883, 0.009523520469665528, 0.009583104133605956, 0.009614144325256348, 0.009604096412658691, 0.009557056427001952, 0.009541983604431153, 0.009668352127075196, 0.009648927688598633, 0.009567839622497559, 0.009556384086608886, 0.009682944297790527, 0.009596735954284667, 0.009822400093078613, 0.009265119552612305, 0.009585760116577148, 0.009564319610595702, 0.009515520095825195, 0.009611071586608887, 0.009542112350463866, 0.009539584159851074, 0.00953116798400879, 0.009533151626586914, 0.009613663673400878, 0.009597087860107422, 0.009607487678527833, 0.009606847763061524, 0.009489407539367676, 0.009561087608337402, 0.009574399948120118, 0.009584799766540527, 0.009537376403808594, 0.009805824279785156, 0.009560064315795898, 0.009547776222229003, 0.009576448440551758, 0.009760767936706542, 0.009670656204223632, 0.00960524845123291, 0.009707551956176758, 0.009594271659851075, 0.009601408004760743, 0.009847904205322265, 0.009748800277709961, 0.011253503799438477, 0.01392527961730957, 0.01419264030456543, 0.010000384330749512, 0.009875455856323241, 0.010047167778015136, 0.009761088371276856, 0.00963587188720703, 0.009572223663330078, 0.00985859203338623, 0.010405759811401367, 0.010062527656555176, 0.010156031608581542, 0.010417792320251465, 0.009994879722595214, 0.009641152381896972, 0.009621919631958008, 0.009620896339416504, 0.009534208297729493, 0.009495871543884277, 0.009742688179016114, 0.00959488010406494, 0.009613663673400878, 0.009727999687194825, 0.009686847686767579, 0.009660608291625976, 0.009578495979309083, 0.009570303916931153, 0.009570240020751953, 0.009556096076965332, 0.009582528114318848, 0.009506208419799805, 0.009626208305358886, 0.009258336067199707, 0.009628640174865723, 0.009584320068359376, 0.009549823760986328, 0.009944255828857423, 0.009688063621520996, 0.009690943717956544, 0.009607168197631836, 0.009623552322387695, 0.009551872253417968, 0.009591872215270997, 0.009694208145141601, 0.009658080101013183, 0.00971174430847168, 0.01015171241760254, 0.009820480346679688, 0.009737567901611328, 0.0096528959274292, 0.009657728195190429, 0.010963583946228027, 0.009711615562438965, 0.009704575538635254, 0.009790143966674804, 0.009654687881469727, 
0.009599871635437011, 0.009630335807800292, 0.009724191665649414, 0.009692288398742676, 0.009872256278991699, 0.00971776008605957, 0.00979094409942627, 0.009685567855834961, 0.009663711547851563, 0.009672639846801757, 0.009667391777038575, 0.00963814353942871, 0.009677824020385742, 0.009722623825073242, 0.009684991836547852, 0.009584639549255371, 0.009633184432983399, 0.009579104423522949, 0.009574175834655762, 0.010484288215637207, 0.010544672012329101, 0.012020031929016113, 0.009784128189086914, 0.00961638355255127, 0.009858976364135743, 0.009566304206848144, 0.009632927894592285, 0.009636704444885253, 0.009777376174926758, 0.009549407958984376, 0.009791647911071778, 0.009815584182739258, 0.009613663673400878, 0.009582976341247558, 0.009540863990783691, 0.009578240394592286, 0.009515135765075684, 0.00953328037261963, 0.00964076805114746, 0.009281536102294922, 0.00955344009399414, 0.009810400009155274, 0.009762816429138184, 0.009632896423339844, 0.009587583541870117, 0.009644031524658203, 0.009584480285644532, 0.009590784072875976, 0.00965443229675293, 0.009549823760986328, 0.009555264472961426, 0.009687775611877442, 0.009650208473205567, 0.009571295738220215, 0.009786335945129395, 0.009596927642822266, 0.009570303916931153, 0.009620991706848145, 0.009454048156738282, 0.009488384246826171, 0.009500160217285156, 0.00949891185760498, 0.009730496406555177, 0.009616512298583985, 0.009734848022460937, 0.009805567741394043, 0.009733535766601563, 0.009708064079284668, 0.009703743934631347, 0.00969484806060791, 0.009633343696594238, 0.00965340805053711, 0.009575519561767578, 0.009700032234191894, 0.009731967926025391, 0.009637887954711915, 0.009635904312133788, 0.009711711883544923, 0.009664352416992188, 0.009689087867736817, 0.009660096168518066, 0.009613471984863281, 0.009814175605773925, 0.0096112642288208, 0.00985200023651123, 0.009726655960083009, 0.00974665641784668, 0.009768192291259765, 0.00977996826171875, 0.009791071891784669, 0.009744223594665527, 0.009732768058776856, 0.009810015678405762, 0.00966431999206543, 0.009642080307006836, 0.009729951858520507, 0.009589056015014648, 0.009623231887817383, 0.00966652774810791, 0.00962889575958252, 0.009701791763305665, 0.00968950366973877, 0.009382271766662597, 0.009668416023254394, 0.009732288360595703, 0.009704799652099609, 0.009599200248718262, 0.009711456298828126, 0.009585311889648437, 0.009662176132202148, 0.009677023887634277, 0.009691136360168457, 0.009815936088562011, 0.0097096004486084, 0.009793631553649903, 0.009897983551025391, 0.009684384346008301, 0.009669216156005859, 0.00967193603515625, 0.00972646427154541, 0.009685088157653808, 0.009932064056396484, 0.009770112037658691, 0.009750271797180176, 0.009570303916931153, 0.009825823783874511, 0.009580863952636718, 0.009576607704162597, 0.009646080017089843, 0.009664223670959473, 0.00967728042602539, 0.009568063735961915, 0.00982425594329834, 0.009565728187561035, 0.009581151962280274, 0.009624671936035157, 0.009612064361572265, 0.009631839752197266, 0.009566143989562988, 0.009879648208618164, 0.009684864044189454, 0.009589823722839355, 0.009626560211181641, 0.011395071983337402, 0.012722175598144531, 0.009842432022094727, 0.009797792434692382, 0.009703104019165038, 0.009709055900573731, 0.009687968254089355, 0.009738559722900391, 0.009633152008056641, 0.00967465591430664, 0.009654687881469727, 0.009577983856201172, 0.009822079658508301, 0.009828351974487304, 0.009753215789794921, 0.009718848228454589, 0.009880512237548829, 0.009764415740966797, 0.009836832046508788, 
0.009825599670410157, 0.010289055824279785, 0.00970847988128662, 0.00927177619934082, 0.00960307216644287, 0.00960655975341797, 0.009601632118225097, 0.009693183898925782, 0.009653375625610352, 0.009644927978515625, 0.009618847846984864, 0.009579360008239746, 0.009657855987548827, 0.009578911781311036, 0.009573984146118163, 0.009625215530395508, 0.00966486358642578, 0.009734432220458984, 0.00961740779876709, 0.009574048042297363, 0.00998639965057373, 0.009740063667297363, 0.009611424446105956, 0.009545791625976562, 0.0096145601272583, 0.009628447532653808, 0.00975164794921875, 0.009585503578186036, 0.009541695594787597, 0.009557536125183105, 0.010359264373779297, 0.009843935966491699, 0.009679903984069824, 0.00963702392578125, 0.009594623565673827, 0.009672639846801757, 0.009595808029174804, 0.009670656204223632, 0.009719840049743652, 0.009586655616760253, 0.009648127555847168, 0.009658368110656738, 0.009600640296936035, 0.009705856323242187, 0.009794848442077637, 0.009673439979553222, 0.009743488311767579, 0.009755071640014648, 0.009687264442443848, 0.009597503662109375, 0.009691935539245605, 0.00969980812072754, 0.009713503837585449, 0.009638079643249512, 0.00965875244140625, 0.009588735580444336, 0.009719743728637695, 0.00967903995513916, 0.00962342357635498, 0.009846783638000489, 0.009761919975280762, 0.009662655830383301, 0.009736895561218262, 0.009678048133850098, 0.009787391662597657, 0.009726752281188964, 0.009297504425048828, 0.009586912155151367, 0.009637280464172364, 0.00965225601196289, 0.009854175567626954, 0.009817824363708497, 0.009873408317565918, 0.009828351974487304, 0.00973136043548584, 0.009687775611877442, 0.009754624366760254, 0.009912320137023926, 0.009778271675109864, 0.009845664024353027, 0.009807583808898925, 0.00972009563446045, 0.00961945629119873, 0.009596447944641113, 0.009630175590515136, 0.009686976432800294, 0.009784543991088867, 0.00975692844390869, 0.009847647666931152, 0.009672415733337403, 0.009648159980773926, 0.009867520332336425, 0.009645824432373046, 0.009795200347900391, 0.009705504417419434, 0.00990447998046875, 0.00997929573059082, 0.009896160125732422, 0.009869088172912597, 0.009697440147399903, 0.009676608085632324, 0.009683584213256836, 0.0096845121383667, 0.009671327590942382, 0.009700480461120606, 0.009630016326904297, 0.009670463562011719, 0.009777024269104003, 0.009622207641601562, 0.00962559986114502, 0.009665696144104004, 0.00970867156982422, 0.009645792007446289, 0.009666399955749512, 0.009891136169433594, 0.009824992179870606, 0.009895263671875, 0.009959424018859863, 0.010012479782104492, 0.009817055702209473, 0.009838848114013672, 0.009862943649291992, 0.010119423866271973, 0.009884832382202148, 0.009856703758239747, 0.009796480178833007, 0.009653504371643067, 0.009643936157226562, 0.009771807670593262, 0.009306400299072266, 0.009560256004333497, 0.010099840164184571, 0.009878111839294433, 0.009736063957214356, 0.009732319831848144, 0.00973369598388672, 0.00969977569580078, 0.009713664054870605, 0.009721440315246582, 0.009683391571044923, 0.009709535598754883, 0.009682368278503418, 0.009650688171386718, 0.009621503829956055, 0.009662495613098145, 0.00965225601196289, 0.009803232192993163, 0.009835040092468262, 0.009736191749572755, 0.009964672088623047, 0.009698399543762207, 0.009827456474304198, 0.009638560295104981, 0.009673919677734375, 0.00967353630065918, 0.00956396770477295, 0.009593024253845214, 0.00953536033630371, 0.00959011173248291, 0.00961411190032959, 0.009629695892333985, 0.009613183975219727, 0.009678400039672852, 
0.009574975967407227, 0.009692864418029785, 0.009457728385925294, 0.009636096000671387, 0.00957209587097168, 0.009758175849914551, 0.009677727699279786, 0.00956982421875, 0.009624928474426269, 0.009645055770874024, 0.009523232460021973, 0.009657312393188476, 0.009626463890075683, 0.009709728240966798, 0.009729120254516601, 0.009831328392028809, 0.009643839836120606, 0.009571840286254883, 0.009833151817321778, 0.009830656051635743, 0.009872639656066894, 0.009892352104187012, 0.009823583602905273, 0.009927328109741212, 0.0098088960647583, 0.009759743690490723, 0.009875455856323241, 0.009827584266662597, 0.009792192459106445, 0.009360896110534669, 0.009841312408447265, 0.009717599868774414, 0.009646431922912597, 0.009643327713012696, 0.009613663673400878, 0.00964799976348877, 0.009750528335571289, 0.00968511962890625, 0.009695232391357422, 0.009768959999084472, 0.009811936378479004, 0.009736224174499512, 0.00966431999206543, 0.009609408378601074, 0.009682623863220215, 0.009764736175537109, 0.009673151969909668, 0.00970956802368164, 0.00980288028717041, 0.009997504234313965, 0.009963232040405274, 0.009629471778869629, 0.009617471694946289, 0.00960707187652588, 0.009654496192932129, 0.009654272079467773, 0.009553983688354492, 0.009603327751159667, 0.00956163215637207, 0.009824416160583497, 0.009861120223999023, 0.009730048179626465, 0.009668352127075196, 0.009652352333068848, 0.009646431922912597, 0.00978921604156494, 0.009766688346862793, 0.010085824012756348, 0.00956816005706787, 0.009759391784667969, 0.009748703956604004, 0.009625472068786622, 0.009498687744140625, 0.009475359916687012, 0.009626399993896485, 0.009644031524658203, 0.009666560173034668, 0.009602208137512207, 0.009583456039428711, 0.009588800430297851, 0.009573472023010255, 0.009540096282958984, 0.009521151542663574, 0.0095349760055542, 0.009550239562988281, 0.009668959617614745, 0.00956777572631836, 0.00969980812072754, 0.009664704322814941, 0.00970355224609375, 0.009733920097351073, 0.009706624031066894, 0.009340928077697755, 0.00959222412109375, 0.009575263977050781, 0.009619487762451171, 0.009594207763671874, 0.009519488334655761, 0.009525247573852539, 0.009528544425964356, 0.00960591983795166, 0.009602399826049804, 0.00963980770111084, 0.009485088348388671, 0.00962559986114502, 0.009520416259765624, 0.010269311904907227, 0.009563872337341309, 0.009621888160705567, 0.009603008270263672, 0.009513279914855958, 0.009500415802001953, 0.009513055801391602, 0.009631391525268554, 0.009527551651000976, 0.009523200035095216, 0.009622688293457032, 0.009573216438293458, 0.009864895820617677, 0.009711296081542968, 0.009582783699035645, 0.009664704322814941, 0.00965452766418457, 0.009614463806152343, 0.00984665584564209, 0.00990675163269043, 0.009927295684814453, 0.010022175788879395, 0.009843263626098633, 0.009752511978149414, 0.009699359893798828, 0.009631775856018066, 0.00956208038330078, 0.009844736099243164, 0.009569919586181641, 0.009606816291809083, 0.009581119537353515, 0.009592991828918457, 0.009495679855346679, 0.009622400283813476, 0.009527296066284179, 0.009519424438476563, 0.009512576103210449, 0.009582880020141602, 0.009533151626586914, 0.009525407791137695, 0.00948624038696289, 0.009547264099121093, 0.009550335884094239, 0.00952079963684082, 0.009423199653625489, 0.009525247573852539, 0.009558208465576172, 0.009484095573425293, 0.009537535667419434, 0.00932316780090332, 0.009567423820495606, 0.00955884838104248, 0.009611552238464355, 0.009565983772277832, 0.009567456245422363, 0.009699711799621582, 0.009506303787231446, 
0.009531552314758301, 0.009511615753173828, 0.009607168197631836, 0.009605119705200196, 0.009470175743103028, 0.009543647766113281, 0.009537088394165038, 0.00969059181213379, 0.009484959602355956, 0.009533568382263184, 0.009660063743591308, 0.00959721565246582, 0.009754688262939452, 0.00985206413269043, 0.009673184394836425, 0.009713664054870605, 0.009566623687744141, 0.009625696182250976, 0.009613183975219727, 0.009721856117248535, 0.009911456108093263, 0.009822751998901367, 0.009926079750061035, 0.010059871673583985, 0.009773504257202149, 0.009653663635253907, 0.00970847988128662, 0.00973209571838379, 0.010061247825622559, 0.009740863800048828, 0.00974028778076172, 0.009929920196533203, 0.010214367866516113, 0.010069855690002441, 0.009967616081237793, 0.009942655563354491, 0.009870944023132324, 0.009818400382995605, 0.009693792343139648, 0.009784223556518555, 0.009597344398498535, 0.009676416397094727, 0.009582880020141602, 0.009550592422485352, 0.009725888252258301, 0.010354975700378417, 0.009547103881835937, 0.009688575744628907, 0.009683232307434082, 0.009564736366271972, 0.009721887588500977, 0.00973840045928955, 0.009579936027526855, 0.009541152000427246, 0.00956816005706787, 0.009199935913085938, 0.009480128288269043, 0.009545791625976562, 0.009496576309204101, 0.009553183555603027, 0.009577183723449708, 0.00954099178314209, 0.009519743919372558, 0.009539584159851074, 0.009599136352539063, 0.009700736045837403, 0.009553791999816895, 0.0095600004196167, 0.009630368232727051, 0.009633919715881348, 0.009659584045410157, 0.009834400177001953, 0.009681599617004395, 0.009564096450805665, 0.009580127716064453, 0.009629792213439941, 0.009591263771057129, 0.00961945629119873, 0.009635231971740722, 0.009500736236572266, 0.009504544258117675, 0.009554207801818848, 0.009548255920410156, 0.009580127716064453, 0.009607583999633788, 0.00960102367401123, 0.009694560050964356, 0.009591456413269042, 0.009533439636230469, 0.00952723217010498, 0.009611392021179199, 0.009637503623962403, 0.00966652774810791, 0.009633184432983399, 0.009622655868530273, 0.009641792297363281, 0.009629631996154785, 0.009633407592773438, 0.009599616050720215, 0.009600831985473632, 0.00977286434173584, 0.0096810884475708, 0.009641183853149413, 0.009607680320739746, 0.009668895721435547, 0.009705471992492675, 0.009676223754882812, 0.009689087867736817, 0.009813728332519532, 0.00998896026611328, 0.009721887588500977, 0.009686880111694336, 0.009649791717529298, 0.009849120140075683, 0.009721887588500977, 0.009640128135681152, 0.009611328125, 0.009750432014465332, 0.009343104362487793, 0.009700287818908691, 0.00953775978088379, 0.009581279754638673, 0.009545280456542968, 0.009591232299804688, 0.009713664054870605, 0.00960307216644287, 0.009572352409362793, 0.009524895668029785, 0.009590144157409669, 0.009581024169921876, 0.009579008102416992, 0.009572352409362793, 0.009590656280517577, 0.009582304000854492, 0.00958505630493164, 0.009707391738891602, 0.009658687591552735, 0.009627455711364747, 0.009663935661315917, 0.009584223747253418, 0.009665696144104004, 0.0096561279296875, 0.009631360054016113, 0.009598400115966797, 0.009782208442687988, 0.009659711837768554, 0.009599679946899415, 0.010202943801879883, 0.009609503746032715, 0.009721407890319824, 0.009498623847961426, 0.009529696464538575, 0.00951296043395996, 0.009558176040649414, 0.009572192192077637, 0.009584639549255371, 0.009633983612060547, 0.009507712364196777, 0.009506815910339356, 0.009476479530334472, 0.009508447647094726, 0.009503711700439453, 0.009453824043273926, 
0.00943283176422119, 0.009457119941711426, 0.009486944198608398, 0.009500608444213868, 0.009645248413085937, 0.009813920021057129, 0.00963203239440918, 0.009681119918823242, 0.0096976957321167, 0.009652480125427246, 0.009770751953125, 0.009754624366760254, 0.009707648277282715, 0.009613311767578125, 0.009676671981811524, 0.009574496269226074, 0.009590592384338379, 0.009599072456359863]",tokens/s,103.28147888365136,,